use gh_workflow::{
    Concurrency, Event, Expression, Job, PullRequest, Push, Run, Step, Use, Workflow,
};
use indexmap::IndexMap;

use crate::tasks::workflows::{
    nix_build::build_nix,
    runners::Arch,
    steps::{BASH_SHELL, CommonJobConditions, repository_owner_guard_expression},
    vars::PathCondition,
};

use super::{
    runners::{self, Platform},
    steps::{self, FluentBuilder, NamedJob, named, release_job},
};

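// Top-level CI workflow: an `orchestrate` job inspects the changed files, the downstream
// jobs are gated on its outputs via `PathCondition::guard`, and `tests_pass` aggregates
// their results into a single summary check.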
pub(crate) fn run_tests() -> Workflow {
    // Add to this regex any paths whose changes should not trigger the full test suite:
    // - docs/
    // - script/update_top_ranking_issues/
    // - .github/ISSUE_TEMPLATE/
    // - .github/workflows/ (except .github/workflows/run_tests.yml)
    let should_run_tests = PathCondition::inverted(
        "run_tests",
        r"^(docs/|script/update_top_ranking_issues/|\.github/(ISSUE_TEMPLATE|workflows/(?!run_tests)))",
    );
    let should_check_docs = PathCondition::new("run_docs", r"^(docs/|crates/.*\.rs)");
    let should_check_scripts = PathCondition::new(
        "run_action_checks",
        r"^\.github/(workflows/|actions/|actionlint.yml)|tooling/xtask|script/",
    );
    let should_check_licences =
        PathCondition::new("run_licenses", r"^(Cargo.lock|script/.*licenses)");
    let should_build_nix = PathCondition::new(
        "run_nix",
        r"^(nix/|flake\.|Cargo\.|rust-toolchain.toml|\.cargo/config.toml)",
    );

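    // Each `PathCondition` is surfaced as an output of the `orchestrate` job below;
    // `guard` gates a job on the corresponding output (see `vars::PathCondition`).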
    let orchestrate = orchestrate(&[
        &should_check_scripts,
        &should_check_docs,
        &should_check_licences,
        &should_build_nix,
        &should_run_tests,
    ]);

    let mut jobs = vec![
        orchestrate,
        check_style(),
        should_run_tests.guard(run_platform_tests(Platform::Windows)),
        should_run_tests.guard(run_platform_tests(Platform::Linux)),
        should_run_tests.guard(run_platform_tests(Platform::Mac)),
        should_run_tests.guard(doctests()),
        should_run_tests.guard(check_workspace_binaries()),
        should_run_tests.guard(check_dependencies()), // could be more specific here?
        should_check_docs.guard(check_docs()),
        should_check_licences.guard(check_licenses()),
        should_check_scripts.guard(check_scripts()),
        should_build_nix.guard(build_nix(
            Platform::Linux,
            Arch::X86_64,
            "debug",
            // *don't* cache the built output
            Some("-zed-editor-[0-9.]*-nightly"),
            &[],
        )),
        should_build_nix.guard(build_nix(
            Platform::Mac,
            Arch::AARCH64,
            "debug",
            // *don't* cache the built output
            Some("-zed-editor-[0-9.]*-nightly"),
            &[],
        )),
    ];
    let tests_pass = tests_pass(&jobs);

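    // Pushed after `tests_pass` is constructed above, so this job still runs but its
    // result is not part of the aggregated `tests_pass` check.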
    jobs.push(should_run_tests.guard(check_postgres_and_protobuf_migrations())); // could be more specific here?

    named::workflow()
        .add_event(
            Event::default()
                .push(
                    Push::default()
                        .add_branch("main")
                        .add_branch("v[0-9]+.[0-9]+.x"),
                )
                .pull_request(PullRequest::default().add_branch("**")),
        )
        .concurrency(
            Concurrency::default()
                .group(concat!(
                    "${{ github.workflow }}-${{ github.ref_name }}-",
                    "${{ github.ref_name == 'main' && github.sha || 'anysha' }}"
                ))
                .cancel_in_progress(true),
        )
        .add_env(("CARGO_TERM_COLOR", "always"))
        .add_env(("RUST_BACKTRACE", 1))
        .add_env(("CARGO_INCREMENTAL", 0))
        .map(|mut workflow| {
            for job in jobs {
                workflow = workflow.add_job(job.name, job.job)
            }
            workflow
        })
        .add_job(tests_pass.name, tests_pass.job)
}

// Generates a bash script that checks changed files against regex patterns
// and sets GitHub output variables accordingly.
pub fn orchestrate(rules: &[&PathCondition]) -> NamedJob {
    let name = "orchestrate".to_owned();
    let step_name = "filter".to_owned();
    let mut script = String::new();

    script.push_str(indoc::indoc! {r#"
        if [ -z "$GITHUB_BASE_REF" ]; then
          echo "Not in a PR context (i.e., push to main/stable/preview)"
          COMPARE_REV="$(git rev-parse HEAD~1)"
        else
          echo "In a PR context comparing to pull_request.base.ref"
          git fetch origin "$GITHUB_BASE_REF" --depth=350
          COMPARE_REV="$(git merge-base "origin/${GITHUB_BASE_REF}" HEAD)"
        fi
        CHANGED_FILES="$(git diff --name-only "$COMPARE_REV" ${{ github.sha }})"

        check_pattern() {
          local output_name="$1"
          local pattern="$2"
          local grep_arg="$3"

          echo "$CHANGED_FILES" | grep "$grep_arg" "$pattern" && \
            echo "${output_name}=true" >> "$GITHUB_OUTPUT" || \
            echo "${output_name}=false" >> "$GITHUB_OUTPUT"
        }

    "#});

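    // Each rule becomes a job-level output mirroring the step output written by
    // `check_pattern`, so other jobs can read it through `needs`.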
    let mut outputs = IndexMap::new();

    for rule in rules {
        assert!(
            rule.set_by_step
                .borrow_mut()
                .replace(name.clone())
                .is_none()
        );
        assert!(
            outputs
                .insert(
                    rule.name.to_owned(),
                    format!("${{{{ steps.{}.outputs.{} }}}}", step_name, rule.name)
                )
                .is_none()
        );

        let grep_arg = if rule.invert { "-qvP" } else { "-qP" };
        script.push_str(&format!(
            "check_pattern \"{}\" '{}' {}\n",
            rule.name, rule.pattern, grep_arg
        ));
    }

    let job = Job::default()
        .runs_on(runners::LINUX_SMALL)
        .with_repository_owner_guard()
        .outputs(outputs)
        .add_step(steps::checkout_repo().add_with((
            "fetch-depth",
            "${{ github.ref == 'refs/heads/main' && 2 || 350 }}",
        )))
        .add_step(
            Step::new(step_name.clone())
                .run(script)
                .id(step_name)
                .shell(BASH_SHELL),
        );

    NamedJob { name, job }
}

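// Final job that `needs` every other job and fails unless each of them succeeded or was
// skipped (i.e. filtered out by `orchestrate`), making it suitable as a single required
// status check.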
pub fn tests_pass(jobs: &[NamedJob]) -> NamedJob {
    let mut script = String::from(indoc::indoc! {r#"
        set +x
        EXIT_CODE=0

        check_result() {
          echo "* $1: $2"
          if [[ "$2" != "skipped" && "$2" != "success" ]]; then EXIT_CODE=1; fi
        }

    "#});

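    // Emits one line per upstream job, roughly:
    //     check_result "<job_name>" "${{ needs.<job_name>.result }}"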
    script.push_str(
        &jobs
            .iter()
            .map(|job| {
                format!(
                    "check_result \"{}\" \"${{{{ needs.{}.result }}}}\"",
                    job.name, job.name
                )
            })
            .collect::<Vec<_>>()
            .join("\n"),
    );

    script.push_str("\n\nexit $EXIT_CODE\n");

    let job = Job::default()
        .runs_on(runners::LINUX_SMALL)
        .needs(
            jobs.iter()
                .map(|j| j.name.to_string())
                .collect::<Vec<String>>(),
        )
        .cond(repository_owner_guard_expression(true))
        .add_step(named::bash(&script));

    named::job(job)
}

fn check_style() -> NamedJob {
    fn check_for_typos() -> Step<Use> {
        named::uses(
            "crate-ci",
            "typos",
            "80c8a4945eec0f6d464eaf9e65ed98ef085283d1", // v1.38.1
        )
        .with(("config", "./typos.toml"))
    }
    named::job(
        release_job(&[])
            .runs_on(runners::LINUX_MEDIUM)
            .add_step(steps::checkout_repo())
            .add_step(steps::cache_rust_dependencies_namespace())
            .add_step(steps::setup_pnpm())
            .add_step(steps::script("./script/prettier"))
            .add_step(steps::script("./script/check-todos"))
            .add_step(steps::script("./script/check-keymaps"))
            .add_step(check_for_typos())
            .add_step(steps::cargo_fmt()),
    )
}

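// Checks for unused dependencies (cargo-machete), a Cargo.lock that needs updating, and,
// on pull requests, newly introduced vulnerable dependencies via dependency-review-action.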
fn check_dependencies() -> NamedJob {
    fn install_cargo_machete() -> Step<Use> {
        named::uses(
            "clechasseur",
            "rs-cargo",
            "8435b10f6e71c2e3d4d3b7573003a8ce4bfc6386", // v2
        )
        .add_with(("command", "install"))
        .add_with(("args", "cargo-machete@0.7.0"))
    }

    fn run_cargo_machete() -> Step<Use> {
        named::uses(
            "clechasseur",
            "rs-cargo",
            "8435b10f6e71c2e3d4d3b7573003a8ce4bfc6386", // v2
        )
        .add_with(("command", "machete"))
    }

    fn check_cargo_lock() -> Step<Run> {
        named::bash("cargo update --locked --workspace")
    }

    fn check_vulnerable_dependencies() -> Step<Use> {
        named::uses(
            "actions",
            "dependency-review-action",
            "67d4f4bd7a9b17a0db54d2a7519187c65e339de8", // v4
        )
        .if_condition(Expression::new("github.event_name == 'pull_request'"))
        .with(("license-check", false))
    }

    named::job(
        release_job(&[])
            .runs_on(runners::LINUX_SMALL)
            .add_step(steps::checkout_repo())
            .add_step(steps::cache_rust_dependencies_namespace())
            .add_step(install_cargo_machete())
            .add_step(run_cargo_machete())
            .add_step(check_cargo_lock())
            .add_step(check_vulnerable_dependencies()),
    )
}

fn check_workspace_binaries() -> NamedJob {
    named::job(
        release_job(&[])
            .runs_on(runners::LINUX_LARGE)
            .add_step(steps::checkout_repo())
            .add_step(steps::setup_cargo_config(Platform::Linux))
            .add_step(steps::cache_rust_dependencies_namespace())
            .map(steps::install_linux_dependencies)
            .add_step(steps::script("cargo build -p collab"))
            .add_step(steps::script("cargo build --workspace --bins --examples"))
            .add_step(steps::cleanup_cargo_config(Platform::Linux)),
    )
}

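// Runs clippy and the nextest suite on the given platform; Linux additionally warms the
// dependency cache and installs system packages before building.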
pub(crate) fn run_platform_tests(platform: Platform) -> NamedJob {
    let runner = match platform {
        Platform::Windows => runners::WINDOWS_DEFAULT,
        Platform::Linux => runners::LINUX_DEFAULT,
        Platform::Mac => runners::MAC_DEFAULT,
    };
    NamedJob {
        name: format!("run_tests_{platform}"),
        job: release_job(&[])
            .runs_on(runner)
            .add_step(steps::checkout_repo())
            .add_step(steps::setup_cargo_config(platform))
            .when(platform == Platform::Linux, |this| {
                this.add_step(steps::cache_rust_dependencies_namespace())
            })
            .when(
                platform == Platform::Linux,
                steps::install_linux_dependencies,
            )
            .add_step(steps::setup_node())
            .add_step(steps::clippy(platform))
            .when(platform == Platform::Linux, |job| {
                job.add_step(steps::cargo_install_nextest())
            })
            .add_step(steps::clear_target_dir_if_large(platform))
            .add_step(steps::cargo_nextest(platform))
            .add_step(steps::cleanup_cargo_config(platform)),
    }
}

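// Runs buf's breaking-change check on the protobuf definitions against the PR base branch
// (or the merge base with origin/main when not in a PR).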
pub(crate) fn check_postgres_and_protobuf_migrations() -> NamedJob {
    fn remove_untracked_files() -> Step<Run> {
        named::bash("git clean -df")
    }

    fn ensure_fresh_merge() -> Step<Run> {
        named::bash(indoc::indoc! {r#"
            if [ -z "$GITHUB_BASE_REF" ];
            then
              echo "BUF_BASE_BRANCH=$(git merge-base origin/main HEAD)" >> "$GITHUB_ENV"
            else
              git checkout -B temp
              git merge -q "origin/$GITHUB_BASE_REF" -m "merge main into temp"
              echo "BUF_BASE_BRANCH=$GITHUB_BASE_REF" >> "$GITHUB_ENV"
            fi
        "#})
    }

    fn bufbuild_setup_action() -> Step<Use> {
        named::uses("bufbuild", "buf-setup-action", "v1").add_with(("version", "v1.29.0"))
    }

    fn bufbuild_breaking_action() -> Step<Use> {
        named::uses("bufbuild", "buf-breaking-action", "v1")
            .add_with(("input", "crates/proto/proto/"))
            .add_with((
                "against",
                "https://github.com/${GITHUB_REPOSITORY}.git#branch=${BUF_BASE_BRANCH},subdir=crates/proto/proto/",
            ))
    }

    named::job(
        release_job(&[])
            .runs_on(runners::LINUX_DEFAULT)
            .add_env(("GIT_AUTHOR_NAME", "Protobuf Action"))
            .add_env(("GIT_AUTHOR_EMAIL", "ci@zed.dev"))
            .add_step(steps::checkout_repo().with(("fetch-depth", 0))) // fetch full history
            .add_step(remove_untracked_files())
            .add_step(ensure_fresh_merge())
            .add_step(bufbuild_setup_action())
            .add_step(bufbuild_breaking_action()),
    )
}

fn doctests() -> NamedJob {
    fn run_doctests() -> Step<Run> {
        named::bash(indoc::indoc! {r#"
            cargo test --workspace --doc --no-fail-fast
        "#})
        .id("run_doctests")
    }

    named::job(
        release_job(&[])
            .runs_on(runners::LINUX_DEFAULT)
            .add_step(steps::checkout_repo())
            .add_step(steps::cache_rust_dependencies_namespace())
            .map(steps::install_linux_dependencies)
            .add_step(steps::setup_cargo_config(Platform::Linux))
            .add_step(run_doctests())
            .add_step(steps::cleanup_cargo_config(Platform::Linux)),
    )
}

fn check_licenses() -> NamedJob {
    named::job(
        Job::default()
            .runs_on(runners::LINUX_SMALL)
            .add_step(steps::checkout_repo())
            .add_step(steps::cache_rust_dependencies_namespace())
            .add_step(steps::script("./script/check-licenses"))
            .add_step(steps::script("./script/generate-licenses")),
    )
}

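// Builds the mdBook documentation and link-checks both the markdown sources and the
// generated HTML with lychee.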
fn check_docs() -> NamedJob {
    fn lychee_link_check(dir: &str) -> Step<Use> {
        named::uses(
            "lycheeverse",
            "lychee-action",
            "82202e5e9c2f4ef1a55a3d02563e1cb6041e5332", // v2.4.1
        )
        .add_with(("args", format!("--no-progress --exclude '^http' '{dir}'")))
        .add_with(("fail", true))
        .add_with(("jobSummary", false))
    }

    fn install_mdbook() -> Step<Use> {
        named::uses(
            "peaceiris",
            "actions-mdbook",
            "ee69d230fe19748b7abf22df32acaa93833fad08", // v2
        )
        .with(("mdbook-version", "0.4.37"))
    }

    fn build_docs() -> Step<Run> {
        named::bash(indoc::indoc! {r#"
            mkdir -p target/deploy
            mdbook build ./docs --dest-dir=../target/deploy/docs/
        "#})
    }

    named::job(
        release_job(&[])
            .runs_on(runners::LINUX_LARGE)
            .add_step(steps::checkout_repo())
            .add_step(steps::setup_cargo_config(Platform::Linux))
            // todo(ci): un-inline build_docs/action.yml here
            .add_step(steps::cache_rust_dependencies_namespace())
            .add_step(
                lychee_link_check("./docs/src/**/*"), // check markdown links
            )
            .map(steps::install_linux_dependencies)
            .add_step(install_mdbook())
            .add_step(build_docs())
            .add_step(
                lychee_link_check("target/deploy/docs"), // check links in generated html
            ),
    )
}

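// Lints shell scripts and GitHub workflow files, then verifies that the generated
// workflows in .github/ are up to date with `cargo xtask workflows`.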
pub(crate) fn check_scripts() -> NamedJob {
    fn download_actionlint() -> Step<Run> {
        named::bash(
            "bash <(curl https://raw.githubusercontent.com/rhysd/actionlint/main/scripts/download-actionlint.bash)",
        )
    }

    fn run_actionlint() -> Step<Run> {
        named::bash(indoc::indoc! {r#"
            ${{ steps.get_actionlint.outputs.executable }} -color
        "#})
    }

    fn run_shellcheck() -> Step<Run> {
        named::bash("./script/shellcheck-scripts error")
    }

    fn check_xtask_workflows() -> Step<Run> {
        named::bash(indoc::indoc! {r#"
            cargo xtask workflows
            if ! git diff --exit-code .github; then
              echo "Error: .github directory has uncommitted changes after running 'cargo xtask workflows'"
              echo "Please run 'cargo xtask workflows' locally and commit the changes"
              exit 1
            fi
        "#})
    }

    named::job(
        release_job(&[])
            .runs_on(runners::LINUX_SMALL)
            .add_step(steps::checkout_repo())
            .add_step(run_shellcheck())
            .add_step(download_actionlint().id("get_actionlint"))
            .add_step(run_actionlint())
            .add_step(check_xtask_workflows()),
    )
}