1use gh_workflow::{
2 Concurrency, Container, Event, Expression, Job, Port, PullRequest, Push, Run, Step, Use,
3 Workflow,
4};
5use indexmap::IndexMap;
6
7use crate::tasks::workflows::{
8 nix_build::build_nix,
9 runners::Arch,
10 steps::{CommonJobConditions, repository_owner_guard_expression},
11 vars::{self, PathCondition},
12};
13
14use super::{
15 runners::{self, Platform},
16 steps::{self, FluentBuilder, NamedJob, named, release_job},
17};
18
/// Assembles the main CI workflow: an `orchestrate` job evaluates path-based
/// conditions against the diff, downstream jobs are `guard`ed on those
/// conditions, and a final `tests_pass` job aggregates every job's result.
pub(crate) fn run_tests() -> Workflow {
    // Specify anything which should potentially skip full test suite in this regex:
    // - docs/
    // - script/update_top_ranking_issues/
    // - .github/ISSUE_TEMPLATE/
    // - .github/workflows/ (except the run_tests workflow itself, via the
    //   `(?!run_tests)` negative lookahead — an earlier comment said
    //   "except .github/workflows/ci.yml", which looks stale; confirm)
    let should_run_tests = PathCondition::inverted(
        "run_tests",
        r"^(docs/|script/update_top_ranking_issues/|\.github/(ISSUE_TEMPLATE|workflows/(?!run_tests)))",
    );
    // Docs checks fire when docs/ or any crate source file changes.
    let should_check_docs = PathCondition::new("run_docs", r"^(docs/|crates/.*\.rs)");
    // Action/script lint checks fire for workflow, action, xtask, or script changes.
    let should_check_scripts = PathCondition::new(
        "run_action_checks",
        r"^\.github/(workflows/|actions/|actionlint.yml)|tooling/xtask|script/",
    );
    let should_check_licences =
        PathCondition::new("run_licenses", r"^(Cargo.lock|script/.*licenses)");
    let should_build_nix = PathCondition::new(
        "run_nix",
        r"^(nix/|flake\.|Cargo\.|rust-toolchain.toml|\.cargo/config.toml)",
    );

    // The orchestrate job computes each registered condition exactly once and
    // exposes it as a job output that the guarded jobs read.
    let orchestrate = orchestrate(&[
        &should_check_scripts,
        &should_check_docs,
        &should_check_licences,
        &should_build_nix,
        &should_run_tests,
    ]);

    let mut jobs = vec![
        orchestrate,
        check_style(),
        should_run_tests.guard(clippy(Platform::Windows)),
        should_run_tests.guard(clippy(Platform::Linux)),
        should_run_tests.guard(clippy(Platform::Mac)),
        should_run_tests.guard(run_platform_tests(Platform::Windows)),
        should_run_tests.guard(run_platform_tests(Platform::Linux)),
        should_run_tests.guard(run_platform_tests(Platform::Mac)),
        should_run_tests.guard(doctests()),
        should_run_tests.guard(check_workspace_binaries()),
        should_run_tests.guard(check_dependencies()), // could be more specific here?
        should_check_docs.guard(check_docs()),
        should_check_licences.guard(check_licenses()),
        should_check_scripts.guard(check_scripts()),
        should_build_nix.guard(build_nix(
            Platform::Linux,
            Arch::X86_64,
            "debug",
            // *don't* cache the built output
            Some("-zed-editor-[0-9.]*-nightly"),
            &[],
        )),
        should_build_nix.guard(build_nix(
            Platform::Mac,
            Arch::AARCH64,
            "debug",
            // *don't* cache the built output
            Some("-zed-editor-[0-9.]*-nightly"),
            &[],
        )),
    ];
    // tests_pass derives its `needs` list from the jobs collected so far.
    let tests_pass = tests_pass(&jobs);

    // NOTE(review): pushed *after* the tests_pass snapshot above, so this job
    // is not in tests_pass's `needs` and cannot block the aggregate result —
    // confirm that is intentional.
    jobs.push(should_run_tests.guard(check_postgres_and_protobuf_migrations())); // could be more specific here?

    named::workflow()
        .add_event(
            Event::default()
                .push(
                    Push::default()
                        .add_branch("main")
                        .add_branch("v[0-9]+.[0-9]+.x"),
                )
                .pull_request(PullRequest::default().add_branch("**")),
        )
        .concurrency(
            Concurrency::default()
                // On main every commit gets its own group (github.sha) so runs
                // are never cancelled; elsewhere the constant 'anysha' makes a
                // newer run cancel the in-flight one for the same ref.
                .group(concat!(
                    "${{ github.workflow }}-${{ github.ref_name }}-",
                    "${{ github.ref_name == 'main' && github.sha || 'anysha' }}"
                ))
                .cancel_in_progress(true),
        )
        .add_env(("CARGO_TERM_COLOR", "always"))
        .add_env(("RUST_BACKTRACE", 1))
        .add_env(("CARGO_INCREMENTAL", 0))
        .map(|mut workflow| {
            for job in jobs {
                workflow = workflow.add_job(job.name, job.job)
            }
            workflow
        })
        .add_job(tests_pass.name, tests_pass.job)
}
114
// Generates a bash script that checks changed files against regex patterns
// and sets GitHub output variables accordingly
/// Builds the `orchestrate` job *with* the `changed_packages` output, a
/// nextest filterset used to narrow test runs to affected packages.
pub fn orchestrate(rules: &[&PathCondition]) -> NamedJob {
    orchestrate_impl(rules, true)
}
120
/// Same as [`orchestrate`] but without the `changed_packages` output; guarded
/// jobs that fire will run against the full workspace.
pub fn orchestrate_without_package_filter(rules: &[&PathCondition]) -> NamedJob {
    orchestrate_impl(rules, false)
}
124
/// Shared implementation behind [`orchestrate`] and
/// [`orchestrate_without_package_filter`].
///
/// Emits one "filter" bash step that:
/// 1. computes `CHANGED_FILES` by diffing against the PR merge-base (or
///    `HEAD~1` on direct pushes, where `GITHUB_BASE_REF` is empty),
/// 2. optionally derives a `changed_packages` nextest filterset from the
///    changed crates/tooling directories (via `cargo metadata` + `jq`),
/// 3. greps `CHANGED_FILES` against each rule's pattern and writes a
///    `true`/`false` GitHub output named after the rule.
fn orchestrate_impl(rules: &[&PathCondition], include_package_filter: bool) -> NamedJob {
    let name = "orchestrate".to_owned();
    let step_name = "filter".to_owned();
    let mut script = String::new();

    // Preamble: establish COMPARE_REV / CHANGED_FILES and define the
    // check_pattern helper that the per-rule lines appended below will call.
    script.push_str(indoc::indoc! {r#"
        if [ -z "$GITHUB_BASE_REF" ]; then
        echo "Not in a PR context (i.e., push to main/stable/preview)"
        COMPARE_REV="$(git rev-parse HEAD~1)"
        else
        echo "In a PR context comparing to pull_request.base.ref"
        git fetch origin "$GITHUB_BASE_REF" --depth=350
        COMPARE_REV="$(git merge-base "origin/${GITHUB_BASE_REF}" HEAD)"
        fi
        CHANGED_FILES="$(git diff --name-only "$COMPARE_REV" ${{ github.sha }})"

        check_pattern() {
        local output_name="$1"
        local pattern="$2"
        local grep_arg="$3"

        echo "$CHANGED_FILES" | grep "$grep_arg" "$pattern" && \
        echo "${output_name}=true" >> "$GITHUB_OUTPUT" || \
        echo "${output_name}=false" >> "$GITHUB_OUTPUT"
        }

    "#});

    let mut outputs = IndexMap::new();

    if include_package_filter {
        script.push_str(indoc::indoc! {r#"
            # Check for changes that require full rebuild (no filter)
            # Direct pushes to main/stable/preview always run full suite
            if [ -z "$GITHUB_BASE_REF" ]; then
            echo "Not a PR, running full test suite"
            echo "changed_packages=" >> "$GITHUB_OUTPUT"
            elif echo "$CHANGED_FILES" | grep -qP '^(rust-toolchain\.toml|\.cargo/|\.github/|Cargo\.(toml|lock)$)'; then
            echo "Toolchain, cargo config, or root Cargo files changed, will run all tests"
            echo "changed_packages=" >> "$GITHUB_OUTPUT"
            else
            # Extract changed directories from file paths
            CHANGED_DIRS=$(echo "$CHANGED_FILES" | \
            grep -oP '^(crates|tooling)/\K[^/]+' | \
            sort -u || true)

            # Build directory-to-package mapping using cargo metadata
            DIR_TO_PKG=$(cargo metadata --format-version=1 --no-deps 2>/dev/null | \
            jq -r '.packages[] | select(.manifest_path | test("crates/|tooling/")) | "\(.manifest_path | capture("(crates|tooling)/(?<dir>[^/]+)") | .dir)=\(.name)"')

            # Map directory names to package names
            FILE_CHANGED_PKGS=""
            for dir in $CHANGED_DIRS; do
            pkg=$(echo "$DIR_TO_PKG" | grep "^${dir}=" | cut -d= -f2 | head -1)
            if [ -n "$pkg" ]; then
            FILE_CHANGED_PKGS=$(printf '%s\n%s' "$FILE_CHANGED_PKGS" "$pkg")
            else
            # Fall back to directory name if no mapping found
            FILE_CHANGED_PKGS=$(printf '%s\n%s' "$FILE_CHANGED_PKGS" "$dir")
            fi
            done
            FILE_CHANGED_PKGS=$(echo "$FILE_CHANGED_PKGS" | grep -v '^$' | sort -u || true)

            # If assets/ changed, add crates that depend on those assets
            if echo "$CHANGED_FILES" | grep -qP '^assets/'; then
            FILE_CHANGED_PKGS=$(printf '%s\n%s\n%s\n%s' "$FILE_CHANGED_PKGS" "settings" "storybook" "assets" | sort -u)
            fi

            # Combine all changed packages
            ALL_CHANGED_PKGS=$(echo "$FILE_CHANGED_PKGS" | grep -v '^$' || true)

            if [ -z "$ALL_CHANGED_PKGS" ]; then
            echo "No package changes detected, will run all tests"
            echo "changed_packages=" >> "$GITHUB_OUTPUT"
            else
            # Build nextest filterset with rdeps for each package
            FILTERSET=$(echo "$ALL_CHANGED_PKGS" | \
            sed 's/.*/rdeps(&)/' | \
            tr '\n' '|' | \
            sed 's/|$//')
            echo "Changed packages filterset: $FILTERSET"
            echo "changed_packages=$FILTERSET" >> "$GITHUB_OUTPUT"
            fi
            fi

        "#});

        // Expose the filterset so test jobs can hand it to cargo-nextest.
        outputs.insert(
            "changed_packages".to_owned(),
            format!("${{{{ steps.{}.outputs.changed_packages }}}}", step_name),
        );
    }

    for rule in rules {
        // Record which job sets this rule's output. `replace` returning
        // `Some` would mean the rule was registered with two orchestrate
        // jobs; the assert turns that into a hard error at generation time.
        assert!(
            rule.set_by_step
                .borrow_mut()
                .replace(name.clone())
                .is_none()
        );
        // Rule names must be unique among this step's outputs.
        assert!(
            outputs
                .insert(
                    rule.name.to_owned(),
                    format!("${{{{ steps.{}.outputs.{} }}}}", step_name, rule.name)
                )
                .is_none()
        );

        // Inverted rules ask "did anything change OUTSIDE the pattern?"
        // (grep -v); plain rules ask "did anything change INSIDE it?".
        let grep_arg = if rule.invert { "-qvP" } else { "-qP" };
        script.push_str(&format!(
            "check_pattern \"{}\" '{}' {}\n",
            rule.name, rule.pattern, grep_arg
        ));
    }

    let job = Job::default()
        .runs_on(runners::LINUX_SMALL)
        .with_repository_owner_guard()
        .outputs(outputs)
        // PRs need deeper history to find the merge-base; pushes to main only
        // need the previous commit for HEAD~1.
        .add_step(steps::checkout_repo().add_with((
            "fetch-depth",
            "${{ github.ref == 'refs/heads/main' && 2 || 350 }}",
        )))
        .add_step(Step::new(step_name.clone()).run(script).id(step_name));

    NamedJob { name, job }
}
253
254pub fn tests_pass(jobs: &[NamedJob]) -> NamedJob {
255 let mut script = String::from(indoc::indoc! {r#"
256 set +x
257 EXIT_CODE=0
258
259 check_result() {
260 echo "* $1: $2"
261 if [[ "$2" != "skipped" && "$2" != "success" ]]; then EXIT_CODE=1; fi
262 }
263
264 "#});
265
266 script.push_str(
267 &jobs
268 .iter()
269 .map(|job| {
270 format!(
271 "check_result \"{}\" \"${{{{ needs.{}.result }}}}\"",
272 job.name, job.name
273 )
274 })
275 .collect::<Vec<_>>()
276 .join("\n"),
277 );
278
279 script.push_str("\n\nexit $EXIT_CODE\n");
280
281 let job = Job::default()
282 .runs_on(runners::LINUX_SMALL)
283 .needs(
284 jobs.iter()
285 .map(|j| j.name.to_string())
286 .collect::<Vec<String>>(),
287 )
288 .cond(repository_owner_guard_expression(true))
289 .add_step(named::bash(&script));
290
291 named::job(job)
292}
293
294fn check_style() -> NamedJob {
295 fn check_for_typos() -> Step<Use> {
296 named::uses(
297 "crate-ci",
298 "typos",
299 "2d0ce569feab1f8752f1dde43cc2f2aa53236e06",
300 ) // v1.40.0
301 .with(("config", "./typos.toml"))
302 }
303 named::job(
304 release_job(&[])
305 .runs_on(runners::LINUX_MEDIUM)
306 .add_step(steps::checkout_repo())
307 .add_step(steps::cache_rust_dependencies_namespace())
308 .add_step(steps::setup_pnpm())
309 .add_step(steps::prettier())
310 .add_step(steps::cargo_fmt())
311 .add_step(steps::script("./script/check-todos"))
312 .add_step(steps::script("./script/check-keymaps"))
313 .add_step(check_for_typos()),
314 )
315}
316
317fn check_dependencies() -> NamedJob {
318 fn install_cargo_machete() -> Step<Use> {
319 named::uses(
320 "clechasseur",
321 "rs-cargo",
322 "8435b10f6e71c2e3d4d3b7573003a8ce4bfc6386", // v2
323 )
324 .add_with(("command", "install"))
325 .add_with(("args", "cargo-machete@0.7.0"))
326 }
327
328 fn run_cargo_machete() -> Step<Use> {
329 named::uses(
330 "clechasseur",
331 "rs-cargo",
332 "8435b10f6e71c2e3d4d3b7573003a8ce4bfc6386", // v2
333 )
334 .add_with(("command", "machete"))
335 }
336
337 fn check_cargo_lock() -> Step<Run> {
338 named::bash("cargo update --locked --workspace")
339 }
340
341 fn check_vulnerable_dependencies() -> Step<Use> {
342 named::uses(
343 "actions",
344 "dependency-review-action",
345 "67d4f4bd7a9b17a0db54d2a7519187c65e339de8", // v4
346 )
347 .if_condition(Expression::new("github.event_name == 'pull_request'"))
348 .with(("license-check", false))
349 }
350
351 named::job(
352 release_job(&[])
353 .runs_on(runners::LINUX_SMALL)
354 .add_step(steps::checkout_repo())
355 .add_step(steps::cache_rust_dependencies_namespace())
356 .add_step(install_cargo_machete())
357 .add_step(run_cargo_machete())
358 .add_step(check_cargo_lock())
359 .add_step(check_vulnerable_dependencies()),
360 )
361}
362
363fn check_workspace_binaries() -> NamedJob {
364 named::job(
365 release_job(&[])
366 .runs_on(runners::LINUX_LARGE)
367 .add_step(steps::checkout_repo())
368 .add_step(steps::setup_cargo_config(Platform::Linux))
369 .add_step(steps::cache_rust_dependencies_namespace())
370 .map(steps::install_linux_dependencies)
371 .add_step(steps::script("cargo build -p collab"))
372 .add_step(steps::script("cargo build --workspace --bins --examples"))
373 .add_step(steps::cleanup_cargo_config(Platform::Linux)),
374 )
375}
376
377pub(crate) fn clippy(platform: Platform) -> NamedJob {
378 let runner = match platform {
379 Platform::Windows => runners::WINDOWS_DEFAULT,
380 Platform::Linux => runners::LINUX_DEFAULT,
381 Platform::Mac => runners::MAC_DEFAULT,
382 };
383 NamedJob {
384 name: format!("clippy_{platform}"),
385 job: release_job(&[])
386 .runs_on(runner)
387 .add_step(steps::checkout_repo())
388 .add_step(steps::setup_cargo_config(platform))
389 .when(
390 platform == Platform::Linux || platform == Platform::Mac,
391 |this| this.add_step(steps::cache_rust_dependencies_namespace()),
392 )
393 .when(
394 platform == Platform::Linux,
395 steps::install_linux_dependencies,
396 )
397 .add_step(steps::clippy(platform)),
398 }
399}
400
/// Per-platform test job with the `changed_packages` nextest filter applied
/// (produced by the `orchestrate` job).
pub(crate) fn run_platform_tests(platform: Platform) -> NamedJob {
    run_platform_tests_impl(platform, true)
}
404
/// Per-platform test job that always runs the full test suite, ignoring the
/// `changed_packages` filterset.
pub(crate) fn run_platform_tests_no_filter(platform: Platform) -> NamedJob {
    run_platform_tests_impl(platform, false)
}
408
409fn run_platform_tests_impl(platform: Platform, filter_packages: bool) -> NamedJob {
410 let runner = match platform {
411 Platform::Windows => runners::WINDOWS_DEFAULT,
412 Platform::Linux => runners::LINUX_DEFAULT,
413 Platform::Mac => runners::MAC_DEFAULT,
414 };
415 NamedJob {
416 name: format!("run_tests_{platform}"),
417 job: release_job(&[])
418 .runs_on(runner)
419 .when(platform == Platform::Linux, |job| {
420 job.add_service(
421 "postgres",
422 Container::new("postgres:15")
423 .add_env(("POSTGRES_HOST_AUTH_METHOD", "trust"))
424 .ports(vec![Port::Name("5432:5432".into())])
425 .options(
426 "--health-cmd pg_isready \
427 --health-interval 500ms \
428 --health-timeout 5s \
429 --health-retries 10",
430 ),
431 )
432 })
433 .add_step(steps::checkout_repo())
434 .add_step(steps::setup_cargo_config(platform))
435 .when(
436 platform == Platform::Linux || platform == Platform::Mac,
437 |this| this.add_step(steps::cache_rust_dependencies_namespace()),
438 )
439 .when(
440 platform == Platform::Linux,
441 steps::install_linux_dependencies,
442 )
443 .add_step(steps::setup_node())
444 .when(
445 platform == Platform::Linux || platform == Platform::Mac,
446 |job| job.add_step(steps::cargo_install_nextest()),
447 )
448 .add_step(steps::clear_target_dir_if_large(platform))
449 .when(filter_packages, |job| {
450 job.add_step(
451 steps::cargo_nextest(platform).with_changed_packages_filter("orchestrate"),
452 )
453 })
454 .when(!filter_packages, |job| {
455 job.add_step(steps::cargo_nextest(platform))
456 })
457 .add_step(steps::cleanup_cargo_config(platform)),
458 }
459}
460
461pub(crate) fn check_postgres_and_protobuf_migrations() -> NamedJob {
462 fn remove_untracked_files() -> Step<Run> {
463 named::bash("git clean -df")
464 }
465
466 fn ensure_fresh_merge() -> Step<Run> {
467 named::bash(indoc::indoc! {r#"
468 if [ -z "$GITHUB_BASE_REF" ];
469 then
470 echo "BUF_BASE_BRANCH=$(git merge-base origin/main HEAD)" >> "$GITHUB_ENV"
471 else
472 git checkout -B temp
473 git merge -q "origin/$GITHUB_BASE_REF" -m "merge main into temp"
474 echo "BUF_BASE_BRANCH=$GITHUB_BASE_REF" >> "$GITHUB_ENV"
475 fi
476 "#})
477 }
478
479 fn bufbuild_setup_action() -> Step<Use> {
480 named::uses("bufbuild", "buf-setup-action", "v1")
481 .add_with(("version", "v1.29.0"))
482 .add_with(("github_token", vars::GITHUB_TOKEN))
483 }
484
485 fn bufbuild_breaking_action() -> Step<Use> {
486 named::uses("bufbuild", "buf-breaking-action", "v1").add_with(("input", "crates/proto/proto/"))
487 .add_with(("against", "https://github.com/${GITHUB_REPOSITORY}.git#branch=${BUF_BASE_BRANCH},subdir=crates/proto/proto/"))
488 }
489
490 named::job(
491 release_job(&[])
492 .runs_on(runners::LINUX_DEFAULT)
493 .add_env(("GIT_AUTHOR_NAME", "Protobuf Action"))
494 .add_env(("GIT_AUTHOR_EMAIL", "ci@zed.dev"))
495 .add_env(("GIT_COMMITTER_NAME", "Protobuf Action"))
496 .add_env(("GIT_COMMITTER_EMAIL", "ci@zed.dev"))
497 .add_step(steps::checkout_repo().with(("fetch-depth", 0))) // fetch full history
498 .add_step(remove_untracked_files())
499 .add_step(ensure_fresh_merge())
500 .add_step(bufbuild_setup_action())
501 .add_step(bufbuild_breaking_action()),
502 )
503}
504
505fn doctests() -> NamedJob {
506 fn run_doctests() -> Step<Run> {
507 named::bash(indoc::indoc! {r#"
508 cargo test --workspace --doc --no-fail-fast
509 "#})
510 .id("run_doctests")
511 }
512
513 named::job(
514 release_job(&[])
515 .runs_on(runners::LINUX_DEFAULT)
516 .add_step(steps::checkout_repo())
517 .add_step(steps::cache_rust_dependencies_namespace())
518 .map(steps::install_linux_dependencies)
519 .add_step(steps::setup_cargo_config(Platform::Linux))
520 .add_step(run_doctests())
521 .add_step(steps::cleanup_cargo_config(Platform::Linux)),
522 )
523}
524
525fn check_licenses() -> NamedJob {
526 named::job(
527 Job::default()
528 .runs_on(runners::LINUX_SMALL)
529 .add_step(steps::checkout_repo())
530 .add_step(steps::cache_rust_dependencies_namespace())
531 .add_step(steps::script("./script/check-licenses"))
532 .add_step(steps::script("./script/generate-licenses")),
533 )
534}
535
536fn check_docs() -> NamedJob {
537 fn lychee_link_check(dir: &str) -> Step<Use> {
538 named::uses(
539 "lycheeverse",
540 "lychee-action",
541 "82202e5e9c2f4ef1a55a3d02563e1cb6041e5332",
542 ) // v2.4.1
543 .add_with(("args", format!("--no-progress --exclude '^http' '{dir}'")))
544 .add_with(("fail", true))
545 .add_with(("jobSummary", false))
546 }
547
548 fn install_mdbook() -> Step<Use> {
549 named::uses(
550 "peaceiris",
551 "actions-mdbook",
552 "ee69d230fe19748b7abf22df32acaa93833fad08", // v2
553 )
554 .with(("mdbook-version", "0.4.37"))
555 }
556
557 fn build_docs() -> Step<Run> {
558 named::bash(indoc::indoc! {r#"
559 mkdir -p target/deploy
560 mdbook build ./docs --dest-dir=../target/deploy/docs/
561 "#})
562 }
563
564 named::job(
565 release_job(&[])
566 .runs_on(runners::LINUX_LARGE)
567 .add_step(steps::checkout_repo())
568 .add_step(steps::setup_cargo_config(Platform::Linux))
569 // todo(ci): un-inline build_docs/action.yml here
570 .add_step(steps::cache_rust_dependencies_namespace())
571 .add_step(
572 lychee_link_check("./docs/src/**/*"), // check markdown links
573 )
574 .map(steps::install_linux_dependencies)
575 .add_step(steps::script("./script/generate-action-metadata"))
576 .add_step(install_mdbook())
577 .add_step(build_docs())
578 .add_step(
579 lychee_link_check("target/deploy/docs"), // check links in generated html
580 ),
581 )
582}
583
584pub(crate) fn check_scripts() -> NamedJob {
585 fn download_actionlint() -> Step<Run> {
586 named::bash(
587 "bash <(curl https://raw.githubusercontent.com/rhysd/actionlint/main/scripts/download-actionlint.bash)",
588 )
589 }
590
591 fn run_actionlint() -> Step<Run> {
592 named::bash(indoc::indoc! {r#"
593 ${{ steps.get_actionlint.outputs.executable }} -color
594 "#})
595 }
596
597 fn run_shellcheck() -> Step<Run> {
598 named::bash("./script/shellcheck-scripts error")
599 }
600
601 fn check_xtask_workflows() -> Step<Run> {
602 named::bash(indoc::indoc! {r#"
603 cargo xtask workflows
604 if ! git diff --exit-code .github; then
605 echo "Error: .github directory has uncommitted changes after running 'cargo xtask workflows'"
606 echo "Please run 'cargo xtask workflows' locally and commit the changes"
607 exit 1
608 fi
609 "#})
610 }
611
612 named::job(
613 release_job(&[])
614 .runs_on(runners::LINUX_SMALL)
615 .add_step(steps::checkout_repo())
616 .add_step(run_shellcheck())
617 .add_step(download_actionlint().id("get_actionlint"))
618 .add_step(run_actionlint())
619 .add_step(check_xtask_workflows()),
620 )
621}