1use gh_workflow::{
2 Concurrency, Container, Event, Expression, Job, Port, PullRequest, Push, Run, Step, Use,
3 Workflow,
4};
5use indexmap::IndexMap;
6
7use crate::tasks::workflows::{
8 steps::{CommonJobConditions, repository_owner_guard_expression},
9 vars::{self, PathCondition},
10};
11
12use super::{
13 runners::{self, Platform},
14 steps::{self, FluentBuilder, NamedJob, named, release_job},
15};
16
/// Assembles the `run_tests` CI workflow: a path-filtering `orchestrate`
/// job, style/lint/test jobs (per platform where relevant) guarded by the
/// filter outputs, and a final `tests_pass` job that fans in the results.
pub(crate) fn run_tests() -> Workflow {
    // Specify anything which should potentially skip full test suite in this regex:
    // - docs/
    // - script/update_top_ranking_issues/
    // - .github/ISSUE_TEMPLATE/
    // - .github/workflows/ (except the run_tests workflow itself, which is
    //   excluded via the `(?!run_tests)` lookahead)
    let should_run_tests = PathCondition::inverted(
        "run_tests",
        r"^(docs/|script/update_top_ranking_issues/|\.github/(ISSUE_TEMPLATE|workflows/(?!run_tests)))",
    );
    let should_check_docs = PathCondition::new("run_docs", r"^(docs/|crates/.*\.rs)");
    let should_check_scripts = PathCondition::new(
        "run_action_checks",
        r"^\.github/(workflows/|actions/|actionlint.yml)|tooling/xtask|script/",
    );
    // NOTE(review): the `.` in `Cargo.lock` (and in `actionlint.yml` above) is
    // an unescaped regex metacharacter; harmless in practice, but consider
    // escaping for strictness.
    let should_check_licences =
        PathCondition::new("run_licenses", r"^(Cargo.lock|script/.*licenses)");

    // One diff pass computes every condition; downstream jobs consume the
    // resulting outputs through `PathCondition::guard`.
    let orchestrate = orchestrate(&[
        &should_check_scripts,
        &should_check_docs,
        &should_check_licences,
        &should_run_tests,
    ]);

    let mut jobs = vec![
        orchestrate,
        check_style(),
        should_run_tests.guard(clippy(Platform::Windows)),
        should_run_tests.guard(clippy(Platform::Linux)),
        should_run_tests.guard(clippy(Platform::Mac)),
        should_run_tests.guard(run_platform_tests(Platform::Windows)),
        should_run_tests.guard(run_platform_tests(Platform::Linux)),
        should_run_tests.guard(run_platform_tests(Platform::Mac)),
        should_run_tests.guard(doctests()),
        should_run_tests.guard(check_workspace_binaries()),
        should_run_tests.guard(check_wasm()),
        should_run_tests.guard(check_dependencies()), // could be more specific here?
        should_check_docs.guard(check_docs()),
        should_check_licences.guard(check_licenses()),
        should_check_scripts.guard(check_scripts()),
    ];
    // `tests_pass` gates on (and reports) only the jobs collected so far.
    let tests_pass = tests_pass(&jobs);

    // Pushed *after* `tests_pass` was built, so this job is not part of the
    // final gate's `needs` list — NOTE(review): confirm that is intentional.
    jobs.push(should_run_tests.guard(check_postgres_and_protobuf_migrations())); // could be more specific here?

    named::workflow()
        .add_event(
            // Trigger on pushes to main and release branches, plus all PRs.
            Event::default()
                .push(
                    Push::default()
                        .add_branch("main")
                        .add_branch("v[0-9]+.[0-9]+.x"),
                )
                .pull_request(PullRequest::default().add_branch("**")),
        )
        .concurrency(
            // On main the group is keyed by SHA so every push gets its own
            // run; elsewhere a fixed key means superseded runs are cancelled.
            Concurrency::default()
                .group(concat!(
                    "${{ github.workflow }}-${{ github.ref_name }}-",
                    "${{ github.ref_name == 'main' && github.sha || 'anysha' }}"
                ))
                .cancel_in_progress(true),
        )
        .add_env(("CARGO_TERM_COLOR", "always"))
        .add_env(("RUST_BACKTRACE", 1))
        .add_env(("CARGO_INCREMENTAL", 0))
        .map(|mut workflow| {
            for job in jobs {
                workflow = workflow.add_job(job.name, job.job)
            }
            workflow
        })
        .add_job(tests_pass.name, tests_pass.job)
}
92
/// Generates a bash script that checks changed files against regex patterns
/// and sets GitHub output variables accordingly.
///
/// Also emits the `changed_packages` nextest filterset output; use
/// [`orchestrate_without_package_filter`] when that output is not wanted.
pub fn orchestrate(rules: &[&PathCondition]) -> NamedJob {
    orchestrate_impl(rules, true)
}
98
/// Variant of [`orchestrate`] that skips the `changed_packages` package
/// filter and only exports the per-rule `true`/`false` outputs.
pub fn orchestrate_without_package_filter(rules: &[&PathCondition]) -> NamedJob {
    orchestrate_impl(rules, false)
}
102
/// Shared builder for the `orchestrate` job.
///
/// Emits a small Linux job that:
/// 1. picks a comparison revision (merge-base against `$GITHUB_BASE_REF`
///    for PRs, `HEAD~1` for direct pushes),
/// 2. diffs it against `github.sha` to obtain the changed-file list,
/// 3. greps that list against each rule's pattern, exporting one
///    `<rule>=true|false` job output per [`PathCondition`], and
/// 4. when `include_package_filter` is set, additionally computes a
///    `changed_packages` nextest filterset (`rdeps(...)` terms) from the
///    changed `crates/` / `tooling/` directories.
///
/// Panics (via `assert!`) at generation time if a rule is registered twice
/// or was already claimed by another job.
fn orchestrate_impl(rules: &[&PathCondition], include_package_filter: bool) -> NamedJob {
    let name = "orchestrate".to_owned();
    let step_name = "filter".to_owned();
    let mut script = String::new();

    // Preamble: compute CHANGED_FILES and define the check_pattern helper
    // invoked once per rule by the lines appended in the loop below.
    script.push_str(indoc::indoc! {r#"
        set -euo pipefail
        if [ -z "$GITHUB_BASE_REF" ]; then
            echo "Not in a PR context (i.e., push to main/stable/preview)"
            COMPARE_REV="$(git rev-parse HEAD~1)"
        else
            echo "In a PR context comparing to pull_request.base.ref"
            git fetch origin "$GITHUB_BASE_REF" --depth=350
            COMPARE_REV="$(git merge-base "origin/${GITHUB_BASE_REF}" HEAD)"
        fi
        CHANGED_FILES="$(git diff --name-only "$COMPARE_REV" ${{ github.sha }})"

        check_pattern() {
            local output_name="$1"
            local pattern="$2"
            local grep_arg="$3"

            echo "$CHANGED_FILES" | grep "$grep_arg" "$pattern" && \
                echo "${output_name}=true" >> "$GITHUB_OUTPUT" || \
                echo "${output_name}=false" >> "$GITHUB_OUTPUT"
        }

    "#});

    let mut outputs = IndexMap::new();

    if include_package_filter {
        // Maps changed crates/tooling directories to package names via
        // `cargo metadata` + jq, then builds an `rdeps(a)|rdeps(b)|...`
        // nextest filterset. An empty `changed_packages` output means
        // "no filter — run everything".
        script.push_str(indoc::indoc! {r#"
            # Check for changes that require full rebuild (no filter)
            # Direct pushes to main/stable/preview always run full suite
            if [ -z "$GITHUB_BASE_REF" ]; then
                echo "Not a PR, running full test suite"
                echo "changed_packages=" >> "$GITHUB_OUTPUT"
            elif echo "$CHANGED_FILES" | grep -qP '^(rust-toolchain\.toml|\.cargo/|\.github/|Cargo\.(toml|lock)$)'; then
                echo "Toolchain, cargo config, or root Cargo files changed, will run all tests"
                echo "changed_packages=" >> "$GITHUB_OUTPUT"
            else
                # Extract changed directories from file paths
                CHANGED_DIRS=$(echo "$CHANGED_FILES" | \
                    grep -oP '^(crates|tooling)/\K[^/]+' | \
                    sort -u || true)

                # Build directory-to-package mapping using cargo metadata
                DIR_TO_PKG=$(cargo metadata --format-version=1 --no-deps 2>/dev/null | \
                    jq -r '.packages[] | select(.manifest_path | test("crates/|tooling/")) | "\(.manifest_path | capture("(crates|tooling)/(?<dir>[^/]+)") | .dir)=\(.name)"')

                # Map directory names to package names
                FILE_CHANGED_PKGS=""
                for dir in $CHANGED_DIRS; do
                    pkg=$(echo "$DIR_TO_PKG" | grep "^${dir}=" | cut -d= -f2 | head -1)
                    if [ -n "$pkg" ]; then
                        FILE_CHANGED_PKGS=$(printf '%s\n%s' "$FILE_CHANGED_PKGS" "$pkg")
                    else
                        # Fall back to directory name if no mapping found
                        FILE_CHANGED_PKGS=$(printf '%s\n%s' "$FILE_CHANGED_PKGS" "$dir")
                    fi
                done
                FILE_CHANGED_PKGS=$(echo "$FILE_CHANGED_PKGS" | grep -v '^$' | sort -u || true)

                # If assets/ changed, add crates that depend on those assets
                if echo "$CHANGED_FILES" | grep -qP '^assets/'; then
                    FILE_CHANGED_PKGS=$(printf '%s\n%s\n%s\n%s' "$FILE_CHANGED_PKGS" "settings" "storybook" "assets" | sort -u)
                fi

                # Combine all changed packages
                ALL_CHANGED_PKGS=$(echo "$FILE_CHANGED_PKGS" | grep -v '^$' || true)

                if [ -z "$ALL_CHANGED_PKGS" ]; then
                    echo "No package changes detected, will run all tests"
                    echo "changed_packages=" >> "$GITHUB_OUTPUT"
                else
                    # Build nextest filterset with rdeps for each package
                    FILTERSET=$(echo "$ALL_CHANGED_PKGS" | \
                        sed 's/.*/rdeps(&)/' | \
                        tr '\n' '|' | \
                        sed 's/|$//')
                    echo "Changed packages filterset: $FILTERSET"
                    echo "changed_packages=$FILTERSET" >> "$GITHUB_OUTPUT"
                fi
            fi

        "#});

        outputs.insert(
            "changed_packages".to_owned(),
            format!("${{{{ steps.{}.outputs.changed_packages }}}}", step_name),
        );
    }

    for rule in rules {
        // Record which job sets this rule's output; the asserts turn a
        // double registration into a generation-time failure.
        assert!(
            rule.set_by_step
                .borrow_mut()
                .replace(name.clone())
                .is_none()
        );
        assert!(
            outputs
                .insert(
                    rule.name.to_owned(),
                    format!("${{{{ steps.{}.outputs.{} }}}}", step_name, rule.name)
                )
                .is_none()
        );

        // Inverted rules use grep -v ("true" when something OTHER than the
        // pattern changed). -P enables PCRE, needed for lookahead patterns.
        let grep_arg = if rule.invert { "-qvP" } else { "-qP" };
        script.push_str(&format!(
            "check_pattern \"{}\" '{}' {}\n",
            rule.name, rule.pattern, grep_arg
        ));
    }

    let job = Job::default()
        .runs_on(runners::LINUX_SMALL)
        .with_repository_owner_guard()
        .outputs(outputs)
        // Deeper history is needed so merge-base / HEAD~1 can resolve.
        .add_step(steps::checkout_repo().with_deep_history_on_non_main())
        .add_step(Step::new(step_name.clone()).run(script).id(step_name));

    NamedJob { name, job }
}
229
230pub fn tests_pass(jobs: &[NamedJob]) -> NamedJob {
231 let mut script = String::from(indoc::indoc! {r#"
232 set +x
233 EXIT_CODE=0
234
235 check_result() {
236 echo "* $1: $2"
237 if [[ "$2" != "skipped" && "$2" != "success" ]]; then EXIT_CODE=1; fi
238 }
239
240 "#});
241
242 script.push_str(
243 &jobs
244 .iter()
245 .map(|job| {
246 format!(
247 "check_result \"{}\" \"${{{{ needs.{}.result }}}}\"",
248 job.name, job.name
249 )
250 })
251 .collect::<Vec<_>>()
252 .join("\n"),
253 );
254
255 script.push_str("\n\nexit $EXIT_CODE\n");
256
257 let job = Job::default()
258 .runs_on(runners::LINUX_SMALL)
259 .needs(
260 jobs.iter()
261 .map(|j| j.name.to_string())
262 .collect::<Vec<String>>(),
263 )
264 .cond(repository_owner_guard_expression(true))
265 .add_step(named::bash(&script));
266
267 named::job(job)
268}
269
/// Builds the style-check job: prettier, `cargo fmt`, the todo/keymap
/// check scripts, and a typo scan via the pinned crate-ci/typos action.
fn check_style() -> NamedJob {
    // Typo scan, pinned to a specific crate-ci/typos commit (v1.40.0) and
    // configured by the repo's typos.toml.
    fn check_for_typos() -> Step<Use> {
        named::uses(
            "crate-ci",
            "typos",
            "2d0ce569feab1f8752f1dde43cc2f2aa53236e06",
        ) // v1.40.0
        .with(("config", "./typos.toml"))
    }
    named::job(
        release_job(&[])
            .runs_on(runners::LINUX_MEDIUM)
            .add_step(steps::checkout_repo())
            .add_step(steps::cache_rust_dependencies_namespace())
            // pnpm is needed by the prettier step below.
            .add_step(steps::setup_pnpm())
            .add_step(steps::prettier())
            .add_step(steps::cargo_fmt())
            .add_step(steps::script("./script/check-todos"))
            .add_step(steps::script("./script/check-keymaps"))
            .add_step(check_for_typos()),
    )
}
292
/// Builds the dependency-hygiene job: unused-dependency detection
/// (cargo-machete), Cargo.lock freshness, and GitHub's vulnerable
/// dependency review on pull requests.
fn check_dependencies() -> NamedJob {
    // Installs a pinned cargo-machete via the rs-cargo action.
    fn install_cargo_machete() -> Step<Use> {
        named::uses(
            "clechasseur",
            "rs-cargo",
            "8435b10f6e71c2e3d4d3b7573003a8ce4bfc6386", // v2
        )
        .add_with(("command", "install"))
        .add_with(("args", "cargo-machete@0.7.0"))
    }

    // Runs `cargo machete` to flag dependencies no crate actually uses.
    fn run_cargo_machete() -> Step<Use> {
        named::uses(
            "clechasseur",
            "rs-cargo",
            "8435b10f6e71c2e3d4d3b7573003a8ce4bfc6386", // v2
        )
        .add_with(("command", "machete"))
    }

    // `--locked` makes cargo fail if Cargo.lock is out of date with the
    // workspace manifests, without writing any changes.
    fn check_cargo_lock() -> Step<Run> {
        named::bash("cargo update --locked --workspace")
    }

    // dependency-review-action only works with a PR diff, hence the
    // event guard; license checking is handled by a separate job.
    fn check_vulnerable_dependencies() -> Step<Use> {
        named::uses(
            "actions",
            "dependency-review-action",
            "67d4f4bd7a9b17a0db54d2a7519187c65e339de8", // v4
        )
        .if_condition(Expression::new("github.event_name == 'pull_request'"))
        .with(("license-check", false))
    }

    named::job(
        release_job(&[])
            .runs_on(runners::LINUX_SMALL)
            .add_step(steps::checkout_repo())
            .add_step(steps::cache_rust_dependencies_namespace())
            .add_step(install_cargo_machete())
            .add_step(run_cargo_machete())
            .add_step(check_cargo_lock())
            .add_step(check_vulnerable_dependencies()),
    )
}
338
/// Builds the job that type-checks `gpui_platform` for the
/// wasm32-unknown-unknown target on a nightly toolchain.
fn check_wasm() -> NamedJob {
    // Nightly with rust-src is required for the -Zbuild-std check below.
    fn install_nightly_wasm_toolchain() -> Step<Run> {
        named::bash(
            "rustup toolchain install nightly --component rust-src --target wasm32-unknown-unknown",
        )
    }

    // Rebuilds std with atomics/bulk-memory/mutable-globals enabled and
    // runs `cargo check` for the wasm target.
    fn cargo_check_wasm() -> Step<Run> {
        named::bash(concat!(
            "cargo +nightly -Zbuild-std=std,panic_abort ",
            "check --target wasm32-unknown-unknown -p gpui_platform",
        ))
        .add_env((
            "CARGO_TARGET_WASM32_UNKNOWN_UNKNOWN_RUSTFLAGS",
            "-C target-feature=+atomics,+bulk-memory,+mutable-globals",
        ))
    }

    named::job(
        release_job(&[])
            .runs_on(runners::LINUX_LARGE)
            .add_step(steps::checkout_repo())
            .add_step(steps::setup_cargo_config(Platform::Linux))
            .add_step(steps::cache_rust_dependencies_namespace())
            .add_step(install_nightly_wasm_toolchain())
            .add_step(steps::setup_sccache(Platform::Linux))
            .add_step(cargo_check_wasm())
            .add_step(steps::show_sccache_stats(Platform::Linux))
            .add_step(steps::cleanup_cargo_config(Platform::Linux)),
    )
}
370
371fn check_workspace_binaries() -> NamedJob {
372 named::job(
373 release_job(&[])
374 .runs_on(runners::LINUX_LARGE)
375 .add_step(steps::checkout_repo())
376 .add_step(steps::setup_cargo_config(Platform::Linux))
377 .add_step(steps::cache_rust_dependencies_namespace())
378 .map(steps::install_linux_dependencies)
379 .add_step(steps::setup_sccache(Platform::Linux))
380 .add_step(steps::script("cargo build -p collab"))
381 .add_step(steps::script("cargo build --workspace --bins --examples"))
382 .add_step(steps::show_sccache_stats(Platform::Linux))
383 .add_step(steps::cleanup_cargo_config(Platform::Linux)),
384 )
385}
386
387pub(crate) fn clippy(platform: Platform) -> NamedJob {
388 let runner = match platform {
389 Platform::Windows => runners::WINDOWS_DEFAULT,
390 Platform::Linux => runners::LINUX_DEFAULT,
391 Platform::Mac => runners::MAC_DEFAULT,
392 };
393 NamedJob {
394 name: format!("clippy_{platform}"),
395 job: release_job(&[])
396 .runs_on(runner)
397 .add_step(steps::checkout_repo())
398 .add_step(steps::setup_cargo_config(platform))
399 .when(
400 platform == Platform::Linux || platform == Platform::Mac,
401 |this| this.add_step(steps::cache_rust_dependencies_namespace()),
402 )
403 .when(
404 platform == Platform::Linux,
405 steps::install_linux_dependencies,
406 )
407 .add_step(steps::setup_sccache(platform))
408 .add_step(steps::clippy(platform))
409 .add_step(steps::show_sccache_stats(platform)),
410 }
411}
412
/// Per-platform test job that restricts `cargo nextest` to the packages
/// the `orchestrate` job reported as changed.
pub(crate) fn run_platform_tests(platform: Platform) -> NamedJob {
    run_platform_tests_impl(platform, true)
}
416
/// Per-platform test job that runs the full `cargo nextest` suite with no
/// changed-package filtering.
pub(crate) fn run_platform_tests_no_filter(platform: Platform) -> NamedJob {
    run_platform_tests_impl(platform, false)
}
420
/// Shared builder for the per-platform test jobs.
///
/// Linux gets a postgres service container and system dependencies;
/// Linux/Mac install nextest via cargo and use the shared dependency
/// cache. When `filter_packages` is set, the nextest invocation is
/// restricted to the `changed_packages` filterset output of the
/// `orchestrate` job.
fn run_platform_tests_impl(platform: Platform, filter_packages: bool) -> NamedJob {
    let runner = match platform {
        Platform::Windows => runners::WINDOWS_DEFAULT,
        Platform::Linux => runners::LINUX_DEFAULT,
        Platform::Mac => runners::MAC_DEFAULT,
    };
    NamedJob {
        name: format!("run_tests_{platform}"),
        job: release_job(&[])
            .runs_on(runner)
            // Postgres service for Linux runs; the health check gates job
            // start until the database accepts connections.
            .when(platform == Platform::Linux, |job| {
                job.add_service(
                    "postgres",
                    Container::new("postgres:15")
                        .add_env(("POSTGRES_HOST_AUTH_METHOD", "trust"))
                        .ports(vec![Port::Name("5432:5432".into())])
                        .options(
                            "--health-cmd pg_isready \
                            --health-interval 500ms \
                            --health-timeout 5s \
                            --health-retries 10",
                        ),
                )
            })
            .add_step(steps::checkout_repo())
            .add_step(steps::setup_cargo_config(platform))
            .when(
                platform == Platform::Linux || platform == Platform::Mac,
                |this| this.add_step(steps::cache_rust_dependencies_namespace()),
            )
            .when(
                platform == Platform::Linux,
                steps::install_linux_dependencies,
            )
            .add_step(steps::setup_node())
            // Windows presumably gets nextest some other way — only
            // Linux/Mac install it here. NOTE(review): confirm.
            .when(
                platform == Platform::Linux || platform == Platform::Mac,
                |job| job.add_step(steps::cargo_install_nextest()),
            )
            .add_step(steps::clear_target_dir_if_large(platform))
            .add_step(steps::setup_sccache(platform))
            // Exactly one of the next two `when`s applies, so there is
            // always a single nextest step.
            .when(filter_packages, |job| {
                job.add_step(
                    steps::cargo_nextest(platform).with_changed_packages_filter("orchestrate"),
                )
            })
            .when(!filter_packages, |job| {
                job.add_step(steps::cargo_nextest(platform))
            })
            .add_step(steps::show_sccache_stats(platform))
            .add_step(steps::cleanup_cargo_config(platform)),
    }
}
474
/// Builds the job that checks protobuf backward compatibility with buf,
/// comparing against the PR base branch (or the merge-base with
/// origin/main for direct pushes).
///
/// NOTE(review): despite the name, no postgres-migration step is visible
/// in this job — confirm whether one was removed or lives elsewhere.
pub(crate) fn check_postgres_and_protobuf_migrations() -> NamedJob {
    // Determines BUF_BASE_BRANCH; for PRs it also merges the base branch
    // into a temp branch so the buf comparison reflects a fresh merge
    // (the GIT_* env vars on the job identify the merge committer).
    fn ensure_fresh_merge() -> Step<Run> {
        named::bash(indoc::indoc! {r#"
            if [ -z "$GITHUB_BASE_REF" ];
            then
                echo "BUF_BASE_BRANCH=$(git merge-base origin/main HEAD)" >> "$GITHUB_ENV"
            else
                git checkout -B temp
                git merge -q "origin/$GITHUB_BASE_REF" -m "merge main into temp"
                echo "BUF_BASE_BRANCH=$GITHUB_BASE_REF" >> "$GITHUB_ENV"
            fi
        "#})
    }

    // Installs a pinned buf CLI.
    fn bufbuild_setup_action() -> Step<Use> {
        named::uses("bufbuild", "buf-setup-action", "v1")
            .add_with(("version", "v1.29.0"))
            .add_with(("github_token", vars::GITHUB_TOKEN))
    }

    // Runs buf's breaking-change detection for crates/proto/proto against
    // the BUF_BASE_BRANCH computed above.
    fn bufbuild_breaking_action() -> Step<Use> {
        named::uses("bufbuild", "buf-breaking-action", "v1").add_with(("input", "crates/proto/proto/"))
            .add_with(("against", "https://github.com/${GITHUB_REPOSITORY}.git#branch=${BUF_BASE_BRANCH},subdir=crates/proto/proto/"))
    }

    named::job(
        release_job(&[])
            .runs_on(runners::LINUX_DEFAULT)
            .add_env(("GIT_AUTHOR_NAME", "Protobuf Action"))
            .add_env(("GIT_AUTHOR_EMAIL", "ci@zed.dev"))
            .add_env(("GIT_COMMITTER_NAME", "Protobuf Action"))
            .add_env(("GIT_COMMITTER_EMAIL", "ci@zed.dev"))
            // Full history is needed for merge-base / merge operations.
            .add_step(steps::checkout_repo().with_full_history())
            .add_step(ensure_fresh_merge())
            .add_step(bufbuild_setup_action())
            .add_step(bufbuild_breaking_action()),
    )
}
513
/// Builds the job that runs all workspace documentation tests
/// (`cargo test --doc`) on Linux.
fn doctests() -> NamedJob {
    // --no-fail-fast so every doctest failure is reported, not just the first.
    fn run_doctests() -> Step<Run> {
        named::bash(indoc::indoc! {r#"
            cargo test --workspace --doc --no-fail-fast
        "#})
        .id("run_doctests")
    }

    named::job(
        release_job(&[])
            .runs_on(runners::LINUX_DEFAULT)
            .add_step(steps::checkout_repo())
            .add_step(steps::cache_rust_dependencies_namespace())
            .map(steps::install_linux_dependencies)
            .add_step(steps::setup_cargo_config(Platform::Linux))
            .add_step(steps::setup_sccache(Platform::Linux))
            .add_step(run_doctests())
            .add_step(steps::show_sccache_stats(Platform::Linux))
            .add_step(steps::cleanup_cargo_config(Platform::Linux)),
    )
}
535
536fn check_licenses() -> NamedJob {
537 named::job(
538 Job::default()
539 .runs_on(runners::LINUX_SMALL)
540 .add_step(steps::checkout_repo())
541 .add_step(steps::cache_rust_dependencies_namespace())
542 .add_step(steps::script("./script/check-licenses"))
543 .add_step(steps::script("./script/generate-licenses")),
544 )
545}
546
/// Builds the docs job: link-checks the markdown sources, builds the docs
/// with mdBook, then link-checks the generated HTML.
fn check_docs() -> NamedJob {
    // Lychee link checker, pinned to a commit (v2.4.1); http(s) links are
    // excluded so only local/relative links are validated.
    fn lychee_link_check(dir: &str) -> Step<Use> {
        named::uses(
            "lycheeverse",
            "lychee-action",
            "82202e5e9c2f4ef1a55a3d02563e1cb6041e5332",
        ) // v2.4.1
        .add_with(("args", format!("--no-progress --exclude '^http' '{dir}'")))
        .add_with(("fail", true))
        .add_with(("jobSummary", false))
    }

    // Installs a pinned mdBook.
    fn install_mdbook() -> Step<Use> {
        named::uses(
            "peaceiris",
            "actions-mdbook",
            "ee69d230fe19748b7abf22df32acaa93833fad08", // v2
        )
        .with(("mdbook-version", "0.4.37"))
    }

    // Renders ./docs into target/deploy/docs for the second link check.
    fn build_docs() -> Step<Run> {
        named::bash(indoc::indoc! {r#"
            mkdir -p target/deploy
            mdbook build ./docs --dest-dir=../target/deploy/docs/
        "#})
    }

    named::job(
        release_job(&[])
            .runs_on(runners::LINUX_LARGE)
            .add_step(steps::checkout_repo())
            .add_step(steps::setup_cargo_config(Platform::Linux))
            // todo(ci): un-inline build_docs/action.yml here
            .add_step(steps::cache_rust_dependencies_namespace())
            .add_step(
                lychee_link_check("./docs/src/**/*"), // check markdown links
            )
            .map(steps::install_linux_dependencies)
            .add_step(steps::script("./script/generate-action-metadata"))
            .add_step(install_mdbook())
            .add_step(build_docs())
            .add_step(
                lychee_link_check("target/deploy/docs"), // check links in generated html
            ),
    )
}
594
/// Builds the job that lints shell scripts and workflow files, and
/// verifies the generated workflows are in sync with this generator.
pub(crate) fn check_scripts() -> NamedJob {
    // Downloads actionlint via its official installer script; the step is
    // given the id `get_actionlint` below so run_actionlint can find the
    // executable through the step output.
    fn download_actionlint() -> Step<Run> {
        named::bash(
            "bash <(curl https://raw.githubusercontent.com/rhysd/actionlint/main/scripts/download-actionlint.bash)",
        )
    }

    // Lints .github workflow files using the binary downloaded above.
    fn run_actionlint() -> Step<Run> {
        named::bash(indoc::indoc! {r#"
            ${{ steps.get_actionlint.outputs.executable }} -color
        "#})
    }

    // Shellchecks repo scripts, treating findings as errors.
    fn run_shellcheck() -> Step<Run> {
        named::bash("./script/shellcheck-scripts error")
    }

    // Regenerates workflows and fails if .github differs, i.e. someone
    // edited generated files or forgot to run `cargo xtask workflows`.
    fn check_xtask_workflows() -> Step<Run> {
        named::bash(indoc::indoc! {r#"
            cargo xtask workflows
            if ! git diff --exit-code .github; then
                echo "Error: .github directory has uncommitted changes after running 'cargo xtask workflows'"
                echo "Please run 'cargo xtask workflows' locally and commit the changes"
                exit 1
            fi
        "#})
    }

    named::job(
        release_job(&[])
            .runs_on(runners::LINUX_SMALL)
            .add_step(steps::checkout_repo())
            .add_step(run_shellcheck())
            .add_step(download_actionlint().id("get_actionlint"))
            .add_step(run_actionlint())
            .add_step(check_xtask_workflows()),
    )
}