1use gh_workflow::{
2 Concurrency, Container, Event, Expression, Job, Port, PullRequest, Push, Run, Step, Use,
3 Workflow,
4};
5use indexmap::IndexMap;
6
7use crate::tasks::workflows::{
8 steps::{CommonJobConditions, repository_owner_guard_expression},
9 vars::{self, PathCondition},
10};
11
12use super::{
13 runners::{self, Platform},
14 steps::{self, FluentBuilder, NamedJob, named, release_job},
15};
16
/// Builds the main CI workflow: an `orchestrate` job computes path-based
/// filters from the changed-file diff, and the downstream jobs are guarded
/// on its outputs. A final `tests_pass` job aggregates results for branch
/// protection.
pub(crate) fn run_tests() -> Workflow {
    // Specify anything which should potentially skip full test suite in this regex:
    // - docs/
    // - script/update_top_ranking_issues/
    // - .github/ISSUE_TEMPLATE/
    // - .github/workflows/ (except .github/workflows/run_tests, per the
    //   `(?!run_tests)` negative lookahead in the pattern below)
    let should_run_tests = PathCondition::inverted(
        "run_tests",
        r"^(docs/|script/update_top_ranking_issues/|\.github/(ISSUE_TEMPLATE|workflows/(?!run_tests)))",
    );
    let should_check_docs = PathCondition::new("run_docs", r"^(docs/|crates/.*\.rs)");
    let should_check_scripts = PathCondition::new(
        "run_action_checks",
        r"^\.github/(workflows/|actions/|actionlint.yml)|tooling/xtask|script/",
    );
    let should_check_licences =
        PathCondition::new("run_licenses", r"^(Cargo.lock|script/.*licenses)");

    // The orchestrate job evaluates each condition once against the diff;
    // guarded jobs below read the resulting outputs.
    let orchestrate = orchestrate(&[
        &should_check_scripts,
        &should_check_docs,
        &should_check_licences,
        &should_run_tests,
    ]);

    let mut jobs = vec![
        orchestrate,
        check_style(),
        should_run_tests.guard(clippy(Platform::Windows)),
        should_run_tests.guard(clippy(Platform::Linux)),
        should_run_tests.guard(clippy(Platform::Mac)),
        should_run_tests.guard(run_platform_tests(Platform::Windows)),
        should_run_tests.guard(run_platform_tests(Platform::Linux)),
        should_run_tests.guard(run_platform_tests(Platform::Mac)),
        should_run_tests.guard(doctests()),
        should_run_tests.guard(check_workspace_binaries()),
        should_run_tests.guard(check_dependencies()), // could be more specific here?
        should_check_docs.guard(check_docs()),
        should_check_licences.guard(check_licenses()),
        should_check_scripts.guard(check_scripts()),
    ];
    // `tests_pass` needs every job currently in `jobs`.
    let tests_pass = tests_pass(&jobs);

    // NOTE(review): pushed after `tests_pass` was constructed, so `tests_pass`
    // does NOT wait on this job — confirm that is intentional.
    jobs.push(should_run_tests.guard(check_postgres_and_protobuf_migrations())); // could be more specific here?

    named::workflow()
        .add_event(
            Event::default()
                .push(
                    Push::default()
                        .add_branch("main")
                        .add_branch("v[0-9]+.[0-9]+.x"),
                )
                .pull_request(PullRequest::default().add_branch("**")),
        )
        // One concurrency group per workflow+ref. On main the group includes
        // the SHA, so pushes never cancel each other; elsewhere the group is
        // shared and in-flight runs are cancelled.
        .concurrency(
            Concurrency::default()
                .group(concat!(
                    "${{ github.workflow }}-${{ github.ref_name }}-",
                    "${{ github.ref_name == 'main' && github.sha || 'anysha' }}"
                ))
                .cancel_in_progress(true),
        )
        .add_env(("CARGO_TERM_COLOR", "always"))
        .add_env(("RUST_BACKTRACE", 1))
        .add_env(("CARGO_INCREMENTAL", 0))
        .map(|mut workflow| {
            for job in jobs {
                workflow = workflow.add_job(job.name, job.job)
            }
            workflow
        })
        .add_job(tests_pass.name, tests_pass.job)
}
91
92// Generates a bash script that checks changed files against regex patterns
93// and sets GitHub output variables accordingly
94pub fn orchestrate(rules: &[&PathCondition]) -> NamedJob {
95 orchestrate_impl(rules, true)
96}
97
98pub fn orchestrate_without_package_filter(rules: &[&PathCondition]) -> NamedJob {
99 orchestrate_impl(rules, false)
100}
101
/// Shared implementation of the `orchestrate` job.
///
/// Emits a single bash step that diffs the changed files against a base
/// revision and writes one `true`/`false` GitHub output per [`PathCondition`],
/// plus — when `include_package_filter` is set — a `changed_packages` nextest
/// filterset output used to restrict which packages get tested.
fn orchestrate_impl(rules: &[&PathCondition], include_package_filter: bool) -> NamedJob {
    let name = "orchestrate".to_owned();
    let step_name = "filter".to_owned();
    let mut script = String::new();

    // Preamble: compute COMPARE_REV (merge-base with the PR base branch, or
    // HEAD~1 for direct pushes), collect CHANGED_FILES, and define the
    // check_pattern helper that writes one boolean output per pattern.
    script.push_str(indoc::indoc! {r#"
        if [ -z "$GITHUB_BASE_REF" ]; then
            echo "Not in a PR context (i.e., push to main/stable/preview)"
            COMPARE_REV="$(git rev-parse HEAD~1)"
        else
            echo "In a PR context comparing to pull_request.base.ref"
            git fetch origin "$GITHUB_BASE_REF" --depth=350
            COMPARE_REV="$(git merge-base "origin/${GITHUB_BASE_REF}" HEAD)"
        fi
        CHANGED_FILES="$(git diff --name-only "$COMPARE_REV" ${{ github.sha }})"

        check_pattern() {
            local output_name="$1"
            local pattern="$2"
            local grep_arg="$3"

            echo "$CHANGED_FILES" | grep "$grep_arg" "$pattern" && \
                echo "${output_name}=true" >> "$GITHUB_OUTPUT" || \
                echo "${output_name}=false" >> "$GITHUB_OUTPUT"
        }

    "#});

    let mut outputs = IndexMap::new();

    if include_package_filter {
        // Package-level filtering: map changed paths to cargo package names
        // and build a nextest `rdeps(...)` filterset. An empty
        // `changed_packages` output means "run everything".
        script.push_str(indoc::indoc! {r#"
            # Check for changes that require full rebuild (no filter)
            # Direct pushes to main/stable/preview always run full suite
            if [ -z "$GITHUB_BASE_REF" ]; then
                echo "Not a PR, running full test suite"
                echo "changed_packages=" >> "$GITHUB_OUTPUT"
            elif echo "$CHANGED_FILES" | grep -qP '^(rust-toolchain\.toml|\.cargo/|\.github/|Cargo\.(toml|lock)$)'; then
                echo "Toolchain, cargo config, or root Cargo files changed, will run all tests"
                echo "changed_packages=" >> "$GITHUB_OUTPUT"
            else
                # Extract changed directories from file paths
                CHANGED_DIRS=$(echo "$CHANGED_FILES" | \
                    grep -oP '^(crates|tooling)/\K[^/]+' | \
                    sort -u || true)

                # Build directory-to-package mapping using cargo metadata
                DIR_TO_PKG=$(cargo metadata --format-version=1 --no-deps 2>/dev/null | \
                    jq -r '.packages[] | select(.manifest_path | test("crates/|tooling/")) | "\(.manifest_path | capture("(crates|tooling)/(?<dir>[^/]+)") | .dir)=\(.name)"')

                # Map directory names to package names
                FILE_CHANGED_PKGS=""
                for dir in $CHANGED_DIRS; do
                    pkg=$(echo "$DIR_TO_PKG" | grep "^${dir}=" | cut -d= -f2 | head -1)
                    if [ -n "$pkg" ]; then
                        FILE_CHANGED_PKGS=$(printf '%s\n%s' "$FILE_CHANGED_PKGS" "$pkg")
                    else
                        # Fall back to directory name if no mapping found
                        FILE_CHANGED_PKGS=$(printf '%s\n%s' "$FILE_CHANGED_PKGS" "$dir")
                    fi
                done
                FILE_CHANGED_PKGS=$(echo "$FILE_CHANGED_PKGS" | grep -v '^$' | sort -u || true)

                # If assets/ changed, add crates that depend on those assets
                if echo "$CHANGED_FILES" | grep -qP '^assets/'; then
                    FILE_CHANGED_PKGS=$(printf '%s\n%s\n%s\n%s' "$FILE_CHANGED_PKGS" "settings" "storybook" "assets" | sort -u)
                fi

                # Combine all changed packages
                ALL_CHANGED_PKGS=$(echo "$FILE_CHANGED_PKGS" | grep -v '^$' || true)

                if [ -z "$ALL_CHANGED_PKGS" ]; then
                    echo "No package changes detected, will run all tests"
                    echo "changed_packages=" >> "$GITHUB_OUTPUT"
                else
                    # Build nextest filterset with rdeps for each package
                    FILTERSET=$(echo "$ALL_CHANGED_PKGS" | \
                        sed 's/.*/rdeps(&)/' | \
                        tr '\n' '|' | \
                        sed 's/|$//')
                    echo "Changed packages filterset: $FILTERSET"
                    echo "changed_packages=$FILTERSET" >> "$GITHUB_OUTPUT"
                fi
            fi

        "#});

        outputs.insert(
            "changed_packages".to_owned(),
            format!("${{{{ steps.{}.outputs.changed_packages }}}}", step_name),
        );
    }

    // Register each rule: record which job sets it (asserting no other job
    // claimed it first), expose it as a job output, and append the matching
    // check_pattern invocation to the script.
    for rule in rules {
        assert!(
            rule.set_by_step
                .borrow_mut()
                .replace(name.clone())
                .is_none()
        );
        assert!(
            outputs
                .insert(
                    rule.name.to_owned(),
                    format!("${{{{ steps.{}.outputs.{} }}}}", step_name, rule.name)
                )
                .is_none()
        );

        // Inverted rules (grep -v) report true when at least one changed file
        // does NOT match the pattern; regular rules report true when any
        // changed file matches.
        let grep_arg = if rule.invert { "-qvP" } else { "-qP" };
        script.push_str(&format!(
            "check_pattern \"{}\" '{}' {}\n",
            rule.name, rule.pattern, grep_arg
        ));
    }

    let job = Job::default()
        .runs_on(runners::LINUX_SMALL)
        .with_repository_owner_guard()
        .outputs(outputs)
        // Pushes to main only need HEAD~1 (depth 2); PRs need more history to
        // find the merge-base with the base branch.
        .add_step(steps::checkout_repo().add_with((
            "fetch-depth",
            "${{ github.ref == 'refs/heads/main' && 2 || 350 }}",
        )))
        .add_step(Step::new(step_name.clone()).run(script).id(step_name));

    NamedJob { name, job }
}
230
231pub fn tests_pass(jobs: &[NamedJob]) -> NamedJob {
232 let mut script = String::from(indoc::indoc! {r#"
233 set +x
234 EXIT_CODE=0
235
236 check_result() {
237 echo "* $1: $2"
238 if [[ "$2" != "skipped" && "$2" != "success" ]]; then EXIT_CODE=1; fi
239 }
240
241 "#});
242
243 script.push_str(
244 &jobs
245 .iter()
246 .map(|job| {
247 format!(
248 "check_result \"{}\" \"${{{{ needs.{}.result }}}}\"",
249 job.name, job.name
250 )
251 })
252 .collect::<Vec<_>>()
253 .join("\n"),
254 );
255
256 script.push_str("\n\nexit $EXIT_CODE\n");
257
258 let job = Job::default()
259 .runs_on(runners::LINUX_SMALL)
260 .needs(
261 jobs.iter()
262 .map(|j| j.name.to_string())
263 .collect::<Vec<String>>(),
264 )
265 .cond(repository_owner_guard_expression(true))
266 .add_step(named::bash(&script));
267
268 named::job(job)
269}
270
271fn check_style() -> NamedJob {
272 fn check_for_typos() -> Step<Use> {
273 named::uses(
274 "crate-ci",
275 "typos",
276 "2d0ce569feab1f8752f1dde43cc2f2aa53236e06",
277 ) // v1.40.0
278 .with(("config", "./typos.toml"))
279 }
280 named::job(
281 release_job(&[])
282 .runs_on(runners::LINUX_MEDIUM)
283 .add_step(steps::checkout_repo())
284 .add_step(steps::cache_rust_dependencies_namespace())
285 .add_step(steps::setup_pnpm())
286 .add_step(steps::prettier())
287 .add_step(steps::cargo_fmt())
288 .add_step(steps::script("./script/check-todos"))
289 .add_step(steps::script("./script/check-keymaps"))
290 .add_step(check_for_typos()),
291 )
292}
293
294fn check_dependencies() -> NamedJob {
295 fn install_cargo_machete() -> Step<Use> {
296 named::uses(
297 "clechasseur",
298 "rs-cargo",
299 "8435b10f6e71c2e3d4d3b7573003a8ce4bfc6386", // v2
300 )
301 .add_with(("command", "install"))
302 .add_with(("args", "cargo-machete@0.7.0"))
303 }
304
305 fn run_cargo_machete() -> Step<Use> {
306 named::uses(
307 "clechasseur",
308 "rs-cargo",
309 "8435b10f6e71c2e3d4d3b7573003a8ce4bfc6386", // v2
310 )
311 .add_with(("command", "machete"))
312 }
313
314 fn check_cargo_lock() -> Step<Run> {
315 named::bash("cargo update --locked --workspace")
316 }
317
318 fn check_vulnerable_dependencies() -> Step<Use> {
319 named::uses(
320 "actions",
321 "dependency-review-action",
322 "67d4f4bd7a9b17a0db54d2a7519187c65e339de8", // v4
323 )
324 .if_condition(Expression::new("github.event_name == 'pull_request'"))
325 .with(("license-check", false))
326 }
327
328 named::job(
329 release_job(&[])
330 .runs_on(runners::LINUX_SMALL)
331 .add_step(steps::checkout_repo())
332 .add_step(steps::cache_rust_dependencies_namespace())
333 .add_step(install_cargo_machete())
334 .add_step(run_cargo_machete())
335 .add_step(check_cargo_lock())
336 .add_step(check_vulnerable_dependencies()),
337 )
338}
339
340fn check_workspace_binaries() -> NamedJob {
341 named::job(
342 release_job(&[])
343 .runs_on(runners::LINUX_LARGE)
344 .add_step(steps::checkout_repo())
345 .add_step(steps::setup_cargo_config(Platform::Linux))
346 .add_step(steps::cache_rust_dependencies_namespace())
347 .map(steps::install_linux_dependencies)
348 .add_step(steps::setup_sccache(Platform::Linux))
349 .add_step(steps::script("cargo build -p collab"))
350 .add_step(steps::script("cargo build --workspace --bins --examples"))
351 .add_step(steps::show_sccache_stats(Platform::Linux))
352 .add_step(steps::cleanup_cargo_config(Platform::Linux)),
353 )
354}
355
356pub(crate) fn clippy(platform: Platform) -> NamedJob {
357 let runner = match platform {
358 Platform::Windows => runners::WINDOWS_DEFAULT,
359 Platform::Linux => runners::LINUX_DEFAULT,
360 Platform::Mac => runners::MAC_DEFAULT,
361 };
362 NamedJob {
363 name: format!("clippy_{platform}"),
364 job: release_job(&[])
365 .runs_on(runner)
366 .add_step(steps::checkout_repo())
367 .add_step(steps::setup_cargo_config(platform))
368 .when(
369 platform == Platform::Linux || platform == Platform::Mac,
370 |this| this.add_step(steps::cache_rust_dependencies_namespace()),
371 )
372 .when(
373 platform == Platform::Linux,
374 steps::install_linux_dependencies,
375 )
376 .add_step(steps::setup_sccache(platform))
377 .add_step(steps::clippy(platform))
378 .add_step(steps::show_sccache_stats(platform)),
379 }
380}
381
382pub(crate) fn run_platform_tests(platform: Platform) -> NamedJob {
383 run_platform_tests_impl(platform, true)
384}
385
386pub(crate) fn run_platform_tests_no_filter(platform: Platform) -> NamedJob {
387 run_platform_tests_impl(platform, false)
388}
389
390fn run_platform_tests_impl(platform: Platform, filter_packages: bool) -> NamedJob {
391 let runner = match platform {
392 Platform::Windows => runners::WINDOWS_DEFAULT,
393 Platform::Linux => runners::LINUX_DEFAULT,
394 Platform::Mac => runners::MAC_DEFAULT,
395 };
396 NamedJob {
397 name: format!("run_tests_{platform}"),
398 job: release_job(&[])
399 .runs_on(runner)
400 .when(platform == Platform::Linux, |job| {
401 job.add_service(
402 "postgres",
403 Container::new("postgres:15")
404 .add_env(("POSTGRES_HOST_AUTH_METHOD", "trust"))
405 .ports(vec![Port::Name("5432:5432".into())])
406 .options(
407 "--health-cmd pg_isready \
408 --health-interval 500ms \
409 --health-timeout 5s \
410 --health-retries 10",
411 ),
412 )
413 })
414 .add_step(steps::checkout_repo())
415 .add_step(steps::setup_cargo_config(platform))
416 .when(
417 platform == Platform::Linux || platform == Platform::Mac,
418 |this| this.add_step(steps::cache_rust_dependencies_namespace()),
419 )
420 .when(
421 platform == Platform::Linux,
422 steps::install_linux_dependencies,
423 )
424 .add_step(steps::setup_node())
425 .when(
426 platform == Platform::Linux || platform == Platform::Mac,
427 |job| job.add_step(steps::cargo_install_nextest()),
428 )
429 .add_step(steps::clear_target_dir_if_large(platform))
430 .add_step(steps::setup_sccache(platform))
431 .when(filter_packages, |job| {
432 job.add_step(
433 steps::cargo_nextest(platform).with_changed_packages_filter("orchestrate"),
434 )
435 })
436 .when(!filter_packages, |job| {
437 job.add_step(steps::cargo_nextest(platform))
438 })
439 .add_step(steps::show_sccache_stats(platform))
440 .add_step(steps::cleanup_cargo_config(platform)),
441 }
442}
443
/// Checks for breaking protobuf changes (via buf) against the PR base branch,
/// or against the merge-base with origin/main on direct pushes.
pub(crate) fn check_postgres_and_protobuf_migrations() -> NamedJob {
    // Drop any untracked files so the merge below starts from a clean tree.
    fn remove_untracked_files() -> Step<Run> {
        named::bash("git clean -df")
    }

    // Determines the buf "against" ref and exports it as BUF_BASE_BRANCH:
    // on direct pushes it is the merge-base with origin/main (a commit SHA);
    // on PRs the base branch is merged into a temp branch first and the base
    // branch name is exported.
    // NOTE(review): in the push case BUF_BASE_BRANCH holds a SHA, but it is
    // later substituted into `#branch=...` — confirm buf accepts a SHA there.
    fn ensure_fresh_merge() -> Step<Run> {
        named::bash(indoc::indoc! {r#"
            if [ -z "$GITHUB_BASE_REF" ];
            then
                echo "BUF_BASE_BRANCH=$(git merge-base origin/main HEAD)" >> "$GITHUB_ENV"
            else
                git checkout -B temp
                git merge -q "origin/$GITHUB_BASE_REF" -m "merge main into temp"
                echo "BUF_BASE_BRANCH=$GITHUB_BASE_REF" >> "$GITHUB_ENV"
            fi
        "#})
    }

    // Installs the buf CLI (pinned to v1.29.0).
    fn bufbuild_setup_action() -> Step<Use> {
        named::uses("bufbuild", "buf-setup-action", "v1")
            .add_with(("version", "v1.29.0"))
            .add_with(("github_token", vars::GITHUB_TOKEN))
    }

    // Runs buf's breaking-change detection for crates/proto/proto/ against
    // the BUF_BASE_BRANCH ref exported above.
    fn bufbuild_breaking_action() -> Step<Use> {
        named::uses("bufbuild", "buf-breaking-action", "v1").add_with(("input", "crates/proto/proto/"))
        .add_with(("against", "https://github.com/${GITHUB_REPOSITORY}.git#branch=${BUF_BASE_BRANCH},subdir=crates/proto/proto/"))
    }

    named::job(
        release_job(&[])
            .runs_on(runners::LINUX_DEFAULT)
            // Identity used for the temporary merge commit created above.
            .add_env(("GIT_AUTHOR_NAME", "Protobuf Action"))
            .add_env(("GIT_AUTHOR_EMAIL", "ci@zed.dev"))
            .add_env(("GIT_COMMITTER_NAME", "Protobuf Action"))
            .add_env(("GIT_COMMITTER_EMAIL", "ci@zed.dev"))
            .add_step(steps::checkout_repo().with(("fetch-depth", 0))) // fetch full history
            .add_step(remove_untracked_files())
            .add_step(ensure_fresh_merge())
            .add_step(bufbuild_setup_action())
            .add_step(bufbuild_breaking_action()),
    )
}
487
488fn doctests() -> NamedJob {
489 fn run_doctests() -> Step<Run> {
490 named::bash(indoc::indoc! {r#"
491 cargo test --workspace --doc --no-fail-fast
492 "#})
493 .id("run_doctests")
494 }
495
496 named::job(
497 release_job(&[])
498 .runs_on(runners::LINUX_DEFAULT)
499 .add_step(steps::checkout_repo())
500 .add_step(steps::cache_rust_dependencies_namespace())
501 .map(steps::install_linux_dependencies)
502 .add_step(steps::setup_cargo_config(Platform::Linux))
503 .add_step(steps::setup_sccache(Platform::Linux))
504 .add_step(run_doctests())
505 .add_step(steps::show_sccache_stats(Platform::Linux))
506 .add_step(steps::cleanup_cargo_config(Platform::Linux)),
507 )
508}
509
510fn check_licenses() -> NamedJob {
511 named::job(
512 Job::default()
513 .runs_on(runners::LINUX_SMALL)
514 .add_step(steps::checkout_repo())
515 .add_step(steps::cache_rust_dependencies_namespace())
516 .add_step(steps::script("./script/check-licenses"))
517 .add_step(steps::script("./script/generate-licenses")),
518 )
519}
520
521fn check_docs() -> NamedJob {
522 fn lychee_link_check(dir: &str) -> Step<Use> {
523 named::uses(
524 "lycheeverse",
525 "lychee-action",
526 "82202e5e9c2f4ef1a55a3d02563e1cb6041e5332",
527 ) // v2.4.1
528 .add_with(("args", format!("--no-progress --exclude '^http' '{dir}'")))
529 .add_with(("fail", true))
530 .add_with(("jobSummary", false))
531 }
532
533 fn install_mdbook() -> Step<Use> {
534 named::uses(
535 "peaceiris",
536 "actions-mdbook",
537 "ee69d230fe19748b7abf22df32acaa93833fad08", // v2
538 )
539 .with(("mdbook-version", "0.4.37"))
540 }
541
542 fn build_docs() -> Step<Run> {
543 named::bash(indoc::indoc! {r#"
544 mkdir -p target/deploy
545 mdbook build ./docs --dest-dir=../target/deploy/docs/
546 "#})
547 }
548
549 named::job(
550 release_job(&[])
551 .runs_on(runners::LINUX_LARGE)
552 .add_step(steps::checkout_repo())
553 .add_step(steps::setup_cargo_config(Platform::Linux))
554 // todo(ci): un-inline build_docs/action.yml here
555 .add_step(steps::cache_rust_dependencies_namespace())
556 .add_step(
557 lychee_link_check("./docs/src/**/*"), // check markdown links
558 )
559 .map(steps::install_linux_dependencies)
560 .add_step(steps::script("./script/generate-action-metadata"))
561 .add_step(install_mdbook())
562 .add_step(build_docs())
563 .add_step(
564 lychee_link_check("target/deploy/docs"), // check links in generated html
565 ),
566 )
567}
568
569pub(crate) fn check_scripts() -> NamedJob {
570 fn download_actionlint() -> Step<Run> {
571 named::bash(
572 "bash <(curl https://raw.githubusercontent.com/rhysd/actionlint/main/scripts/download-actionlint.bash)",
573 )
574 }
575
576 fn run_actionlint() -> Step<Run> {
577 named::bash(indoc::indoc! {r#"
578 ${{ steps.get_actionlint.outputs.executable }} -color
579 "#})
580 }
581
582 fn run_shellcheck() -> Step<Run> {
583 named::bash("./script/shellcheck-scripts error")
584 }
585
586 fn check_xtask_workflows() -> Step<Run> {
587 named::bash(indoc::indoc! {r#"
588 cargo xtask workflows
589 if ! git diff --exit-code .github; then
590 echo "Error: .github directory has uncommitted changes after running 'cargo xtask workflows'"
591 echo "Please run 'cargo xtask workflows' locally and commit the changes"
592 exit 1
593 fi
594 "#})
595 }
596
597 named::job(
598 release_job(&[])
599 .runs_on(runners::LINUX_SMALL)
600 .add_step(steps::checkout_repo())
601 .add_step(run_shellcheck())
602 .add_step(download_actionlint().id("get_actionlint"))
603 .add_step(run_actionlint())
604 .add_step(check_xtask_workflows()),
605 )
606}