1use gh_workflow::{
2 Concurrency, Container, Event, Expression, Input, Job, Level, Permissions, Port, PullRequest,
3 Push, Run, Step, Strategy, Use, UsesJob, Workflow,
4};
5use indexmap::IndexMap;
6use indoc::formatdoc;
7use serde_json::json;
8
9use crate::tasks::workflows::{
10 steps::{
11 CommonJobConditions, cache_rust_dependencies_namespace, repository_owner_guard_expression,
12 use_clang,
13 },
14 vars::{self, PathCondition},
15};
16
17use super::{
18 runners::{self, Arch, Platform},
19 steps::{self, FluentBuilder, NamedJob, named, release_job},
20};
21
/// Generates the main CI workflow: an `orchestrate` job inspects the changed
/// files, and most downstream jobs are guarded on its outputs via
/// [`PathCondition::guard`] so unrelated changes skip the expensive work.
pub(crate) fn run_tests() -> Workflow {
    // Specify anything which should potentially skip full test suite in this regex:
    // - docs/
    // - script/update_top_ranking_issues/
    // - .github/ISSUE_TEMPLATE/
    // - .github/workflows/ (except workflow paths matching `run_tests`, per
    //   the `(?!run_tests)` negative lookahead below)
    // - extensions/ (these have their own test workflow)
    let should_run_tests = PathCondition::inverted(
        "run_tests",
        r"^(docs/|script/update_top_ranking_issues/|\.github/(ISSUE_TEMPLATE|workflows/(?!run_tests))|extensions/)",
    );
    let should_check_docs = PathCondition::new("run_docs", r"^(docs/|crates/.*\.rs)");
    let should_check_scripts = PathCondition::new(
        "run_action_checks",
        r"^\.github/(workflows/|actions/|actionlint.yml)|tooling/xtask|script/",
    );
    let should_check_licences =
        PathCondition::new("run_licenses", r"^(Cargo.lock|script/.*licenses)");

    // The orchestrate job evaluates each rule once against the diff and
    // publishes one boolean output per rule for the guards below.
    let orchestrate = orchestrate(&[
        &should_check_scripts,
        &should_check_docs,
        &should_check_licences,
        &should_run_tests,
    ]);

    let mut jobs = vec![
        orchestrate,
        check_style(),
        should_run_tests.guard(clippy(Platform::Windows, None)),
        should_run_tests.guard(clippy(Platform::Linux, None)),
        should_run_tests.guard(clippy(Platform::Mac, None)),
        should_run_tests.guard(clippy(Platform::Mac, Some(Arch::X86_64))),
        should_run_tests.guard(run_platform_tests(Platform::Windows)),
        should_run_tests.guard(run_platform_tests(Platform::Linux)),
        should_run_tests.guard(run_platform_tests(Platform::Mac)),
        should_run_tests.guard(doctests()),
        should_run_tests.guard(check_workspace_binaries()),
        should_run_tests.guard(build_visual_tests_binary()),
        should_run_tests.guard(check_wasm()),
        should_run_tests.guard(check_dependencies()), // could be more specific here?
        should_check_docs.guard(check_docs()),
        should_check_licences.guard(check_licenses()),
        should_check_scripts.guard(check_scripts()),
    ];
    let ext_tests = extension_tests();
    // `tests_pass` gates on every job collected so far plus the extension tests.
    let tests_pass = tests_pass(&jobs, &[&ext_tests.name]);

    // NOTE(review): pushed after `tests_pass` captured the job list, so this
    // job is NOT a dependency of `tests_pass` — confirm that is intentional.
    jobs.push(should_run_tests.guard(check_postgres_and_protobuf_migrations())); // could be more specific here?

    named::workflow()
        .add_event(
            Event::default()
                .push(
                    Push::default()
                        .add_branch("main")
                        .add_branch("v[0-9]+.[0-9]+.x"),
                )
                .pull_request(PullRequest::default().add_branch("**")),
        )
        // On `main` the group key includes the sha, so each push gets its own
        // concurrency group; elsewhere the constant 'anysha' makes newer runs
        // for the same ref cancel in-flight ones.
        .concurrency(
            Concurrency::default()
                .group(concat!(
                    "${{ github.workflow }}-${{ github.ref_name }}-",
                    "${{ github.ref_name == 'main' && github.sha || 'anysha' }}"
                ))
                .cancel_in_progress(true),
        )
        .add_env(("CARGO_TERM_COLOR", "always"))
        .add_env(("RUST_BACKTRACE", 1))
        .add_env(("CARGO_INCREMENTAL", 0))
        .map(|mut workflow| {
            for job in jobs {
                workflow = workflow.add_job(job.name, job.job)
            }
            workflow
        })
        .add_job(ext_tests.name, ext_tests.job)
        .add_job(tests_pass.name, tests_pass.job)
}
102
/// Controls which features `orchestrate_impl` includes in the generated script.
//
// Fieldless enum that is only ever compared by value, so the cheap standard
// derives (`Clone`, `Copy`, `Debug`) are added alongside the comparisons.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
enum OrchestrateTarget {
    /// For the main Zed repo: includes the cargo package filter and extension
    /// change detection, but no working-directory scoping.
    ZedRepo,
    /// For individual extension repos: scopes changed-file detection to the
    /// working directory, with no package filter or extension detection.
    Extension,
}
113
/// Generates a bash script step that checks changed files against regex
/// patterns and sets GitHub output variables accordingly.
///
/// Variant for the main Zed repo: additionally emits the `changed_packages`
/// and `changed_extensions` outputs (see [`orchestrate_impl`]).
pub fn orchestrate(rules: &[&PathCondition]) -> NamedJob {
    orchestrate_impl(rules, OrchestrateTarget::ZedRepo)
}
119
/// Variant of [`orchestrate`] for standalone extension repositories: changed
/// files are scoped to the current working directory, and no package filter
/// or extension detection is emitted (see [`orchestrate_impl`]).
pub fn orchestrate_for_extension(rules: &[&PathCondition]) -> NamedJob {
    orchestrate_impl(rules, OrchestrateTarget::Extension)
}
123
/// Shared implementation for [`orchestrate`] / [`orchestrate_for_extension`].
///
/// Emits a job named `orchestrate` whose single bash step (id `filter`):
/// 1. computes `CHANGED_FILES` (merge-base diff in PRs, `HEAD~1` on pushes);
/// 2. for [`OrchestrateTarget::ZedRepo`], derives a `changed_packages`
///    nextest filterset from the changed `crates/`/`tooling/` directories;
/// 3. runs `check_pattern` once per rule, writing a `true`/`false` output
///    named after the rule;
/// 4. for [`OrchestrateTarget::ZedRepo`], publishes `changed_extensions`
///    as a JSON array.
fn orchestrate_impl(rules: &[&PathCondition], target: OrchestrateTarget) -> NamedJob {
    let name = "orchestrate".to_owned();
    let step_name = "filter".to_owned();
    let mut script = String::new();

    // Compute CHANGED_FILES: diff against the PR merge-base, or against
    // HEAD~1 when not running for a pull request.
    script.push_str(indoc::indoc! {r#"
        set -euo pipefail
        if [ -z "$GITHUB_BASE_REF" ]; then
          echo "Not in a PR context (i.e., push to main/stable/preview)"
          COMPARE_REV="$(git rev-parse HEAD~1)"
        else
          echo "In a PR context comparing to pull_request.base.ref"
          git fetch origin "$GITHUB_BASE_REF" --depth=350
          COMPARE_REV="$(git merge-base "origin/${GITHUB_BASE_REF}" HEAD)"
        fi
        CHANGED_FILES="$(git diff --name-only "$COMPARE_REV" "$GITHUB_SHA")"

    "#});

    // Extension repos run this step from a subdirectory, so the repo-root-
    // relative diff paths are narrowed and re-rooted first.
    if target == OrchestrateTarget::Extension {
        script.push_str(indoc::indoc! {r#"
            # When running from a subdirectory, git diff returns repo-root-relative paths.
            # Filter to only files within the current working directory and strip the prefix.
            REPO_SUBDIR="$(git rev-parse --show-prefix)"
            REPO_SUBDIR="${REPO_SUBDIR%/}"
            if [ -n "$REPO_SUBDIR" ]; then
              CHANGED_FILES="$(echo "$CHANGED_FILES" | grep "^${REPO_SUBDIR}/" | sed "s|^${REPO_SUBDIR}/||" || true)"
            fi

        "#});
    }

    // check_pattern writes `<name>=true` when grep matches, `<name>=false`
    // otherwise; the grep flag (-qP / -qvP) is supplied per rule below.
    script.push_str(indoc::indoc! {r#"
        check_pattern() {
          local output_name="$1"
          local pattern="$2"
          local grep_arg="$3"

          echo "$CHANGED_FILES" | grep "$grep_arg" "$pattern" && \
            echo "${output_name}=true" >> "$GITHUB_OUTPUT" || \
            echo "${output_name}=false" >> "$GITHUB_OUTPUT"
        }

    "#});

    let mut outputs = IndexMap::new();

    if target == OrchestrateTarget::ZedRepo {
        script.push_str(indoc::indoc! {r#"
            # Check for changes that require full rebuild (no filter)
            # Direct pushes to main/stable/preview always run full suite
            if [ -z "$GITHUB_BASE_REF" ]; then
              echo "Not a PR, running full test suite"
              echo "changed_packages=" >> "$GITHUB_OUTPUT"
            elif echo "$CHANGED_FILES" | grep -qP '^(rust-toolchain\.toml|\.cargo/|\.github/|Cargo\.(toml|lock)$)'; then
              echo "Toolchain, cargo config, or root Cargo files changed, will run all tests"
              echo "changed_packages=" >> "$GITHUB_OUTPUT"
            else
              # Extract changed directories from file paths
              CHANGED_DIRS=$(echo "$CHANGED_FILES" | \
                grep -oP '^(crates|tooling)/\K[^/]+' | \
                sort -u || true)

              # Build directory-to-package mapping using cargo metadata
              DIR_TO_PKG=$(cargo metadata --format-version=1 --no-deps 2>/dev/null | \
                jq -r '.packages[] | select(.manifest_path | test("crates/|tooling/")) | "\(.manifest_path | capture("(crates|tooling)/(?<dir>[^/]+)") | .dir)=\(.name)"')

              # Map directory names to package names
              FILE_CHANGED_PKGS=""
              for dir in $CHANGED_DIRS; do
                pkg=$(echo "$DIR_TO_PKG" | grep "^${dir}=" | cut -d= -f2 | head -1)
                if [ -n "$pkg" ]; then
                  FILE_CHANGED_PKGS=$(printf '%s\n%s' "$FILE_CHANGED_PKGS" "$pkg")
                else
                  # Fall back to directory name if no mapping found
                  FILE_CHANGED_PKGS=$(printf '%s\n%s' "$FILE_CHANGED_PKGS" "$dir")
                fi
              done
              FILE_CHANGED_PKGS=$(echo "$FILE_CHANGED_PKGS" | grep -v '^$' | sort -u || true)

              # If assets/ changed, add crates that depend on those assets
              if echo "$CHANGED_FILES" | grep -qP '^assets/'; then
                FILE_CHANGED_PKGS=$(printf '%s\n%s\n%s\n%s' "$FILE_CHANGED_PKGS" "settings" "storybook" "assets" | sort -u)
              fi

              # Combine all changed packages
              ALL_CHANGED_PKGS=$(echo "$FILE_CHANGED_PKGS" | grep -v '^$' || true)

              if [ -z "$ALL_CHANGED_PKGS" ]; then
                echo "No package changes detected, will run all tests"
                echo "changed_packages=" >> "$GITHUB_OUTPUT"
              else
                # Build nextest filterset with rdeps for each package
                FILTERSET=$(echo "$ALL_CHANGED_PKGS" | \
                  sed 's/.*/rdeps(&)/' | \
                  tr '\n' '|' | \
                  sed 's/|$//')
                echo "Changed packages filterset: $FILTERSET"
                echo "changed_packages=$FILTERSET" >> "$GITHUB_OUTPUT"
              fi
            fi

        "#});

        outputs.insert(
            "changed_packages".to_owned(),
            format!("${{{{ steps.{}.outputs.changed_packages }}}}", step_name),
        );
    }

    for rule in rules {
        // Each rule may be claimed by exactly one orchestrate job;
        // `set_by_step` records the claim and this assert trips on a
        // double registration.
        assert!(
            rule.set_by_step
                .borrow_mut()
                .replace(name.clone())
                .is_none()
        );
        // Duplicate rule names would silently overwrite an output entry, so
        // assert that every insertion is fresh.
        assert!(
            outputs
                .insert(
                    rule.name.to_owned(),
                    format!("${{{{ steps.{}.outputs.{} }}}}", step_name, rule.name)
                )
                .is_none()
        );

        // Inverted rules flip the match with grep -v: "true unless every
        // changed file matches the pattern".
        let grep_arg = if rule.invert { "-qvP" } else { "-qP" };
        script.push_str(&format!(
            "check_pattern \"{}\" '{}' {}\n",
            rule.name, rule.pattern, grep_arg
        ));
    }

    if target == OrchestrateTarget::ZedRepo {
        script.push_str(DETECT_CHANGED_EXTENSIONS_SCRIPT);
        script.push_str("echo \"changed_extensions=$EXTENSIONS_JSON\" >> \"$GITHUB_OUTPUT\"\n");

        outputs.insert(
            "changed_extensions".to_owned(),
            format!("${{{{ steps.{}.outputs.changed_extensions }}}}", step_name),
        );
    }

    let job = Job::default()
        .runs_on(runners::LINUX_SMALL)
        .with_repository_owner_guard()
        .outputs(outputs)
        // Extra history is needed so merge-base / HEAD~1 comparisons resolve.
        .add_step(steps::checkout_repo().with_deep_history_on_non_main())
        .add_step(Step::new(step_name.clone()).run(script).id(step_name));

    NamedJob { name, job }
}
276
277pub fn tests_pass(jobs: &[NamedJob], extra_job_names: &[&str]) -> NamedJob {
278 let mut script = String::from(indoc::indoc! {r#"
279 set +x
280 EXIT_CODE=0
281
282 check_result() {
283 echo "* $1: $2"
284 if [[ "$2" != "skipped" && "$2" != "success" ]]; then EXIT_CODE=1; fi
285 }
286
287 "#});
288
289 let all_names: Vec<&str> = jobs
290 .iter()
291 .map(|job| job.name.as_str())
292 .chain(extra_job_names.iter().copied())
293 .collect();
294
295 let env_entries: Vec<_> = all_names
296 .iter()
297 .map(|name| {
298 let env_name = format!("RESULT_{}", name.to_uppercase());
299 let env_value = format!("${{{{ needs.{}.result }}}}", name);
300 (env_name, env_value)
301 })
302 .collect();
303
304 script.push_str(
305 &all_names
306 .iter()
307 .zip(env_entries.iter())
308 .map(|(name, (env_name, _))| format!("check_result \"{}\" \"${}\"", name, env_name))
309 .collect::<Vec<_>>()
310 .join("\n"),
311 );
312
313 script.push_str("\n\nexit $EXIT_CODE\n");
314
315 let job = Job::default()
316 .runs_on(runners::LINUX_SMALL)
317 .needs(
318 all_names
319 .iter()
320 .map(|name| name.to_string())
321 .collect::<Vec<String>>(),
322 )
323 .cond(repository_owner_guard_expression(true))
324 .add_step(
325 env_entries
326 .into_iter()
327 .fold(named::bash(&script), |step, env_item| {
328 step.add_env(env_item)
329 }),
330 );
331
332 named::job(job)
333}
334
/// Bash script snippet that detects changed extension directories from `$CHANGED_FILES`.
/// Assumes `$CHANGED_FILES` is already set. Sets `$EXTENSIONS_JSON` to a JSON array of
/// changed extension paths. Callers are responsible for writing the result to `$GITHUB_OUTPUT`.
///
/// Extensions whose `extension.toml` no longer exists (i.e. deleted
/// extensions) are filtered out. Relies on `grep -P` (PCRE lookahead) and
/// `jq`, so the runner must provide both.
pub(crate) const DETECT_CHANGED_EXTENSIONS_SCRIPT: &str = indoc::indoc! {r#"
    # Detect changed extension directories (excluding extensions/workflows)
    CHANGED_EXTENSIONS=$(echo "$CHANGED_FILES" | grep -oP '^extensions/[^/]+(?=/)' | sort -u | grep -v '^extensions/workflows$' || true)
    # Filter out deleted extensions
    EXISTING_EXTENSIONS=""
    for ext in $CHANGED_EXTENSIONS; do
      if [ -f "$ext/extension.toml" ]; then
        EXISTING_EXTENSIONS=$(printf '%s\n%s' "$EXISTING_EXTENSIONS" "$ext")
      fi
    done
    CHANGED_EXTENSIONS=$(echo "$EXISTING_EXTENSIONS" | sed '/^$/d')
    if [ -n "$CHANGED_EXTENSIONS" ]; then
      EXTENSIONS_JSON=$(echo "$CHANGED_EXTENSIONS" | jq -R -s -c 'split("\n") | map(select(length > 0))')
    else
      EXTENSIONS_JSON="[]"
    fi
"#};
355
// Release asset name for the Linux x86_64 `ts_query_ls` tarball.
const TS_QUERY_LS_FILE: &str = "ts_query_ls-x86_64-unknown-linux-gnu.tar.gz";
// Pinned `ts_query_ls` release tag fetched in CI.
const CI_TS_QUERY_RELEASE: &str = "tags/v3.15.1";
358
/// Step that downloads the pinned `ts_query_ls` release asset
/// ([`TS_QUERY_LS_FILE`] at [`CI_TS_QUERY_RELEASE`]) from the
/// `ribru17/ts_query_ls` repository.
pub(crate) fn fetch_ts_query_ls() -> Step<Use> {
    named::uses(
        "dsaltares",
        "fetch-gh-release-asset",
        "aa37ae5c44d3c9820bc12fe675e8670ecd93bd1c",
    ) // v1.1.1
    .add_with(("repo", "ribru17/ts_query_ls"))
    .add_with(("version", CI_TS_QUERY_RELEASE))
    .add_with(("file", TS_QUERY_LS_FILE))
}
369
/// Step that unpacks the fetched `ts_query_ls` tarball (see
/// [`fetch_ts_query_ls`]) and runs `format --check`, printing guidance and
/// failing if any tree-sitter queries are unformatted.
pub(crate) fn run_ts_query_ls() -> Step<Run> {
    named::bash(formatdoc!(
        r#"tar -xf "$GITHUB_WORKSPACE/{TS_QUERY_LS_FILE}" -C "$GITHUB_WORKSPACE"
        "$GITHUB_WORKSPACE/ts_query_ls" format --check . || {{
            echo "Found unformatted queries, please format them with ts_query_ls."
            echo "For easy use, install the Tree-sitter query extension:"
            echo "zed://extension/tree-sitter-query"
            false
        }}"#
    ))
}
381
/// Style/lint job: prettier, rustfmt, TODO and keymap checks, typo scanning,
/// and tree-sitter query formatting.
fn check_style() -> NamedJob {
    // Nested fn so the generated step carries this name.
    fn check_for_typos() -> Step<Use> {
        named::uses(
            "crate-ci",
            "typos",
            "2d0ce569feab1f8752f1dde43cc2f2aa53236e06",
        ) // v1.40.0
        .with(("config", "./typos.toml"))
    }

    named::job(
        release_job(&[])
            .runs_on(runners::LINUX_MEDIUM)
            .add_step(steps::checkout_repo())
            .add_step(steps::cache_rust_dependencies_namespace())
            .add_step(steps::setup_pnpm())
            .add_step(steps::prettier())
            .add_step(steps::cargo_fmt())
            .add_step(steps::script("./script/check-todos"))
            .add_step(steps::script("./script/check-keymaps"))
            .add_step(check_for_typos())
            .add_step(fetch_ts_query_ls())
            .add_step(run_ts_query_ls()),
    )
}
407
/// Dependency hygiene job: unused-dependency detection (cargo-machete), a
/// lockfile freshness check, and GitHub's dependency review on PRs.
fn check_dependencies() -> NamedJob {
    fn install_cargo_machete() -> Step<Use> {
        named::uses(
            "clechasseur",
            "rs-cargo",
            "8435b10f6e71c2e3d4d3b7573003a8ce4bfc6386", // v2
        )
        .add_with(("command", "install"))
        .add_with(("args", "cargo-machete@0.7.0"))
    }

    fn run_cargo_machete() -> Step<Use> {
        named::uses(
            "clechasseur",
            "rs-cargo",
            "8435b10f6e71c2e3d4d3b7573003a8ce4bfc6386", // v2
        )
        .add_with(("command", "machete"))
    }

    // `--locked` makes `cargo update` fail rather than rewrite an
    // out-of-date Cargo.lock, so this acts as a freshness check.
    fn check_cargo_lock() -> Step<Run> {
        named::bash("cargo update --locked --workspace")
    }

    // Gated to pull_request events via the `if` condition below.
    fn check_vulnerable_dependencies() -> Step<Use> {
        named::uses(
            "actions",
            "dependency-review-action",
            "67d4f4bd7a9b17a0db54d2a7519187c65e339de8", // v4
        )
        .if_condition(Expression::new("github.event_name == 'pull_request'"))
        .with(("license-check", false))
    }

    named::job(use_clang(
        release_job(&[])
            .runs_on(runners::LINUX_SMALL)
            .add_step(steps::checkout_repo())
            .add_step(steps::cache_rust_dependencies_namespace())
            .add_step(install_cargo_machete())
            .add_step(run_cargo_machete())
            .add_step(check_cargo_lock())
            .add_step(check_vulnerable_dependencies()),
    ))
}
453
/// Checks that `gpui_platform` compiles for `wasm32-unknown-unknown` using a
/// nightly toolchain with `-Zbuild-std`.
fn check_wasm() -> NamedJob {
    // `rust-src` is required for the `-Zbuild-std` invocation below.
    fn install_nightly_wasm_toolchain() -> Step<Run> {
        named::bash(
            "rustup toolchain install nightly --component rust-src --target wasm32-unknown-unknown",
        )
    }

    fn cargo_check_wasm() -> Step<Run> {
        // Target features are injected through the target-specific RUSTFLAGS
        // env var rather than the command line.
        named::bash(concat!(
            "cargo +nightly -Zbuild-std=std,panic_abort ",
            "check --target wasm32-unknown-unknown -p gpui_platform",
        ))
        .add_env((
            "CARGO_TARGET_WASM32_UNKNOWN_UNKNOWN_RUSTFLAGS",
            "-C target-feature=+atomics,+bulk-memory,+mutable-globals",
        ))
    }

    named::job(
        release_job(&[])
            .runs_on(runners::LINUX_LARGE)
            .add_step(steps::checkout_repo())
            .add_step(steps::setup_cargo_config(Platform::Linux))
            .add_step(steps::cache_rust_dependencies_namespace())
            .add_step(install_nightly_wasm_toolchain())
            .add_step(steps::setup_sccache(Platform::Linux))
            .add_step(cargo_check_wasm())
            .add_step(steps::show_sccache_stats(Platform::Linux))
            .add_step(steps::cleanup_cargo_config(Platform::Linux)),
    )
}
485
/// Builds `collab` plus every workspace binary and example on Linux to catch
/// compile breakage outside the main test targets.
fn check_workspace_binaries() -> NamedJob {
    named::job(use_clang(
        release_job(&[])
            .runs_on(runners::LINUX_LARGE)
            .add_step(steps::checkout_repo())
            .add_step(steps::setup_cargo_config(Platform::Linux))
            .add_step(steps::cache_rust_dependencies_namespace())
            .map(steps::install_linux_dependencies)
            .add_step(steps::setup_sccache(Platform::Linux))
            .add_step(steps::script("cargo build -p collab"))
            .add_step(steps::script("cargo build --workspace --bins --examples"))
            .add_step(steps::show_sccache_stats(Platform::Linux))
            .add_step(steps::cleanup_cargo_config(Platform::Linux)),
    ))
}
501
502pub(crate) fn clippy(platform: Platform, arch: Option<Arch>) -> NamedJob {
503 let target = arch.map(|arch| match (platform, arch) {
504 (Platform::Mac, Arch::X86_64) => "x86_64-apple-darwin",
505 (Platform::Mac, Arch::AARCH64) => "aarch64-apple-darwin",
506 _ => unimplemented!("cross-arch clippy not supported for {platform}/{arch}"),
507 });
508 let runner = match platform {
509 Platform::Windows => runners::WINDOWS_DEFAULT,
510 Platform::Linux => runners::LINUX_DEFAULT,
511 Platform::Mac => runners::MAC_DEFAULT,
512 };
513 let mut job = release_job(&[])
514 .runs_on(runner)
515 .add_step(steps::checkout_repo())
516 .add_step(steps::setup_cargo_config(platform))
517 .when(
518 platform == Platform::Linux || platform == Platform::Mac,
519 |this| this.add_step(steps::cache_rust_dependencies_namespace()),
520 )
521 .when(
522 platform == Platform::Linux,
523 steps::install_linux_dependencies,
524 )
525 .when_some(target, |this, target| {
526 this.add_step(steps::install_rustup_target(target))
527 })
528 .add_step(steps::setup_sccache(platform))
529 .add_step(steps::clippy(platform, target))
530 .add_step(steps::show_sccache_stats(platform));
531 if platform == Platform::Linux {
532 job = use_clang(job);
533 }
534 let name = match arch {
535 Some(arch) => format!("clippy_{platform}_{arch}"),
536 None => format!("clippy_{platform}"),
537 };
538 NamedJob { name, job }
539}
540
/// Per-platform test job that narrows the nextest run to the `orchestrate`
/// job's `changed_packages` filterset.
pub(crate) fn run_platform_tests(platform: Platform) -> NamedJob {
    run_platform_tests_impl(platform, true)
}
544
/// Per-platform test job that always runs the full test suite, ignoring the
/// `changed_packages` filterset.
pub(crate) fn run_platform_tests_no_filter(platform: Platform) -> NamedJob {
    run_platform_tests_impl(platform, false)
}
548
/// Shared implementation for the `run_tests_<platform>` jobs.
///
/// `filter_packages` selects between a nextest run scoped to the
/// `orchestrate` job's `changed_packages` filterset and an unfiltered run.
fn run_platform_tests_impl(platform: Platform, filter_packages: bool) -> NamedJob {
    let runner = match platform {
        Platform::Windows => runners::WINDOWS_DEFAULT,
        Platform::Linux => runners::LINUX_DEFAULT,
        Platform::Mac => runners::MAC_DEFAULT,
    };
    NamedJob {
        name: format!("run_tests_{platform}"),
        job: release_job(&[])
            .runs_on(runner)
            // Linux test runs get a Postgres 15 service container
            // (presumably for the collab/database tests — confirm).
            .when(platform == Platform::Linux, |job| {
                job.add_service(
                    "postgres",
                    Container::new("postgres:15")
                        .add_env(("POSTGRES_HOST_AUTH_METHOD", "trust"))
                        .ports(vec![Port::Name("5432:5432".into())])
                        .options(
                            "--health-cmd pg_isready \
                            --health-interval 500ms \
                            --health-timeout 5s \
                            --health-retries 10",
                        ),
                )
            })
            .add_step(steps::checkout_repo())
            .add_step(steps::setup_cargo_config(platform))
            .when(platform == Platform::Mac, |this| {
                this.add_step(steps::cache_rust_dependencies_namespace())
            })
            // Linux additionally routes the build through clang.
            .when(platform == Platform::Linux, |this| {
                use_clang(this.add_step(steps::cache_rust_dependencies_namespace()))
            })
            .when(
                platform == Platform::Linux,
                steps::install_linux_dependencies,
            )
            .add_step(steps::setup_node())
            // Windows is excluded here; nextest is only installed on unix.
            .when(
                platform == Platform::Linux || platform == Platform::Mac,
                |job| job.add_step(steps::cargo_install_nextest()),
            )
            .add_step(steps::clear_target_dir_if_large(platform))
            .add_step(steps::setup_sccache(platform))
            // Exactly one of the two nextest steps is added, depending on
            // whether package filtering is requested.
            .when(filter_packages, |job| {
                job.add_step(
                    steps::cargo_nextest(platform).with_changed_packages_filter("orchestrate"),
                )
            })
            .when(!filter_packages, |job| {
                job.add_step(steps::cargo_nextest(platform))
            })
            .add_step(steps::show_sccache_stats(platform))
            .add_step(steps::cleanup_cargo_config(platform)),
    }
}
604
605fn build_visual_tests_binary() -> NamedJob {
606 pub fn cargo_build_visual_tests() -> Step<Run> {
607 named::bash("cargo build -p zed --bin zed_visual_test_runner --features visual-tests")
608 }
609
610 named::job(
611 Job::default()
612 .runs_on(runners::MAC_DEFAULT)
613 .add_step(steps::checkout_repo())
614 .add_step(steps::setup_cargo_config(Platform::Mac))
615 .add_step(steps::cache_rust_dependencies_namespace())
616 .add_step(cargo_build_visual_tests())
617 .add_step(steps::cleanup_cargo_config(Platform::Mac)),
618 )
619}
620
/// Checks protobuf backward compatibility (buf breaking/lint/format) against
/// the PR base branch, or against the merge-base with `main` on pushes.
pub(crate) fn check_postgres_and_protobuf_migrations() -> NamedJob {
    // Sets BUF_BASE_BRANCH for the breaking-change comparison. In PRs the
    // base branch is merged into a temporary branch first so the comparison
    // reflects the post-merge state.
    fn ensure_fresh_merge() -> Step<Run> {
        named::bash(indoc::indoc! {r#"
            if [ -z "$GITHUB_BASE_REF" ];
            then
              echo "BUF_BASE_BRANCH=$(git merge-base origin/main HEAD)" >> "$GITHUB_ENV"
            else
              git checkout -B temp
              git merge -q "origin/$GITHUB_BASE_REF" -m "merge main into temp"
              echo "BUF_BASE_BRANCH=$GITHUB_BASE_REF" >> "$GITHUB_ENV"
            fi
        "#})
    }

    fn bufbuild_setup_action() -> Step<Use> {
        named::uses("bufbuild", "buf-setup-action", "v1")
            .add_with(("version", "v1.29.0"))
            .add_with(("github_token", vars::GITHUB_TOKEN))
    }

    // Compares crates/proto/proto/ against the same subdir at BUF_BASE_BRANCH.
    fn bufbuild_breaking_action() -> Step<Use> {
        named::uses("bufbuild", "buf-breaking-action", "v1").add_with(("input", "crates/proto/proto/"))
        .add_with(("against", "https://github.com/${GITHUB_REPOSITORY}.git#branch=${BUF_BASE_BRANCH},subdir=crates/proto/proto/"))
    }

    fn buf_lint() -> Step<Run> {
        named::bash("buf lint crates/proto/proto")
    }

    fn check_protobuf_formatting() -> Step<Run> {
        named::bash("buf format --diff --exit-code crates/proto/proto")
    }

    named::job(
        release_job(&[])
            .runs_on(runners::LINUX_DEFAULT)
            // Author/committer identity is required for the merge commit
            // created in ensure_fresh_merge.
            .add_env(("GIT_AUTHOR_NAME", "Protobuf Action"))
            .add_env(("GIT_AUTHOR_EMAIL", "ci@zed.dev"))
            .add_env(("GIT_COMMITTER_NAME", "Protobuf Action"))
            .add_env(("GIT_COMMITTER_EMAIL", "ci@zed.dev"))
            // Full history so merge-base and branch comparisons work.
            .add_step(steps::checkout_repo().with_full_history())
            .add_step(ensure_fresh_merge())
            .add_step(bufbuild_setup_action())
            .add_step(bufbuild_breaking_action())
            .add_step(buf_lint())
            .add_step(check_protobuf_formatting()),
    )
}
669
/// Runs workspace documentation tests on Linux (`cargo test --doc`), which
/// nextest does not cover.
fn doctests() -> NamedJob {
    fn run_doctests() -> Step<Run> {
        named::bash(indoc::indoc! {r#"
            cargo test --workspace --doc --no-fail-fast
        "#})
        .id("run_doctests")
    }

    named::job(use_clang(
        release_job(&[])
            .runs_on(runners::LINUX_DEFAULT)
            .add_step(steps::checkout_repo())
            .add_step(steps::cache_rust_dependencies_namespace())
            .map(steps::install_linux_dependencies)
            .add_step(steps::setup_cargo_config(Platform::Linux))
            .add_step(steps::setup_sccache(Platform::Linux))
            .add_step(run_doctests())
            .add_step(steps::show_sccache_stats(Platform::Linux))
            .add_step(steps::cleanup_cargo_config(Platform::Linux)),
    ))
}
691
/// Runs the repository's license check and generation scripts.
fn check_licenses() -> NamedJob {
    named::job(
        Job::default()
            .runs_on(runners::LINUX_SMALL)
            .add_step(steps::checkout_repo())
            .add_step(steps::cache_rust_dependencies_namespace())
            .add_step(steps::script("./script/check-licenses"))
            .add_step(steps::script("./script/generate-licenses")),
    )
}
702
/// Builds the mdBook docs and link-checks both the markdown sources and the
/// generated HTML.
fn check_docs() -> NamedJob {
    // Link checker; `--exclude '^http'` restricts it to local/relative links.
    fn lychee_link_check(dir: &str) -> Step<Use> {
        named::uses(
            "lycheeverse",
            "lychee-action",
            "82202e5e9c2f4ef1a55a3d02563e1cb6041e5332",
        ) // v2.4.1
        .add_with(("args", format!("--no-progress --exclude '^http' '{dir}'")))
        .add_with(("fail", true))
        .add_with(("jobSummary", false))
    }

    fn install_mdbook() -> Step<Use> {
        named::uses(
            "peaceiris",
            "actions-mdbook",
            "ee69d230fe19748b7abf22df32acaa93833fad08", // v2
        )
        .with(("mdbook-version", "0.4.37"))
    }

    fn build_docs() -> Step<Run> {
        named::bash(indoc::indoc! {r#"
            mkdir -p target/deploy
            mdbook build ./docs --dest-dir=../target/deploy/docs/
        "#})
    }

    named::job(use_clang(
        release_job(&[])
            .runs_on(runners::LINUX_LARGE)
            .add_step(steps::checkout_repo())
            .add_step(steps::setup_cargo_config(Platform::Linux))
            // todo(ci): un-inline build_docs/action.yml here
            .add_step(steps::cache_rust_dependencies_namespace())
            .add_step(
                lychee_link_check("./docs/src/**/*"), // check markdown links
            )
            .map(steps::install_linux_dependencies)
            .add_step(steps::script("./script/generate-action-metadata"))
            .add_step(install_mdbook())
            .add_step(build_docs())
            .add_step(
                lychee_link_check("target/deploy/docs"), // check links in generated html
            ),
    ))
}
750
/// Lints CI tooling itself: shellcheck on repo scripts, actionlint on the
/// workflow YAML, and a check that `cargo xtask workflows` output is
/// committed.
pub(crate) fn check_scripts() -> NamedJob {
    fn download_actionlint() -> Step<Run> {
        named::bash(
            "bash <(curl https://raw.githubusercontent.com/rhysd/actionlint/main/scripts/download-actionlint.bash)",
        )
    }

    // Reads the binary path from the `get_actionlint` step's output (the id
    // is attached where download_actionlint is added below).
    fn run_actionlint() -> Step<Run> {
        named::bash(r#""$ACTIONLINT_BIN" -color"#).add_env((
            "ACTIONLINT_BIN",
            "${{ steps.get_actionlint.outputs.executable }}",
        ))
    }

    fn run_shellcheck() -> Step<Run> {
        named::bash("./script/shellcheck-scripts error")
    }

    // Regenerates the workflows and fails if .github differs from what is
    // committed, i.e. someone edited generated YAML by hand.
    fn check_xtask_workflows() -> Step<Run> {
        named::bash(indoc::indoc! {r#"
            cargo xtask workflows
            if ! git diff --exit-code .github; then
              echo "Error: .github directory has uncommitted changes after running 'cargo xtask workflows'"
              echo "Please run 'cargo xtask workflows' locally and commit the changes"
              exit 1
            fi
        "#})
    }

    named::job(
        release_job(&[])
            .runs_on(runners::LINUX_SMALL)
            .add_step(steps::checkout_repo())
            .add_step(run_shellcheck())
            .add_step(download_actionlint().id("get_actionlint"))
            .add_step(run_actionlint())
            // Uses the directly-imported helper (equivalent to the
            // `steps::`-qualified calls elsewhere in this file).
            .add_step(cache_rust_dependencies_namespace())
            .add_step(check_xtask_workflows()),
    )
}
791
/// Matrix job that invokes the reusable `extension_tests.yml` workflow once
/// per changed extension, as reported by the `orchestrate` job's
/// `changed_extensions` JSON output. Skipped entirely when that output is
/// the empty array.
fn extension_tests() -> NamedJob<UsesJob> {
    let job = Job::default()
        .needs(vec!["orchestrate".to_owned()])
        .cond(Expression::new(
            "needs.orchestrate.outputs.changed_extensions != '[]'",
        ))
        .permissions(Permissions::default().contents(Level::Read))
        .strategy(
            Strategy::default()
                .fail_fast(false)
                // TODO: Remove the limit. We currently need this to workaround the concurrency group issue
                // where different matrix jobs would be placed in the same concurrency group and thus cancelled.
                .max_parallel(1u32)
                .matrix(json!({
                    "extension": "${{ fromJson(needs.orchestrate.outputs.changed_extensions) }}"
                })),
        )
        .uses_local(".github/workflows/extension_tests.yml")
        .with(Input::default().add("working-directory", "${{ matrix.extension }}"));

    named::job(job)
}