1use gh_workflow::{
2 Container, Event, Expression, Input, Job, Level, MergeGroup, Permissions, Port, PullRequest,
3 Push, Run, Step, Strategy, Use, UsesJob, Workflow,
4};
5use indexmap::IndexMap;
6use indoc::formatdoc;
7use serde_json::json;
8
9use crate::tasks::workflows::{
10 steps::{
11 CommonJobConditions, cache_rust_dependencies_namespace, repository_owner_guard_expression,
12 use_clang,
13 },
14 vars::{self, PathCondition},
15};
16
17use super::{
18 runners::{self, Arch, Platform},
19 steps::{self, FluentBuilder, NamedJob, named, release_job},
20};
21
/// Assembles the `run_tests` CI workflow: an `orchestrate` job inspects the
/// changed files and publishes boolean outputs (one per [`PathCondition`]),
/// which the downstream jobs use as `if:` conditions. A final `tests_pass`
/// job fans in every result so branch protection only needs to require one job.
pub(crate) fn run_tests() -> Workflow {
    // Specify anything which should potentially skip full test suite in this regex:
    // - docs/
    // - script/update_top_ranking_issues/
    // - .github/ISSUE_TEMPLATE/
    // - .github/workflows/ (except the run_tests workflow itself)
    // - extensions/ (these have their own test workflow)
    let should_run_tests = PathCondition::inverted(
        "run_tests",
        r"^(docs/|script/update_top_ranking_issues/|\.github/(ISSUE_TEMPLATE|workflows/(?!run_tests))|extensions/)",
    );
    let should_check_docs = PathCondition::new("run_docs", r"^(docs/|crates/.*\.rs)");
    let should_check_scripts = PathCondition::new(
        "run_action_checks",
        r"^\.github/(workflows/|actions/|actionlint.yml)|tooling/xtask|script/",
    );
    // NOTE(review): the dot in "Cargo.lock" is unescaped, so this pattern also
    // matches e.g. "CargoXlock" — harmless in practice, but worth tightening.
    let should_check_licences =
        PathCondition::new("run_licenses", r"^(Cargo.lock|script/.*licenses)");

    // Registers every condition as an output of the orchestrate job's filter step.
    let orchestrate = orchestrate(&[
        &should_check_scripts,
        &should_check_docs,
        &should_check_licences,
        &should_run_tests,
    ]);

    let mut jobs = vec![
        orchestrate,
        check_style(),
        should_run_tests
            .and_not_in_merge_queue()
            .then(clippy(Platform::Windows, None)),
        should_run_tests
            .and_not_in_merge_queue()
            .then(clippy(Platform::Linux, None)),
        should_run_tests
            .and_not_in_merge_queue()
            .then(clippy(Platform::Mac, None)),
        should_run_tests
            .and_not_in_merge_queue()
            .then(clippy(Platform::Mac, Some(Arch::X86_64))),
        should_run_tests
            .and_not_in_merge_queue()
            .then(run_platform_tests(Platform::Windows)),
        should_run_tests
            .and_not_in_merge_queue()
            .then(run_platform_tests(Platform::Linux)),
        should_run_tests
            .and_not_in_merge_queue()
            .then(run_platform_tests(Platform::Mac)),
        should_run_tests.and_not_in_merge_queue().then(doctests()),
        should_run_tests
            .and_not_in_merge_queue()
            .then(check_workspace_binaries()),
        should_run_tests
            .and_not_in_merge_queue()
            .then(build_visual_tests_binary()),
        should_run_tests.and_not_in_merge_queue().then(check_wasm()),
        should_run_tests
            .and_not_in_merge_queue()
            .then(check_dependencies()), // could be more specific here?
        should_check_docs
            .and_not_in_merge_queue()
            .then(check_docs()),
        should_check_licences
            .and_not_in_merge_queue()
            .then(check_licenses()),
        should_check_scripts.and_always().then(check_scripts()),
    ];
    let ext_tests = extension_tests();
    // Built from `jobs` as it stands here — anything pushed afterwards is
    // excluded from tests_pass's `needs` (see the TODO below).
    let tests_pass = tests_pass(&jobs, &[&ext_tests.name]);

    // TODO: For merge queues, this should fail in the merge queue context
    jobs.push(
        should_run_tests
            .and_always()
            .then(check_postgres_and_protobuf_migrations()),
    ); // could be more specific here?

    named::workflow()
        // Trigger on pushes to main/release branches, all PRs, and merge groups.
        .add_event(
            Event::default()
                .push(
                    Push::default()
                        .add_branch("main")
                        .add_branch("v[0-9]+.[0-9]+.x"),
                )
                .pull_request(PullRequest::default().add_branch("**"))
                .merge_group(MergeGroup::default()),
        )
        .concurrency(vars::one_workflow_per_non_main_branch())
        .add_env(("CARGO_TERM_COLOR", "always"))
        .add_env(("RUST_BACKTRACE", 1))
        .add_env(("CARGO_INCREMENTAL", 0))
        .map(|mut workflow| {
            for job in jobs {
                workflow = workflow.add_job(job.name, job.job)
            }
            workflow
        })
        .add_job(ext_tests.name, ext_tests.job)
        .add_job(tests_pass.name, tests_pass.job)
}
125
/// Selects which features `orchestrate_impl` bakes into the generated script.
#[derive(Eq, PartialEq)]
enum OrchestrateTarget {
    /// Main Zed repository: emits the cargo package filter and the extension
    /// change detection; performs no working-directory scoping.
    ZedRepo,
    /// A standalone extension repository: changed-file detection is scoped to
    /// the working directory; no package filter, no extension detection.
    Extension,
}
136
// Generates a bash script that checks changed files against regex patterns
// and sets GitHub output variables accordingly
/// Orchestrate job for the main Zed repository: includes the cargo package
/// filter and extension change detection (see [`orchestrate_impl`]).
pub fn orchestrate(rules: &[&PathCondition]) -> NamedJob {
    orchestrate_impl(rules, OrchestrateTarget::ZedRepo)
}
142
/// Orchestrate job for standalone extension repositories: changed-file
/// detection is scoped to the current working directory, with no package
/// filter or extension detection (see [`orchestrate_impl`]).
pub fn orchestrate_for_extension(rules: &[&PathCondition]) -> NamedJob {
    orchestrate_impl(rules, OrchestrateTarget::Extension)
}
146
/// Builds the `orchestrate` job: a single bash step ("filter") that diffs the
/// current revision against the appropriate base, then writes one boolean
/// `$GITHUB_OUTPUT` entry per [`PathCondition`], plus — for
/// [`OrchestrateTarget::ZedRepo`] — `changed_packages` (a nextest filterset)
/// and `changed_extensions` (a JSON array) outputs.
///
/// Panics (via `assert!`) if a rule was already registered with another job,
/// or if two rules share an output name — both are programmer errors in the
/// workflow definitions.
fn orchestrate_impl(rules: &[&PathCondition], target: OrchestrateTarget) -> NamedJob {
    let name = "orchestrate".to_owned();
    let step_name = "filter".to_owned();
    let mut script = String::new();

    // Compute the revision range to diff: direct pushes compare against the
    // previous commit; PRs compare against the merge-base with the base branch.
    script.push_str(indoc::indoc! {r#"
        set -euo pipefail
        if [ -z "$GITHUB_BASE_REF" ]; then
        echo "Not in a PR context (i.e., push to main/stable/preview)"
        COMPARE_REV="$(git rev-parse HEAD~1)"
        else
        echo "In a PR context comparing to pull_request.base.ref"
        git fetch origin "$GITHUB_BASE_REF" --depth=350
        COMPARE_REV="$(git merge-base "origin/${GITHUB_BASE_REF}" HEAD)"
        fi
        CHANGED_FILES="$(git diff --name-only "$COMPARE_REV" "$GITHUB_SHA")"

    "#});

    if target == OrchestrateTarget::Extension {
        script.push_str(indoc::indoc! {r#"
            # When running from a subdirectory, git diff returns repo-root-relative paths.
            # Filter to only files within the current working directory and strip the prefix.
            REPO_SUBDIR="$(git rev-parse --show-prefix)"
            REPO_SUBDIR="${REPO_SUBDIR%/}"
            if [ -n "$REPO_SUBDIR" ]; then
            CHANGED_FILES="$(echo "$CHANGED_FILES" | grep "^${REPO_SUBDIR}/" | sed "s|^${REPO_SUBDIR}/||" || true)"
            fi

        "#});
    }

    // Helper invoked once per rule below; writes "<name>=true|false" depending
    // on whether any changed file matches the rule's pattern.
    script.push_str(indoc::indoc! {r#"
        check_pattern() {
        local output_name="$1"
        local pattern="$2"
        local grep_arg="$3"

        echo "$CHANGED_FILES" | grep "$grep_arg" "$pattern" && \
        echo "${output_name}=true" >> "$GITHUB_OUTPUT" || \
        echo "${output_name}=false" >> "$GITHUB_OUTPUT"
        }

    "#});

    let mut outputs = IndexMap::new();

    if target == OrchestrateTarget::ZedRepo {
        // Emits `changed_packages`: empty (= run everything) on direct pushes
        // or when toolchain/cargo-config/root manifests changed; otherwise a
        // nextest filterset of `rdeps(pkg)` clauses for each changed crate.
        script.push_str(indoc::indoc! {r#"
            # Check for changes that require full rebuild (no filter)
            # Direct pushes to main/stable/preview always run full suite
            if [ -z "$GITHUB_BASE_REF" ]; then
            echo "Not a PR, running full test suite"
            echo "changed_packages=" >> "$GITHUB_OUTPUT"
            elif echo "$CHANGED_FILES" | grep -qP '^(rust-toolchain\.toml|\.cargo/|\.github/|Cargo\.(toml|lock)$)'; then
            echo "Toolchain, cargo config, or root Cargo files changed, will run all tests"
            echo "changed_packages=" >> "$GITHUB_OUTPUT"
            else
            # Extract changed directories from file paths
            CHANGED_DIRS=$(echo "$CHANGED_FILES" | \
            grep -oP '^(crates|tooling)/\K[^/]+' | \
            sort -u || true)

            # Build directory-to-package mapping using cargo metadata
            DIR_TO_PKG=$(cargo metadata --format-version=1 --no-deps 2>/dev/null | \
            jq -r '.packages[] | select(.manifest_path | test("crates/|tooling/")) | "\(.manifest_path | capture("(crates|tooling)/(?<dir>[^/]+)") | .dir)=\(.name)"')

            # Map directory names to package names
            FILE_CHANGED_PKGS=""
            for dir in $CHANGED_DIRS; do
            pkg=$(echo "$DIR_TO_PKG" | grep "^${dir}=" | cut -d= -f2 | head -1)
            if [ -n "$pkg" ]; then
            FILE_CHANGED_PKGS=$(printf '%s\n%s' "$FILE_CHANGED_PKGS" "$pkg")
            else
            # Fall back to directory name if no mapping found
            FILE_CHANGED_PKGS=$(printf '%s\n%s' "$FILE_CHANGED_PKGS" "$dir")
            fi
            done
            FILE_CHANGED_PKGS=$(echo "$FILE_CHANGED_PKGS" | grep -v '^$' | sort -u || true)

            # If assets/ changed, add crates that depend on those assets
            if echo "$CHANGED_FILES" | grep -qP '^assets/'; then
            FILE_CHANGED_PKGS=$(printf '%s\n%s\n%s' "$FILE_CHANGED_PKGS" "settings" "assets" | sort -u)
            fi

            # Combine all changed packages
            ALL_CHANGED_PKGS=$(echo "$FILE_CHANGED_PKGS" | grep -v '^$' || true)

            if [ -z "$ALL_CHANGED_PKGS" ]; then
            echo "No package changes detected, will run all tests"
            echo "changed_packages=" >> "$GITHUB_OUTPUT"
            else
            # Build nextest filterset with rdeps for each package
            FILTERSET=$(echo "$ALL_CHANGED_PKGS" | \
            sed 's/.*/rdeps(&)/' | \
            tr '\n' '|' | \
            sed 's/|$//')
            echo "Changed packages filterset: $FILTERSET"
            echo "changed_packages=$FILTERSET" >> "$GITHUB_OUTPUT"
            fi
            fi

        "#});

        outputs.insert(
            "changed_packages".to_owned(),
            format!("${{{{ steps.{}.outputs.changed_packages }}}}", step_name),
        );
    }

    for rule in rules {
        // Each PathCondition may only be bound to one job; record the binding
        // and fail loudly on double registration.
        assert!(
            rule.set_by_step
                .borrow_mut()
                .replace(name.clone())
                .is_none()
        );
        // Output names must be unique across all rules.
        assert!(
            outputs
                .insert(
                    rule.name.to_owned(),
                    format!("${{{{ steps.{}.outputs.{} }}}}", step_name, rule.name)
                )
                .is_none()
        );

        // Inverted rules report "true" when something OUTSIDE the pattern changed.
        let grep_arg = if rule.invert { "-qvP" } else { "-qP" };
        script.push_str(&format!(
            "check_pattern \"{}\" '{}' {}\n",
            rule.name, rule.pattern, grep_arg
        ));
    }

    if target == OrchestrateTarget::ZedRepo {
        script.push_str(DETECT_CHANGED_EXTENSIONS_SCRIPT);
        script.push_str("echo \"changed_extensions=$EXTENSIONS_JSON\" >> \"$GITHUB_OUTPUT\"\n");

        outputs.insert(
            "changed_extensions".to_owned(),
            format!("${{{{ steps.{}.outputs.changed_extensions }}}}", step_name),
        );
    }

    let job = Job::default()
        .runs_on(runners::LINUX_SMALL)
        .with_repository_owner_guard()
        .outputs(outputs)
        // Deep history is needed so merge-base / HEAD~1 comparisons work.
        .add_step(steps::checkout_repo().with_deep_history_on_non_main())
        .add_step(Step::new(step_name.clone()).run(script).id(step_name));

    NamedJob { name, job }
}
299
300pub fn tests_pass(jobs: &[NamedJob], extra_job_names: &[&str]) -> NamedJob {
301 let mut script = String::from(indoc::indoc! {r#"
302 set +x
303 EXIT_CODE=0
304
305 check_result() {
306 echo "* $1: $2"
307 if [[ "$2" != "skipped" && "$2" != "success" ]]; then EXIT_CODE=1; fi
308 }
309
310 "#});
311
312 let all_names: Vec<&str> = jobs
313 .iter()
314 .map(|job| job.name.as_str())
315 .chain(extra_job_names.iter().copied())
316 .collect();
317
318 let env_entries: Vec<_> = all_names
319 .iter()
320 .map(|name| {
321 let env_name = format!("RESULT_{}", name.to_uppercase());
322 let env_value = format!("${{{{ needs.{}.result }}}}", name);
323 (env_name, env_value)
324 })
325 .collect();
326
327 script.push_str(
328 &all_names
329 .iter()
330 .zip(env_entries.iter())
331 .map(|(name, (env_name, _))| format!("check_result \"{}\" \"${}\"", name, env_name))
332 .collect::<Vec<_>>()
333 .join("\n"),
334 );
335
336 script.push_str("\n\nexit $EXIT_CODE\n");
337
338 let job = Job::default()
339 .runs_on(runners::LINUX_SMALL)
340 .needs(
341 all_names
342 .iter()
343 .map(|name| name.to_string())
344 .collect::<Vec<String>>(),
345 )
346 .cond(repository_owner_guard_expression(true))
347 .add_step(
348 env_entries
349 .into_iter()
350 .fold(named::bash(&script), |step, env_item| {
351 step.add_env(env_item)
352 }),
353 );
354
355 named::job(job)
356}
357
/// Bash script snippet that detects changed extension directories from `$CHANGED_FILES`.
/// Assumes `$CHANGED_FILES` is already set. Sets `$EXTENSIONS_JSON` to a JSON array of
/// changed extension paths. Callers are responsible for writing the result to `$GITHUB_OUTPUT`.
///
/// `extensions/workflows` is excluded, and extensions whose `extension.toml` no
/// longer exists on disk (i.e. deleted extensions) are filtered out, so the
/// resulting array only names extensions that can actually be tested.
pub(crate) const DETECT_CHANGED_EXTENSIONS_SCRIPT: &str = indoc::indoc! {r#"
    # Detect changed extension directories (excluding extensions/workflows)
    CHANGED_EXTENSIONS=$(echo "$CHANGED_FILES" | grep -oP '^extensions/[^/]+(?=/)' | sort -u | grep -v '^extensions/workflows$' || true)
    # Filter out deleted extensions
    EXISTING_EXTENSIONS=""
    for ext in $CHANGED_EXTENSIONS; do
    if [ -f "$ext/extension.toml" ]; then
    EXISTING_EXTENSIONS=$(printf '%s\n%s' "$EXISTING_EXTENSIONS" "$ext")
    fi
    done
    CHANGED_EXTENSIONS=$(echo "$EXISTING_EXTENSIONS" | sed '/^$/d')
    if [ -n "$CHANGED_EXTENSIONS" ]; then
    EXTENSIONS_JSON=$(echo "$CHANGED_EXTENSIONS" | jq -R -s -c 'split("\n") | map(select(length > 0))')
    else
    EXTENSIONS_JSON="[]"
    fi
"#};
378
/// Release-asset filename of the prebuilt `ts_query_ls` Linux binary fetched in CI.
const TS_QUERY_LS_FILE: &str = "ts_query_ls-x86_64-unknown-linux-gnu.tar.gz";
/// Pinned `ts_query_ls` release tag (passed to fetch-gh-release-asset's `version`).
const CI_TS_QUERY_RELEASE: &str = "tags/v3.15.1";
381
382pub(crate) fn fetch_ts_query_ls() -> Step<Use> {
383 named::uses(
384 "dsaltares",
385 "fetch-gh-release-asset",
386 "aa37ae5c44d3c9820bc12fe675e8670ecd93bd1c",
387 ) // v1.1.1
388 .add_with(("repo", "ribru17/ts_query_ls"))
389 .add_with(("version", CI_TS_QUERY_RELEASE))
390 .add_with(("file", TS_QUERY_LS_FILE))
391}
392
/// Step that unpacks the fetched `ts_query_ls` archive and runs its formatter
/// in `--check` mode over the workspace, failing (with install instructions)
/// when any tree-sitter query file is unformatted.
pub(crate) fn run_ts_query_ls() -> Step<Run> {
    named::bash(formatdoc!(
        r#"tar -xf "$GITHUB_WORKSPACE/{TS_QUERY_LS_FILE}" -C "$GITHUB_WORKSPACE"
        "$GITHUB_WORKSPACE/ts_query_ls" format --check . || {{
        echo "Found unformatted queries, please format them with ts_query_ls."
        echo "For easy use, install the Tree-sitter query extension:"
        echo "zed://extension/tree-sitter-query"
        false
        }}"#
    ))
}
404
/// Style-check job: prettier, `cargo fmt`, TODO/keymap scripts, typo
/// detection, and tree-sitter query formatting.
///
/// NOTE(review): `named::uses`/`named::job` appear to derive step/job names
/// from the enclosing function name — confirm in `steps::named` before
/// renaming or inlining the helper functions here.
fn check_style() -> NamedJob {
    fn check_for_typos() -> Step<Use> {
        named::uses(
            "crate-ci",
            "typos",
            "2d0ce569feab1f8752f1dde43cc2f2aa53236e06",
        ) // v1.40.0
        .with(("config", "./typos.toml"))
    }

    named::job(
        release_job(&[])
            .runs_on(runners::LINUX_MEDIUM)
            .add_step(steps::checkout_repo())
            .add_step(steps::cache_rust_dependencies_namespace())
            .add_step(steps::setup_pnpm())
            .add_step(steps::prettier())
            .add_step(steps::cargo_fmt())
            .add_step(steps::script("./script/check-todos"))
            .add_step(steps::script("./script/check-keymaps"))
            .add_step(check_for_typos())
            .add_step(fetch_ts_query_ls())
            .add_step(run_ts_query_ls()),
    )
}
430
/// Dependency-hygiene job: flags unused dependencies (cargo-machete), checks
/// that `Cargo.lock` is up to date (`--locked` fails if it would change), and
/// runs GitHub's dependency review on pull requests.
fn check_dependencies() -> NamedJob {
    // install-action pinned by commit.
    fn install_cargo_machete() -> Step<Use> {
        named::uses(
            "taiki-e",
            "install-action",
            "02cc5f8ca9f2301050c0c099055816a41ee05507",
        )
        .add_with(("tool", "cargo-machete@0.7.0"))
    }

    fn run_cargo_machete() -> Step<Run> {
        named::bash("cargo machete")
    }

    // `--locked` makes cargo error out if Cargo.lock is stale.
    fn check_cargo_lock() -> Step<Run> {
        named::bash("cargo update --locked --workspace")
    }

    // Only meaningful on PRs (diffs the dependency graph against the base).
    fn check_vulnerable_dependencies() -> Step<Use> {
        named::uses(
            "actions",
            "dependency-review-action",
            "67d4f4bd7a9b17a0db54d2a7519187c65e339de8", // v4
        )
        .if_condition(Expression::new("github.event_name == 'pull_request'"))
        .with(("license-check", false))
    }

    named::job(use_clang(
        release_job(&[])
            .runs_on(runners::LINUX_SMALL)
            .add_step(steps::checkout_repo())
            .add_step(steps::cache_rust_dependencies_namespace())
            .add_step(install_cargo_machete())
            .add_step(run_cargo_machete())
            .add_step(check_cargo_lock())
            .add_step(check_vulnerable_dependencies()),
    ))
}
470
/// Verifies that `gpui_platform` still compiles for `wasm32-unknown-unknown`,
/// building std itself (`-Zbuild-std`) with atomics/bulk-memory enabled.
fn check_wasm() -> NamedJob {
    // Nightly is required for `-Zbuild-std` and the rust-src component.
    fn install_nightly_wasm_toolchain() -> Step<Run> {
        named::bash(
            "rustup toolchain install nightly --component rust-src --target wasm32-unknown-unknown",
        )
    }

    fn cargo_check_wasm() -> Step<Run> {
        named::bash(concat!(
            "cargo -Zbuild-std=std,panic_abort ",
            "check --target wasm32-unknown-unknown -p gpui_platform",
        ))
        .add_env((
            "CARGO_TARGET_WASM32_UNKNOWN_UNKNOWN_RUSTFLAGS",
            "-C target-feature=+atomics,+bulk-memory,+mutable-globals",
        ))
        // NOTE(review): presumably set so the default (stable) cargo accepts
        // the -Z flag without `cargo +nightly` — confirm this is intentional.
        .add_env(("RUSTC_BOOTSTRAP", "1"))
    }

    named::job(
        release_job(&[])
            .runs_on(runners::LINUX_LARGE)
            .add_step(steps::checkout_repo())
            .add_step(steps::setup_cargo_config(Platform::Linux))
            .add_step(steps::cache_rust_dependencies_namespace())
            .add_step(install_nightly_wasm_toolchain())
            .add_step(steps::setup_sccache(Platform::Linux))
            .add_step(cargo_check_wasm())
            .add_step(steps::show_sccache_stats(Platform::Linux))
            .add_step(steps::cleanup_cargo_config(Platform::Linux)),
    )
}
503
504fn check_workspace_binaries() -> NamedJob {
505 named::job(use_clang(
506 release_job(&[])
507 .runs_on(runners::LINUX_LARGE)
508 .add_step(steps::checkout_repo())
509 .add_step(steps::setup_cargo_config(Platform::Linux))
510 .add_step(steps::cache_rust_dependencies_namespace())
511 .map(steps::install_linux_dependencies)
512 .add_step(steps::setup_sccache(Platform::Linux))
513 .add_step(steps::script("cargo build -p collab"))
514 .add_step(steps::script("cargo build --workspace --bins --examples"))
515 .add_step(steps::show_sccache_stats(Platform::Linux))
516 .add_step(steps::cleanup_cargo_config(Platform::Linux)),
517 ))
518}
519
520pub(crate) fn clippy(platform: Platform, arch: Option<Arch>) -> NamedJob {
521 let target = arch.map(|arch| match (platform, arch) {
522 (Platform::Mac, Arch::X86_64) => "x86_64-apple-darwin",
523 (Platform::Mac, Arch::AARCH64) => "aarch64-apple-darwin",
524 _ => unimplemented!("cross-arch clippy not supported for {platform}/{arch}"),
525 });
526 let runner = match platform {
527 Platform::Windows => runners::WINDOWS_DEFAULT,
528 Platform::Linux => runners::LINUX_DEFAULT,
529 Platform::Mac => runners::MAC_DEFAULT,
530 };
531 let mut job = release_job(&[])
532 .runs_on(runner)
533 .add_step(steps::checkout_repo())
534 .add_step(steps::setup_cargo_config(platform))
535 .when(
536 platform == Platform::Linux || platform == Platform::Mac,
537 |this| this.add_step(steps::cache_rust_dependencies_namespace()),
538 )
539 .when(
540 platform == Platform::Linux,
541 steps::install_linux_dependencies,
542 )
543 .when_some(target, |this, target| {
544 this.add_step(steps::install_rustup_target(target))
545 })
546 .add_step(steps::setup_sccache(platform))
547 .add_step(steps::clippy(platform, target))
548 .add_step(steps::show_sccache_stats(platform));
549 if platform == Platform::Linux {
550 job = use_clang(job);
551 }
552 let name = match arch {
553 Some(arch) => format!("clippy_{platform}_{arch}"),
554 None => format!("clippy_{platform}"),
555 };
556 NamedJob { name, job }
557}
558
/// Per-platform test job that restricts `cargo nextest` to the packages the
/// `orchestrate` job reported as changed (via its `changed_packages` output).
pub(crate) fn run_platform_tests(platform: Platform) -> NamedJob {
    run_platform_tests_impl(platform, true)
}
562
/// Per-platform test job that always runs the full suite, ignoring the
/// `orchestrate` job's changed-package filter.
pub(crate) fn run_platform_tests_no_filter(platform: Platform) -> NamedJob {
    run_platform_tests_impl(platform, false)
}
566
/// Shared builder for the `run_tests_<platform>` jobs.
///
/// Linux additionally gets a health-checked `postgres:15` service container
/// (presumably for the database-backed tests — confirm against the collab
/// test suite) and the clang toolchain via `use_clang`. When
/// `filter_packages` is true, `cargo nextest` is restricted via the
/// `orchestrate` job's `changed_packages` filterset; otherwise everything runs.
fn run_platform_tests_impl(platform: Platform, filter_packages: bool) -> NamedJob {
    let runner = match platform {
        Platform::Windows => runners::WINDOWS_DEFAULT,
        Platform::Linux => runners::LINUX_DEFAULT,
        Platform::Mac => runners::MAC_DEFAULT,
    };
    NamedJob {
        name: format!("run_tests_{platform}"),
        job: release_job(&[])
            .runs_on(runner)
            .when(platform == Platform::Linux, |job| {
                job.add_service(
                    "postgres",
                    Container::new("postgres:15")
                        .add_env(("POSTGRES_HOST_AUTH_METHOD", "trust"))
                        .ports(vec![Port::Name("5432:5432".into())])
                        .options(
                            "--health-cmd pg_isready \
                            --health-interval 500ms \
                            --health-timeout 5s \
                            --health-retries 10",
                        ),
                )
            })
            .add_step(steps::checkout_repo())
            .add_step(steps::setup_cargo_config(platform))
            .when(platform == Platform::Mac, |this| {
                this.add_step(steps::cache_rust_dependencies_namespace())
            })
            .when(platform == Platform::Linux, |this| {
                use_clang(this.add_step(steps::cache_rust_dependencies_namespace()))
            })
            .when(
                platform == Platform::Linux,
                steps::install_linux_dependencies,
            )
            .add_step(steps::setup_node())
            // Windows is excluded here; presumably nextest is preinstalled or
            // unused there — TODO confirm.
            .when(
                platform == Platform::Linux || platform == Platform::Mac,
                |job| job.add_step(steps::cargo_install_nextest()),
            )
            .add_step(steps::clear_target_dir_if_large(platform))
            .add_step(steps::setup_sccache(platform))
            .when(filter_packages, |job| {
                job.add_step(
                    steps::cargo_nextest(platform).with_changed_packages_filter("orchestrate"),
                )
            })
            .when(!filter_packages, |job| {
                job.add_step(steps::cargo_nextest(platform))
            })
            .add_step(steps::show_sccache_stats(platform))
            .add_step(steps::cleanup_cargo_config(platform)),
    }
}
622
623fn build_visual_tests_binary() -> NamedJob {
624 pub fn cargo_build_visual_tests() -> Step<Run> {
625 named::bash("cargo build -p zed --bin zed_visual_test_runner --features visual-tests")
626 }
627
628 named::job(
629 Job::default()
630 .runs_on(runners::MAC_DEFAULT)
631 .add_step(steps::checkout_repo())
632 .add_step(steps::setup_cargo_config(Platform::Mac))
633 .add_step(steps::cache_rust_dependencies_namespace())
634 .add_step(cargo_build_visual_tests())
635 .add_step(steps::cleanup_cargo_config(Platform::Mac)),
636 )
637}
638
/// Guards against breaking protobuf changes: compares `crates/proto` against
/// the PR base branch (or `origin/main` merge-base on pushes) with
/// `buf breaking`, then lints and format-checks the proto files.
pub(crate) fn check_postgres_and_protobuf_migrations() -> NamedJob {
    // Establishes $BUF_BASE_BRANCH for the breaking-change comparison; in PR
    // contexts, merges the base branch into a temp branch first so the diff
    // reflects the post-merge state.
    fn ensure_fresh_merge() -> Step<Run> {
        named::bash(indoc::indoc! {r#"
            if [ -z "$GITHUB_BASE_REF" ];
            then
            echo "BUF_BASE_BRANCH=$(git merge-base origin/main HEAD)" >> "$GITHUB_ENV"
            else
            git checkout -B temp
            git merge -q "origin/$GITHUB_BASE_REF" -m "merge main into temp"
            echo "BUF_BASE_BRANCH=$GITHUB_BASE_REF" >> "$GITHUB_ENV"
            fi
        "#})
    }

    fn bufbuild_setup_action() -> Step<Use> {
        named::uses("bufbuild", "buf-setup-action", "v1")
            .add_with(("version", "v1.29.0"))
            .add_with(("github_token", vars::GITHUB_TOKEN))
    }

    // Compares the proto dir against the same dir at $BUF_BASE_BRANCH.
    fn bufbuild_breaking_action() -> Step<Use> {
        named::uses("bufbuild", "buf-breaking-action", "v1")
            .add_with(("input", "crates/proto/proto/"))
            .add_with(("against", "https://github.com/${GITHUB_REPOSITORY}.git#branch=${BUF_BASE_BRANCH},subdir=crates/proto/proto/"))
    }

    fn buf_lint() -> Step<Run> {
        named::bash("buf lint crates/proto/proto")
    }

    fn check_protobuf_formatting() -> Step<Run> {
        named::bash("buf format --diff --exit-code crates/proto/proto")
    }

    named::job(
        release_job(&[])
            .runs_on(runners::LINUX_DEFAULT)
            // Identity for the merge commit created by ensure_fresh_merge.
            .add_env(("GIT_AUTHOR_NAME", "Protobuf Action"))
            .add_env(("GIT_AUTHOR_EMAIL", "ci@zed.dev"))
            .add_env(("GIT_COMMITTER_NAME", "Protobuf Action"))
            .add_env(("GIT_COMMITTER_EMAIL", "ci@zed.dev"))
            // Full history is required for merge-base and branch comparisons.
            .add_step(steps::checkout_repo().with_full_history())
            .add_step(ensure_fresh_merge())
            .add_step(bufbuild_setup_action())
            .add_step(bufbuild_breaking_action())
            .add_step(buf_lint())
            .add_step(check_protobuf_formatting()),
    )
}
687
/// Runs all workspace documentation tests (`cargo test --doc`) on Linux,
/// continuing past failures so every broken doctest is reported.
fn doctests() -> NamedJob {
    fn run_doctests() -> Step<Run> {
        named::bash(indoc::indoc! {r#"
            cargo test --workspace --doc --no-fail-fast
        "#})
        .id("run_doctests")
    }

    named::job(use_clang(
        release_job(&[])
            .runs_on(runners::LINUX_DEFAULT)
            .add_step(steps::checkout_repo())
            .add_step(steps::cache_rust_dependencies_namespace())
            .map(steps::install_linux_dependencies)
            .add_step(steps::setup_cargo_config(Platform::Linux))
            .add_step(steps::setup_sccache(Platform::Linux))
            .add_step(run_doctests())
            .add_step(steps::show_sccache_stats(Platform::Linux))
            .add_step(steps::cleanup_cargo_config(Platform::Linux)),
    ))
}
709
710fn check_licenses() -> NamedJob {
711 named::job(
712 Job::default()
713 .runs_on(runners::LINUX_SMALL)
714 .add_step(steps::checkout_repo())
715 .add_step(steps::cache_rust_dependencies_namespace())
716 .add_step(steps::script("./script/check-licenses"))
717 .add_step(steps::script("./script/generate-licenses")),
718 )
719}
720
/// Documentation job: link-checks the markdown sources, builds the mdBook
/// docs into `target/deploy/docs`, then link-checks the generated HTML too.
fn check_docs() -> NamedJob {
    // lychee link checker, pinned by commit; external http(s) links excluded.
    fn lychee_link_check(dir: &str) -> Step<Use> {
        named::uses(
            "lycheeverse",
            "lychee-action",
            "82202e5e9c2f4ef1a55a3d02563e1cb6041e5332",
        ) // v2.4.1
        .add_with(("args", format!("--no-progress --exclude '^http' '{dir}'")))
        .add_with(("fail", true))
        .add_with(("jobSummary", false))
    }

    fn install_mdbook() -> Step<Use> {
        named::uses(
            "peaceiris",
            "actions-mdbook",
            "ee69d230fe19748b7abf22df32acaa93833fad08", // v2
        )
        .with(("mdbook-version", "0.4.37"))
    }

    fn build_docs() -> Step<Run> {
        named::bash(indoc::indoc! {r#"
            mkdir -p target/deploy
            mdbook build ./docs --dest-dir=../target/deploy/docs/
        "#})
    }

    named::job(use_clang(
        release_job(&[])
            .runs_on(runners::LINUX_LARGE)
            .add_step(steps::checkout_repo())
            .add_step(steps::setup_cargo_config(Platform::Linux))
            // todo(ci): un-inline build_docs/action.yml here
            .add_step(steps::cache_rust_dependencies_namespace())
            .add_step(
                lychee_link_check("./docs/src/**/*"), // check markdown links
            )
            .map(steps::install_linux_dependencies)
            .add_step(steps::script("./script/generate-action-metadata"))
            .add_step(install_mdbook())
            .add_step(build_docs())
            .add_step(
                lychee_link_check("target/deploy/docs"), // check links in generated html
            ),
    ))
}
768
/// Script/workflow hygiene job: shellchecks repo scripts, runs actionlint on
/// the workflows, and verifies the committed `.github` files match what
/// `cargo xtask workflows` would regenerate.
pub(crate) fn check_scripts() -> NamedJob {
    // Installs actionlint; the step's `get_actionlint` id exposes the binary
    // path as an output consumed by run_actionlint below.
    fn download_actionlint() -> Step<Run> {
        named::bash(
            "bash <(curl https://raw.githubusercontent.com/rhysd/actionlint/main/scripts/download-actionlint.bash)",
        )
    }

    fn run_actionlint() -> Step<Run> {
        named::bash(r#""$ACTIONLINT_BIN" -color"#).add_env((
            "ACTIONLINT_BIN",
            "${{ steps.get_actionlint.outputs.executable }}",
        ))
    }

    fn run_shellcheck() -> Step<Run> {
        named::bash("./script/shellcheck-scripts error")
    }

    // Regenerates the workflows and fails if the working tree changes,
    // i.e. if someone edited .github by hand or forgot to regenerate.
    fn check_xtask_workflows() -> Step<Run> {
        named::bash(indoc::indoc! {r#"
            cargo xtask workflows
            if ! git diff --exit-code .github; then
            echo "Error: .github directory has uncommitted changes after running 'cargo xtask workflows'"
            echo "Please run 'cargo xtask workflows' locally and commit the changes"
            exit 1
            fi
        "#})
    }

    named::job(
        release_job(&[])
            .runs_on(runners::LINUX_SMALL)
            .add_step(steps::checkout_repo())
            .add_step(run_shellcheck())
            .add_step(download_actionlint().id("get_actionlint"))
            .add_step(run_actionlint())
            .add_step(cache_rust_dependencies_namespace())
            .add_step(check_xtask_workflows()),
    )
}
809
810fn extension_tests() -> NamedJob<UsesJob> {
811 let job = Job::default()
812 .needs(vec!["orchestrate".to_owned()])
813 .cond(Expression::new(
814 "needs.orchestrate.outputs.changed_extensions != '[]'",
815 ))
816 .permissions(Permissions::default().contents(Level::Read))
817 .strategy(
818 Strategy::default()
819 .fail_fast(false)
820 // TODO: Remove the limit. We currently need this to workaround the concurrency group issue
821 // where different matrix jobs would be placed in the same concurrency group and thus cancelled.
822 .max_parallel(1u32)
823 .matrix(json!({
824 "extension": "${{ fromJson(needs.orchestrate.outputs.changed_extensions) }}"
825 })),
826 )
827 .uses_local(".github/workflows/extension_tests.yml")
828 .with(Input::default().add("working-directory", "${{ matrix.extension }}"));
829
830 named::job(job)
831}