run_tests.rs

  1use gh_workflow::{
  2    Container, Event, Expression, Input, Job, Level, MergeGroup, Permissions, Port, PullRequest,
  3    Push, Run, Step, Strategy, Use, UsesJob, Workflow,
  4};
  5use indexmap::IndexMap;
  6use indoc::formatdoc;
  7use serde_json::json;
  8
  9use crate::tasks::workflows::{
 10    steps::{
 11        CommonJobConditions, cache_rust_dependencies_namespace, repository_owner_guard_expression,
 12        use_clang,
 13    },
 14    vars::{self, PathCondition},
 15};
 16
 17use super::{
 18    runners::{self, Arch, Platform},
 19    steps::{self, FluentBuilder, NamedJob, named, release_job},
 20};
 21
/// Builds the main `run_tests` CI workflow.
///
/// An `orchestrate` job inspects the changed files once and publishes a
/// boolean output per area; each downstream job is gated on the matching
/// output so unrelated changes (docs-only, etc.) can skip expensive work.
pub(crate) fn run_tests() -> Workflow {
    // Specify anything which should potentially skip full test suite in this regex:
    // - docs/
    // - script/update_top_ranking_issues/
    // - .github/ISSUE_TEMPLATE/
    // - .github/workflows/  (except .github/workflows/run_tests*, which must still run tests)
    // - extensions/  (these have their own test workflow)
    let should_run_tests = PathCondition::inverted(
        "run_tests",
        r"^(docs/|script/update_top_ranking_issues/|\.github/(ISSUE_TEMPLATE|workflows/(?!run_tests))|extensions/)",
    );
    let should_check_docs = PathCondition::new("run_docs", r"^(docs/|crates/.*\.rs)");
    let should_check_scripts = PathCondition::new(
        "run_action_checks",
        r"^\.github/(workflows/|actions/|actionlint.yml)|tooling/xtask|script/",
    );
    let should_check_licences =
        PathCondition::new("run_licenses", r"^(Cargo.lock|script/.*licenses)");

    // Evaluates every rule above in one job; downstream conditions read its
    // outputs via the `needs` context.
    let orchestrate = orchestrate(&[
        &should_check_scripts,
        &should_check_docs,
        &should_check_licences,
        &should_run_tests,
    ]);

    let mut jobs = vec![
        orchestrate,
        check_style(),
        should_run_tests
            .and_not_in_merge_queue()
            .then(clippy(Platform::Windows, None)),
        should_run_tests
            .and_not_in_merge_queue()
            .then(clippy(Platform::Linux, None)),
        should_run_tests
            .and_not_in_merge_queue()
            .then(clippy(Platform::Mac, None)),
        should_run_tests
            .and_not_in_merge_queue()
            .then(clippy(Platform::Mac, Some(Arch::X86_64))),
        should_run_tests
            .and_not_in_merge_queue()
            .then(run_platform_tests(Platform::Windows)),
        should_run_tests
            .and_not_in_merge_queue()
            .then(run_platform_tests(Platform::Linux)),
        should_run_tests
            .and_not_in_merge_queue()
            .then(run_platform_tests(Platform::Mac)),
        should_run_tests.and_not_in_merge_queue().then(doctests()),
        should_run_tests
            .and_not_in_merge_queue()
            .then(check_workspace_binaries()),
        should_run_tests
            .and_not_in_merge_queue()
            .then(build_visual_tests_binary()),
        should_run_tests.and_not_in_merge_queue().then(check_wasm()),
        should_run_tests
            .and_not_in_merge_queue()
            .then(check_dependencies()), // could be more specific here?
        should_check_docs
            .and_not_in_merge_queue()
            .then(check_docs()),
        should_check_licences
            .and_not_in_merge_queue()
            .then(check_licenses()),
        should_check_scripts.and_always().then(check_scripts()),
    ];
    let ext_tests = extension_tests();
    // Note: `tests_pass` is computed BEFORE the postgres/protobuf job is
    // pushed, so that job is deliberately excluded from the final gate.
    let tests_pass = tests_pass(&jobs, &[&ext_tests.name]);

    // TODO: For merge queues, this should fail in the merge queue context
    jobs.push(
        should_run_tests
            .and_always()
            .then(check_postgres_and_protobuf_migrations()),
    ); // could be more specific here?

    named::workflow()
        .add_event(
            Event::default()
                .push(
                    Push::default()
                        .add_branch("main")
                        .add_branch("v[0-9]+.[0-9]+.x"),
                )
                .pull_request(PullRequest::default().add_branch("**"))
                .merge_group(MergeGroup::default()),
        )
        .concurrency(vars::one_workflow_per_non_main_branch())
        .add_env(("CARGO_TERM_COLOR", "always"))
        .add_env(("RUST_BACKTRACE", 1))
        .add_env(("CARGO_INCREMENTAL", 0))
        .map(|mut workflow| {
            for job in jobs {
                workflow = workflow.add_job(job.name, job.job)
            }
            workflow
        })
        .add_job(ext_tests.name, ext_tests.job)
        .add_job(tests_pass.name, tests_pass.job)
}
125
/// Controls which features `orchestrate_impl` includes in the generated script.
// `PartialEq`/`Eq` are derived so `orchestrate_impl` can compare with `==`.
#[derive(PartialEq, Eq)]
enum OrchestrateTarget {
    /// For the main Zed repo: includes the cargo package filter and extension
    /// change detection, but no working-directory scoping.
    ZedRepo,
    /// For individual extension repos: scopes changed-file detection to the
    /// working directory, with no package filter or extension detection.
    Extension,
}
136
/// Generates the `orchestrate` job for the main Zed repo: a bash script that
/// checks changed files against each rule's regex and sets GitHub output
/// variables accordingly (plus package and extension change detection).
pub fn orchestrate(rules: &[&PathCondition]) -> NamedJob {
    orchestrate_impl(rules, OrchestrateTarget::ZedRepo)
}
142
/// Like [`orchestrate`], but for standalone extension repos: changed-file
/// detection is scoped to the working directory, and the package-filter and
/// extension-detection outputs are omitted.
pub fn orchestrate_for_extension(rules: &[&PathCondition]) -> NamedJob {
    orchestrate_impl(rules, OrchestrateTarget::Extension)
}
146
/// Builds the `orchestrate` job: a single bash step that diffs the current
/// revision against the PR merge base (or `HEAD~1` outside a PR), evaluates
/// each [`PathCondition`] against the changed-file list, and writes a
/// `true`/`false` output per rule. For [`OrchestrateTarget::ZedRepo`] it also
/// emits a `changed_packages` nextest filterset and a `changed_extensions`
/// JSON array.
fn orchestrate_impl(rules: &[&PathCondition], target: OrchestrateTarget) -> NamedJob {
    let name = "orchestrate".to_owned();
    let step_name = "filter".to_owned();
    let mut script = String::new();

    // Prelude: compute COMPARE_REV and the CHANGED_FILES list.
    script.push_str(indoc::indoc! {r#"
        set -euo pipefail
        if [ -z "$GITHUB_BASE_REF" ]; then
          echo "Not in a PR context (i.e., push to main/stable/preview)"
          COMPARE_REV="$(git rev-parse HEAD~1)"
        else
          echo "In a PR context comparing to pull_request.base.ref"
          git fetch origin "$GITHUB_BASE_REF" --depth=350
          COMPARE_REV="$(git merge-base "origin/${GITHUB_BASE_REF}" HEAD)"
        fi
        CHANGED_FILES="$(git diff --name-only "$COMPARE_REV" "$GITHUB_SHA")"

    "#});

    if target == OrchestrateTarget::Extension {
        script.push_str(indoc::indoc! {r#"
        # When running from a subdirectory, git diff returns repo-root-relative paths.
        # Filter to only files within the current working directory and strip the prefix.
        REPO_SUBDIR="$(git rev-parse --show-prefix)"
        REPO_SUBDIR="${REPO_SUBDIR%/}"
        if [ -n "$REPO_SUBDIR" ]; then
            CHANGED_FILES="$(echo "$CHANGED_FILES" | grep "^${REPO_SUBDIR}/" | sed "s|^${REPO_SUBDIR}/||" || true)"
        fi

    "#});
    }

    // Shell helper invoked once per rule below: writes `<name>=true|false`
    // to $GITHUB_OUTPUT depending on whether the pattern matches.
    script.push_str(indoc::indoc! {r#"
        check_pattern() {
          local output_name="$1"
          local pattern="$2"
          local grep_arg="$3"

          echo "$CHANGED_FILES" | grep "$grep_arg" "$pattern" && \
            echo "${output_name}=true" >> "$GITHUB_OUTPUT" || \
            echo "${output_name}=false" >> "$GITHUB_OUTPUT"
        }

    "#});

    let mut outputs = IndexMap::new();

    if target == OrchestrateTarget::ZedRepo {
        // Computes the nextest filterset for changed packages; an empty
        // `changed_packages` output means "run the full suite".
        script.push_str(indoc::indoc! {r#"
        # Check for changes that require full rebuild (no filter)
        # Direct pushes to main/stable/preview always run full suite
        if [ -z "$GITHUB_BASE_REF" ]; then
          echo "Not a PR, running full test suite"
          echo "changed_packages=" >> "$GITHUB_OUTPUT"
        elif echo "$CHANGED_FILES" | grep -qP '^(rust-toolchain\.toml|\.cargo/|\.github/|Cargo\.(toml|lock)$)'; then
          echo "Toolchain, cargo config, or root Cargo files changed, will run all tests"
          echo "changed_packages=" >> "$GITHUB_OUTPUT"
        else
          # Extract changed directories from file paths
          CHANGED_DIRS=$(echo "$CHANGED_FILES" | \
            grep -oP '^(crates|tooling)/\K[^/]+' | \
            sort -u || true)

          # Build directory-to-package mapping using cargo metadata
          DIR_TO_PKG=$(cargo metadata --format-version=1 --no-deps 2>/dev/null | \
            jq -r '.packages[] | select(.manifest_path | test("crates/|tooling/")) | "\(.manifest_path | capture("(crates|tooling)/(?<dir>[^/]+)") | .dir)=\(.name)"')

          # Map directory names to package names
          FILE_CHANGED_PKGS=""
          for dir in $CHANGED_DIRS; do
            pkg=$(echo "$DIR_TO_PKG" | grep "^${dir}=" | cut -d= -f2 | head -1)
            if [ -n "$pkg" ]; then
              FILE_CHANGED_PKGS=$(printf '%s\n%s' "$FILE_CHANGED_PKGS" "$pkg")
            else
              # Fall back to directory name if no mapping found
              FILE_CHANGED_PKGS=$(printf '%s\n%s' "$FILE_CHANGED_PKGS" "$dir")
            fi
          done
          FILE_CHANGED_PKGS=$(echo "$FILE_CHANGED_PKGS" | grep -v '^$' | sort -u || true)

          # If assets/ changed, add crates that depend on those assets
          if echo "$CHANGED_FILES" | grep -qP '^assets/'; then
            FILE_CHANGED_PKGS=$(printf '%s\n%s\n%s' "$FILE_CHANGED_PKGS" "settings" "assets" | sort -u)
          fi

          # Combine all changed packages
          ALL_CHANGED_PKGS=$(echo "$FILE_CHANGED_PKGS" | grep -v '^$' || true)

          if [ -z "$ALL_CHANGED_PKGS" ]; then
            echo "No package changes detected, will run all tests"
            echo "changed_packages=" >> "$GITHUB_OUTPUT"
          else
            # Build nextest filterset with rdeps for each package
            FILTERSET=$(echo "$ALL_CHANGED_PKGS" | \
              sed 's/.*/rdeps(&)/' | \
              tr '\n' '|' | \
              sed 's/|$//')
            echo "Changed packages filterset: $FILTERSET"
            echo "changed_packages=$FILTERSET" >> "$GITHUB_OUTPUT"
          fi
        fi

    "#});

        outputs.insert(
            "changed_packages".to_owned(),
            format!("${{{{ steps.{}.outputs.changed_packages }}}}", step_name),
        );
    }

    for rule in rules {
        // Each rule may be wired to at most one orchestrate job; record the
        // owning job and panic on double registration.
        assert!(
            rule.set_by_step
                .borrow_mut()
                .replace(name.clone())
                .is_none()
        );
        // Output names must be unique across rules.
        assert!(
            outputs
                .insert(
                    rule.name.to_owned(),
                    format!("${{{{ steps.{}.outputs.{} }}}}", step_name, rule.name)
                )
                .is_none()
        );

        // Inverted rules fire when any changed file is OUTSIDE the pattern
        // (grep -v); both variants use quiet PCRE matching.
        let grep_arg = if rule.invert { "-qvP" } else { "-qP" };
        script.push_str(&format!(
            "check_pattern \"{}\" '{}' {}\n",
            rule.name, rule.pattern, grep_arg
        ));
    }

    if target == OrchestrateTarget::ZedRepo {
        script.push_str(DETECT_CHANGED_EXTENSIONS_SCRIPT);
        script.push_str("echo \"changed_extensions=$EXTENSIONS_JSON\" >> \"$GITHUB_OUTPUT\"\n");

        outputs.insert(
            "changed_extensions".to_owned(),
            format!("${{{{ steps.{}.outputs.changed_extensions }}}}", step_name),
        );
    }

    let job = Job::default()
        .runs_on(runners::LINUX_SMALL)
        .with_repository_owner_guard()
        .outputs(outputs)
        // Deep history is required so merge-base/HEAD~1 diffs work.
        .add_step(steps::checkout_repo().with_deep_history_on_non_main())
        .add_step(Step::new(step_name.clone()).run(script).id(step_name));

    NamedJob { name, job }
}
299
300pub fn tests_pass(jobs: &[NamedJob], extra_job_names: &[&str]) -> NamedJob {
301    let mut script = String::from(indoc::indoc! {r#"
302        set +x
303        EXIT_CODE=0
304
305        check_result() {
306          echo "* $1: $2"
307          if [[ "$2" != "skipped" && "$2" != "success" ]]; then EXIT_CODE=1; fi
308        }
309
310    "#});
311
312    let all_names: Vec<&str> = jobs
313        .iter()
314        .map(|job| job.name.as_str())
315        .chain(extra_job_names.iter().copied())
316        .collect();
317
318    let env_entries: Vec<_> = all_names
319        .iter()
320        .map(|name| {
321            let env_name = format!("RESULT_{}", name.to_uppercase());
322            let env_value = format!("${{{{ needs.{}.result }}}}", name);
323            (env_name, env_value)
324        })
325        .collect();
326
327    script.push_str(
328        &all_names
329            .iter()
330            .zip(env_entries.iter())
331            .map(|(name, (env_name, _))| format!("check_result \"{}\" \"${}\"", name, env_name))
332            .collect::<Vec<_>>()
333            .join("\n"),
334    );
335
336    script.push_str("\n\nexit $EXIT_CODE\n");
337
338    let job = Job::default()
339        .runs_on(runners::LINUX_SMALL)
340        .needs(
341            all_names
342                .iter()
343                .map(|name| name.to_string())
344                .collect::<Vec<String>>(),
345        )
346        .cond(repository_owner_guard_expression(true))
347        .add_step(
348            env_entries
349                .into_iter()
350                .fold(named::bash(&script), |step, env_item| {
351                    step.add_env(env_item)
352                }),
353        );
354
355    named::job(job)
356}
357
/// Bash script snippet that detects changed extension directories from `$CHANGED_FILES`.
/// Assumes `$CHANGED_FILES` is already set. Sets `$EXTENSIONS_JSON` to a JSON array of
/// changed extension paths (deleted extensions — no `extension.toml` — are filtered
/// out, as is `extensions/workflows`). Callers are responsible for writing the
/// result to `$GITHUB_OUTPUT`.
pub(crate) const DETECT_CHANGED_EXTENSIONS_SCRIPT: &str = indoc::indoc! {r#"
    # Detect changed extension directories (excluding extensions/workflows)
    CHANGED_EXTENSIONS=$(echo "$CHANGED_FILES" | grep -oP '^extensions/[^/]+(?=/)' | sort -u | grep -v '^extensions/workflows$' || true)
    # Filter out deleted extensions
    EXISTING_EXTENSIONS=""
    for ext in $CHANGED_EXTENSIONS; do
        if [ -f "$ext/extension.toml" ]; then
            EXISTING_EXTENSIONS=$(printf '%s\n%s' "$EXISTING_EXTENSIONS" "$ext")
        fi
    done
    CHANGED_EXTENSIONS=$(echo "$EXISTING_EXTENSIONS" | sed '/^$/d')
    if [ -n "$CHANGED_EXTENSIONS" ]; then
        EXTENSIONS_JSON=$(echo "$CHANGED_EXTENSIONS" | jq -R -s -c 'split("\n") | map(select(length > 0))')
    else
        EXTENSIONS_JSON="[]"
    fi
"#};
378
// Release asset filename for the prebuilt ts_query_ls Linux binary.
const TS_QUERY_LS_FILE: &str = "ts_query_ls-x86_64-unknown-linux-gnu.tar.gz";
// Pinned ts_query_ls release tag fetched in CI.
const CI_TS_QUERY_RELEASE: &str = "tags/v3.15.1";
381
382pub(crate) fn fetch_ts_query_ls() -> Step<Use> {
383    named::uses(
384        "dsaltares",
385        "fetch-gh-release-asset",
386        "aa37ae5c44d3c9820bc12fe675e8670ecd93bd1c",
387    ) // v1.1.1
388    .add_with(("repo", "ribru17/ts_query_ls"))
389    .add_with(("version", CI_TS_QUERY_RELEASE))
390    .add_with(("file", TS_QUERY_LS_FILE))
391}
392
393pub(crate) fn run_ts_query_ls() -> Step<Run> {
394    named::bash(formatdoc!(
395        r#"tar -xf "$GITHUB_WORKSPACE/{TS_QUERY_LS_FILE}" -C "$GITHUB_WORKSPACE"
396        "$GITHUB_WORKSPACE/ts_query_ls" format --check . || {{
397            echo "Found unformatted queries, please format them with ts_query_ls."
398            echo "For easy use, install the Tree-sitter query extension:"
399            echo "zed://extension/tree-sitter-query"
400            false
401        }}"#
402    ))
403}
404
/// Style-check job: prettier, rustfmt, TODO/keymap scripts, typo detection,
/// and tree-sitter query formatting via `ts_query_ls`.
fn check_style() -> NamedJob {
    fn check_for_typos() -> Step<Use> {
        named::uses(
            "crate-ci",
            "typos",
            "2d0ce569feab1f8752f1dde43cc2f2aa53236e06",
        ) // v1.40.0
        .with(("config", "./typos.toml"))
    }

    named::job(
        release_job(&[])
            .runs_on(runners::LINUX_MEDIUM)
            .add_step(steps::checkout_repo())
            .add_step(steps::cache_rust_dependencies_namespace())
            // pnpm is needed for the prettier step below.
            .add_step(steps::setup_pnpm())
            .add_step(steps::prettier())
            .add_step(steps::cargo_fmt())
            .add_step(steps::script("./script/check-todos"))
            .add_step(steps::script("./script/check-keymaps"))
            .add_step(check_for_typos())
            .add_step(fetch_ts_query_ls())
            .add_step(run_ts_query_ls()),
    )
}
430
/// Dependency-hygiene job: unused dependencies (cargo-machete), an up-to-date
/// Cargo.lock, and vulnerable-dependency review on pull requests.
fn check_dependencies() -> NamedJob {
    fn install_cargo_machete() -> Step<Use> {
        // taiki-e/install-action pinned by commit SHA.
        // NOTE(review): unlike other pinned actions in this file, the release
        // tag for this SHA is not recorded — worth adding a version comment.
        named::uses(
            "taiki-e",
            "install-action",
            "02cc5f8ca9f2301050c0c099055816a41ee05507",
        )
        .add_with(("tool", "cargo-machete@0.7.0"))
    }

    fn run_cargo_machete() -> Step<Run> {
        named::bash("cargo machete")
    }

    // `--locked` makes cargo fail if Cargo.lock would need to change.
    fn check_cargo_lock() -> Step<Run> {
        named::bash("cargo update --locked --workspace")
    }

    fn check_vulnerable_dependencies() -> Step<Use> {
        named::uses(
            "actions",
            "dependency-review-action",
            "67d4f4bd7a9b17a0db54d2a7519187c65e339de8", // v4
        )
        // The review action only has a diff to inspect on pull requests.
        .if_condition(Expression::new("github.event_name == 'pull_request'"))
        .with(("license-check", false))
    }

    named::job(use_clang(
        release_job(&[])
            .runs_on(runners::LINUX_SMALL)
            .add_step(steps::checkout_repo())
            .add_step(steps::cache_rust_dependencies_namespace())
            .add_step(install_cargo_machete())
            .add_step(run_cargo_machete())
            .add_step(check_cargo_lock())
            .add_step(check_vulnerable_dependencies()),
    ))
}
470
/// Job verifying that `gpui_platform` still compiles for
/// `wasm32-unknown-unknown` using a nightly toolchain with `-Zbuild-std` and
/// atomics-related target features enabled.
fn check_wasm() -> NamedJob {
    fn install_nightly_wasm_toolchain() -> Step<Run> {
        named::bash(
            "rustup toolchain install nightly --component rust-src --target wasm32-unknown-unknown",
        )
    }

    fn cargo_check_wasm() -> Step<Run> {
        named::bash(concat!(
            "cargo +nightly -Zbuild-std=std,panic_abort ",
            "check --target wasm32-unknown-unknown -p gpui_platform",
        ))
        // Target features required for the wasm threading model.
        .add_env((
            "CARGO_TARGET_WASM32_UNKNOWN_UNKNOWN_RUSTFLAGS",
            "-C target-feature=+atomics,+bulk-memory,+mutable-globals",
        ))
    }

    named::job(
        release_job(&[])
            .runs_on(runners::LINUX_LARGE)
            .add_step(steps::checkout_repo())
            .add_step(steps::setup_cargo_config(Platform::Linux))
            .add_step(steps::cache_rust_dependencies_namespace())
            .add_step(install_nightly_wasm_toolchain())
            .add_step(steps::setup_sccache(Platform::Linux))
            .add_step(cargo_check_wasm())
            .add_step(steps::show_sccache_stats(Platform::Linux))
            .add_step(steps::cleanup_cargo_config(Platform::Linux)),
    )
}
502
/// Job ensuring every workspace binary and example (plus `collab`) still
/// builds on Linux, without running any tests.
fn check_workspace_binaries() -> NamedJob {
    named::job(use_clang(
        release_job(&[])
            .runs_on(runners::LINUX_LARGE)
            .add_step(steps::checkout_repo())
            .add_step(steps::setup_cargo_config(Platform::Linux))
            .add_step(steps::cache_rust_dependencies_namespace())
            .map(steps::install_linux_dependencies)
            .add_step(steps::setup_sccache(Platform::Linux))
            .add_step(steps::script("cargo build -p collab"))
            .add_step(steps::script("cargo build --workspace --bins --examples"))
            .add_step(steps::show_sccache_stats(Platform::Linux))
            .add_step(steps::cleanup_cargo_config(Platform::Linux)),
    ))
}
518
519pub(crate) fn clippy(platform: Platform, arch: Option<Arch>) -> NamedJob {
520    let target = arch.map(|arch| match (platform, arch) {
521        (Platform::Mac, Arch::X86_64) => "x86_64-apple-darwin",
522        (Platform::Mac, Arch::AARCH64) => "aarch64-apple-darwin",
523        _ => unimplemented!("cross-arch clippy not supported for {platform}/{arch}"),
524    });
525    let runner = match platform {
526        Platform::Windows => runners::WINDOWS_DEFAULT,
527        Platform::Linux => runners::LINUX_DEFAULT,
528        Platform::Mac => runners::MAC_DEFAULT,
529    };
530    let mut job = release_job(&[])
531        .runs_on(runner)
532        .add_step(steps::checkout_repo())
533        .add_step(steps::setup_cargo_config(platform))
534        .when(
535            platform == Platform::Linux || platform == Platform::Mac,
536            |this| this.add_step(steps::cache_rust_dependencies_namespace()),
537        )
538        .when(
539            platform == Platform::Linux,
540            steps::install_linux_dependencies,
541        )
542        .when_some(target, |this, target| {
543            this.add_step(steps::install_rustup_target(target))
544        })
545        .add_step(steps::setup_sccache(platform))
546        .add_step(steps::clippy(platform, target))
547        .add_step(steps::show_sccache_stats(platform));
548    if platform == Platform::Linux {
549        job = use_clang(job);
550    }
551    let name = match arch {
552        Some(arch) => format!("clippy_{platform}_{arch}"),
553        None => format!("clippy_{platform}"),
554    };
555    NamedJob { name, job }
556}
557
/// Per-platform test job that restricts nextest to the `changed_packages`
/// filterset produced by the `orchestrate` job.
pub(crate) fn run_platform_tests(platform: Platform) -> NamedJob {
    run_platform_tests_impl(platform, true)
}
561
/// Per-platform test job that always runs the full suite, ignoring the
/// `changed_packages` filterset.
pub(crate) fn run_platform_tests_no_filter(platform: Platform) -> NamedJob {
    run_platform_tests_impl(platform, false)
}
565
/// Shared implementation for the per-platform test jobs.
///
/// `filter_packages` chooses between a nextest run scoped to the
/// `orchestrate` job's `changed_packages` filterset and an unfiltered run.
fn run_platform_tests_impl(platform: Platform, filter_packages: bool) -> NamedJob {
    let runner = match platform {
        Platform::Windows => runners::WINDOWS_DEFAULT,
        Platform::Linux => runners::LINUX_DEFAULT,
        Platform::Mac => runners::MAC_DEFAULT,
    };
    NamedJob {
        name: format!("run_tests_{platform}"),
        job: release_job(&[])
            .runs_on(runner)
            // Linux-only postgres service container; presumably backs the
            // database-dependent tests — confirm against the test suites.
            .when(platform == Platform::Linux, |job| {
                job.add_service(
                    "postgres",
                    Container::new("postgres:15")
                        .add_env(("POSTGRES_HOST_AUTH_METHOD", "trust"))
                        .ports(vec![Port::Name("5432:5432".into())])
                        .options(
                            "--health-cmd pg_isready \
                             --health-interval 500ms \
                             --health-timeout 5s \
                             --health-retries 10",
                        ),
                )
            })
            .add_step(steps::checkout_repo())
            .add_step(steps::setup_cargo_config(platform))
            .when(platform == Platform::Mac, |this| {
                this.add_step(steps::cache_rust_dependencies_namespace())
            })
            // On Linux the cache step is combined with switching to clang.
            .when(platform == Platform::Linux, |this| {
                use_clang(this.add_step(steps::cache_rust_dependencies_namespace()))
            })
            .when(
                platform == Platform::Linux,
                steps::install_linux_dependencies,
            )
            .add_step(steps::setup_node())
            // Windows is excluded here; nextest is only installed on
            // Linux/Mac — presumably preinstalled on the Windows runner.
            .when(
                platform == Platform::Linux || platform == Platform::Mac,
                |job| job.add_step(steps::cargo_install_nextest()),
            )
            .add_step(steps::clear_target_dir_if_large(platform))
            .add_step(steps::setup_sccache(platform))
            // Exactly one of these two branches adds the nextest step.
            .when(filter_packages, |job| {
                job.add_step(
                    steps::cargo_nextest(platform).with_changed_packages_filter("orchestrate"),
                )
            })
            .when(!filter_packages, |job| {
                job.add_step(steps::cargo_nextest(platform))
            })
            .add_step(steps::show_sccache_stats(platform))
            .add_step(steps::cleanup_cargo_config(platform)),
    }
}
621
622fn build_visual_tests_binary() -> NamedJob {
623    pub fn cargo_build_visual_tests() -> Step<Run> {
624        named::bash("cargo build -p zed --bin zed_visual_test_runner --features visual-tests")
625    }
626
627    named::job(
628        Job::default()
629            .runs_on(runners::MAC_DEFAULT)
630            .add_step(steps::checkout_repo())
631            .add_step(steps::setup_cargo_config(Platform::Mac))
632            .add_step(steps::cache_rust_dependencies_namespace())
633            .add_step(cargo_build_visual_tests())
634            .add_step(steps::cleanup_cargo_config(Platform::Mac)),
635    )
636}
637
/// Job guarding protobuf compatibility: checks for breaking proto changes
/// against the base branch (buf-breaking), lints, and verifies formatting.
pub(crate) fn check_postgres_and_protobuf_migrations() -> NamedJob {
    // Determines the base revision for the breaking-change comparison and
    // exports it as BUF_BASE_BRANCH; in PRs it first merges the base branch
    // into a temp branch so the comparison reflects the merged state.
    fn ensure_fresh_merge() -> Step<Run> {
        named::bash(indoc::indoc! {r#"
            if [ -z "$GITHUB_BASE_REF" ];
            then
              echo "BUF_BASE_BRANCH=$(git merge-base origin/main HEAD)" >> "$GITHUB_ENV"
            else
              git checkout -B temp
              git merge -q "origin/$GITHUB_BASE_REF" -m "merge main into temp"
              echo "BUF_BASE_BRANCH=$GITHUB_BASE_REF" >> "$GITHUB_ENV"
            fi
        "#})
    }

    fn bufbuild_setup_action() -> Step<Use> {
        named::uses("bufbuild", "buf-setup-action", "v1")
            .add_with(("version", "v1.29.0"))
            .add_with(("github_token", vars::GITHUB_TOKEN))
    }

    fn bufbuild_breaking_action() -> Step<Use> {
        named::uses("bufbuild", "buf-breaking-action", "v1").add_with(("input", "crates/proto/proto/"))
            .add_with(("against", "https://github.com/${GITHUB_REPOSITORY}.git#branch=${BUF_BASE_BRANCH},subdir=crates/proto/proto/"))
    }

    fn buf_lint() -> Step<Run> {
        named::bash("buf lint crates/proto/proto")
    }

    fn check_protobuf_formatting() -> Step<Run> {
        named::bash("buf format --diff --exit-code crates/proto/proto")
    }

    named::job(
        release_job(&[])
            .runs_on(runners::LINUX_DEFAULT)
            // Identity vars needed by the `git merge` in ensure_fresh_merge.
            .add_env(("GIT_AUTHOR_NAME", "Protobuf Action"))
            .add_env(("GIT_AUTHOR_EMAIL", "ci@zed.dev"))
            .add_env(("GIT_COMMITTER_NAME", "Protobuf Action"))
            .add_env(("GIT_COMMITTER_EMAIL", "ci@zed.dev"))
            // Full history is required for merge-base / branch comparisons.
            .add_step(steps::checkout_repo().with_full_history())
            .add_step(ensure_fresh_merge())
            .add_step(bufbuild_setup_action())
            .add_step(bufbuild_breaking_action())
            .add_step(buf_lint())
            .add_step(check_protobuf_formatting()),
    )
}
686
/// Job running all workspace doctests on Linux (`cargo test --doc`), which
/// nextest does not cover.
fn doctests() -> NamedJob {
    fn run_doctests() -> Step<Run> {
        named::bash(indoc::indoc! {r#"
            cargo test --workspace --doc --no-fail-fast
        "#})
        .id("run_doctests")
    }

    named::job(use_clang(
        release_job(&[])
            .runs_on(runners::LINUX_DEFAULT)
            .add_step(steps::checkout_repo())
            .add_step(steps::cache_rust_dependencies_namespace())
            .map(steps::install_linux_dependencies)
            // NOTE(review): setup_cargo_config runs after the dependency
            // install here, unlike most other jobs in this file which run it
            // right after checkout — confirm whether the order matters.
            .add_step(steps::setup_cargo_config(Platform::Linux))
            .add_step(steps::setup_sccache(Platform::Linux))
            .add_step(run_doctests())
            .add_step(steps::show_sccache_stats(Platform::Linux))
            .add_step(steps::cleanup_cargo_config(Platform::Linux)),
    ))
}
708
/// Job validating license metadata via the repo's check/generate scripts.
fn check_licenses() -> NamedJob {
    named::job(
        Job::default()
            .runs_on(runners::LINUX_SMALL)
            .add_step(steps::checkout_repo())
            .add_step(steps::cache_rust_dependencies_namespace())
            .add_step(steps::script("./script/check-licenses"))
            .add_step(steps::script("./script/generate-licenses")),
    )
}
719
/// Docs job: link-checks the markdown sources, builds the mdBook site, then
/// link-checks the generated HTML.
fn check_docs() -> NamedJob {
    // lychee link checker, restricted to local (non-http) links.
    fn lychee_link_check(dir: &str) -> Step<Use> {
        named::uses(
            "lycheeverse",
            "lychee-action",
            "82202e5e9c2f4ef1a55a3d02563e1cb6041e5332",
        ) // v2.4.1
        .add_with(("args", format!("--no-progress --exclude '^http' '{dir}'")))
        .add_with(("fail", true))
        .add_with(("jobSummary", false))
    }

    fn install_mdbook() -> Step<Use> {
        named::uses(
            "peaceiris",
            "actions-mdbook",
            "ee69d230fe19748b7abf22df32acaa93833fad08", // v2
        )
        .with(("mdbook-version", "0.4.37"))
    }

    fn build_docs() -> Step<Run> {
        named::bash(indoc::indoc! {r#"
            mkdir -p target/deploy
            mdbook build ./docs --dest-dir=../target/deploy/docs/
        "#})
    }

    named::job(use_clang(
        release_job(&[])
            .runs_on(runners::LINUX_LARGE)
            .add_step(steps::checkout_repo())
            .add_step(steps::setup_cargo_config(Platform::Linux))
            // todo(ci): un-inline build_docs/action.yml here
            .add_step(steps::cache_rust_dependencies_namespace())
            .add_step(
                lychee_link_check("./docs/src/**/*"), // check markdown links
            )
            .map(steps::install_linux_dependencies)
            .add_step(steps::script("./script/generate-action-metadata"))
            .add_step(install_mdbook())
            .add_step(build_docs())
            .add_step(
                lychee_link_check("target/deploy/docs"), // check links in generated html
            ),
    ))
}
767
768pub(crate) fn check_scripts() -> NamedJob {
769    fn download_actionlint() -> Step<Run> {
770        named::bash(
771            "bash <(curl https://raw.githubusercontent.com/rhysd/actionlint/main/scripts/download-actionlint.bash)",
772        )
773    }
774
775    fn run_actionlint() -> Step<Run> {
776        named::bash(r#""$ACTIONLINT_BIN" -color"#).add_env((
777            "ACTIONLINT_BIN",
778            "${{ steps.get_actionlint.outputs.executable }}",
779        ))
780    }
781
782    fn run_shellcheck() -> Step<Run> {
783        named::bash("./script/shellcheck-scripts error")
784    }
785
786    fn check_xtask_workflows() -> Step<Run> {
787        named::bash(indoc::indoc! {r#"
788            cargo xtask workflows
789            if ! git diff --exit-code .github; then
790              echo "Error: .github directory has uncommitted changes after running 'cargo xtask workflows'"
791              echo "Please run 'cargo xtask workflows' locally and commit the changes"
792              exit 1
793            fi
794        "#})
795    }
796
797    named::job(
798        release_job(&[])
799            .runs_on(runners::LINUX_SMALL)
800            .add_step(steps::checkout_repo())
801            .add_step(run_shellcheck())
802            .add_step(download_actionlint().id("get_actionlint"))
803            .add_step(run_actionlint())
804            .add_step(cache_rust_dependencies_namespace())
805            .add_step(check_xtask_workflows()),
806    )
807}
808
809fn extension_tests() -> NamedJob<UsesJob> {
810    let job = Job::default()
811        .needs(vec!["orchestrate".to_owned()])
812        .cond(Expression::new(
813            "needs.orchestrate.outputs.changed_extensions != '[]'",
814        ))
815        .permissions(Permissions::default().contents(Level::Read))
816        .strategy(
817            Strategy::default()
818                .fail_fast(false)
819                // TODO: Remove the limit. We currently need this to workaround the concurrency group issue
820                // where different matrix jobs would be placed in the same concurrency group and thus cancelled.
821                .max_parallel(1u32)
822                .matrix(json!({
823                    "extension": "${{ fromJson(needs.orchestrate.outputs.changed_extensions) }}"
824                })),
825        )
826        .uses_local(".github/workflows/extension_tests.yml")
827        .with(Input::default().add("working-directory", "${{ matrix.extension }}"));
828
829    named::job(job)
830}