run_tests.rs

  1use gh_workflow::{
  2    Container, Event, Expression, Input, Job, Level, MergeGroup, Permissions, Port, PullRequest,
  3    Push, Run, Step, Strategy, Use, UsesJob, Workflow,
  4};
  5use indexmap::IndexMap;
  6use indoc::formatdoc;
  7use serde_json::json;
  8
  9use crate::tasks::workflows::{
 10    steps::{
 11        CommonJobConditions, cache_rust_dependencies_namespace, repository_owner_guard_expression,
 12        use_clang,
 13    },
 14    vars::{self, PathCondition},
 15};
 16
 17use super::{
 18    deploy_docs,
 19    runners::{self, Arch, Platform},
 20    steps::{self, FluentBuilder, NamedJob, named, release_job},
 21};
 22
 23pub(crate) fn run_tests() -> Workflow {
 24    // Specify anything which should potentially skip full test suite in this regex:
 25    // - docs/
 26    // - script/update_top_ranking_issues/
 27    // - .github/ISSUE_TEMPLATE/
 28    // - .github/workflows/  (except .github/workflows/ci.yml)
 29    // - extensions/  (these have their own test workflow)
 30    let should_run_tests = PathCondition::inverted(
 31        "run_tests",
 32        r"^(docs/|script/update_top_ranking_issues/|\.github/(ISSUE_TEMPLATE|workflows/(?!run_tests))|extensions/)",
 33    );
 34    let should_check_docs = PathCondition::new("run_docs", r"^(docs/|crates/.*\.rs)");
 35    let should_check_scripts = PathCondition::new(
 36        "run_action_checks",
 37        r"^\.github/(workflows/|actions/|actionlint.yml)|tooling/xtask|script/",
 38    );
 39    let should_check_licences =
 40        PathCondition::new("run_licenses", r"^(Cargo.lock|script/.*licenses)");
 41
 42    let orchestrate = orchestrate(&[
 43        &should_check_scripts,
 44        &should_check_docs,
 45        &should_check_licences,
 46        &should_run_tests,
 47    ]);
 48
 49    let mut jobs = vec![
 50        orchestrate,
 51        check_style(),
 52        should_run_tests
 53            .and_not_in_merge_queue()
 54            .then(clippy(Platform::Windows, None)),
 55        should_run_tests
 56            .and_not_in_merge_queue()
 57            .then(clippy(Platform::Linux, None)),
 58        should_run_tests
 59            .and_not_in_merge_queue()
 60            .then(clippy(Platform::Mac, None)),
 61        should_run_tests
 62            .and_not_in_merge_queue()
 63            .then(clippy(Platform::Mac, Some(Arch::X86_64))),
 64        should_run_tests
 65            .and_not_in_merge_queue()
 66            .then(run_platform_tests(Platform::Windows)),
 67        should_run_tests
 68            .and_not_in_merge_queue()
 69            .then(run_platform_tests(Platform::Linux)),
 70        should_run_tests
 71            .and_not_in_merge_queue()
 72            .then(run_platform_tests(Platform::Mac)),
 73        should_run_tests.and_not_in_merge_queue().then(doctests()),
 74        should_run_tests
 75            .and_not_in_merge_queue()
 76            .then(check_workspace_binaries()),
 77        should_run_tests
 78            .and_not_in_merge_queue()
 79            .then(build_visual_tests_binary()),
 80        should_run_tests.and_not_in_merge_queue().then(check_wasm()),
 81        should_run_tests
 82            .and_not_in_merge_queue()
 83            .then(check_dependencies()), // could be more specific here?
 84        should_check_docs
 85            .and_not_in_merge_queue()
 86            .then(deploy_docs::check_docs()),
 87        should_check_licences
 88            .and_not_in_merge_queue()
 89            .then(check_licenses()),
 90        should_check_scripts.and_always().then(check_scripts()),
 91    ];
 92    let ext_tests = extension_tests();
 93    let tests_pass = tests_pass(&jobs, &[&ext_tests.name]);
 94
 95    // TODO: For merge queues, this should fail in the merge queue context
 96    jobs.push(
 97        should_run_tests
 98            .and_always()
 99            .then(check_postgres_and_protobuf_migrations()),
100    ); // could be more specific here?
101
102    named::workflow()
103        .add_event(
104            Event::default()
105                .push(
106                    Push::default()
107                        .add_branch("main")
108                        .add_branch("v[0-9]+.[0-9]+.x"),
109                )
110                .pull_request(PullRequest::default().add_branch("**"))
111                .merge_group(MergeGroup::default()),
112        )
113        .concurrency(vars::one_workflow_per_non_main_branch())
114        .add_env(("CARGO_TERM_COLOR", "always"))
115        .add_env(("RUST_BACKTRACE", 1))
116        .add_env(("CARGO_INCREMENTAL", 0))
117        .map(|mut workflow| {
118            for job in jobs {
119                workflow = workflow.add_job(job.name, job.job)
120            }
121            workflow
122        })
123        .add_job(ext_tests.name, ext_tests.job)
124        .add_job(tests_pass.name, tests_pass.job)
125}
126
/// Controls which features `orchestrate_impl` includes in the generated script.
///
/// A fieldless two-variant enum: derives `Copy`/`Clone` so it can be passed
/// by value without ceremony, and `Debug` so failures print the variant.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum OrchestrateTarget {
    /// For the main Zed repo: includes the cargo package filter and extension
    /// change detection, but no working-directory scoping.
    ZedRepo,
    /// For individual extension repos: scopes changed-file detection to the
    /// working directory, with no package filter or extension detection.
    Extension,
}
137
/// Builds the `orchestrate` job for the main Zed repo: generates a bash
/// script that checks changed files against each rule's regex and writes one
/// GitHub output variable per rule, plus the Zed-specific `changed_packages`
/// and `changed_extensions` outputs.
pub fn orchestrate(rules: &[&PathCondition]) -> NamedJob {
    orchestrate_impl(rules, OrchestrateTarget::ZedRepo)
}

/// Same as [`orchestrate`], but for individual extension repos: changed-file
/// detection is scoped to the current working directory, and the Zed-only
/// outputs (`changed_packages`, `changed_extensions`) are omitted.
pub fn orchestrate_for_extension(rules: &[&PathCondition]) -> NamedJob {
    orchestrate_impl(rules, OrchestrateTarget::Extension)
}
147
/// Shared implementation for [`orchestrate`] / [`orchestrate_for_extension`].
///
/// Emits one job whose single scripted step diffs the current commit against
/// a base revision, then writes a `true`/`false` GitHub output per rule.
/// For [`OrchestrateTarget::ZedRepo`] it additionally emits a nextest
/// `changed_packages` filterset and a `changed_extensions` JSON array.
fn orchestrate_impl(rules: &[&PathCondition], target: OrchestrateTarget) -> NamedJob {
    let name = "orchestrate".to_owned();
    let step_name = "filter".to_owned();
    let mut script = String::new();

    // Compute CHANGED_FILES: in a PR, diff against the merge-base with the
    // base branch; on a direct push, diff against the previous commit.
    script.push_str(indoc::indoc! {r#"
        set -euo pipefail
        if [ -z "$GITHUB_BASE_REF" ]; then
          echo "Not in a PR context (i.e., push to main/stable/preview)"
          COMPARE_REV="$(git rev-parse HEAD~1)"
        else
          echo "In a PR context comparing to pull_request.base.ref"
          git fetch origin "$GITHUB_BASE_REF" --depth=350
          COMPARE_REV="$(git merge-base "origin/${GITHUB_BASE_REF}" HEAD)"
        fi
        CHANGED_FILES="$(git diff --name-only "$COMPARE_REV" "$GITHUB_SHA")"

    "#});

    if target == OrchestrateTarget::Extension {
        // Extension repos run this job from a subdirectory; re-scope the diff
        // output to that directory so the rule regexes see relative paths.
        script.push_str(indoc::indoc! {r#"
        # When running from a subdirectory, git diff returns repo-root-relative paths.
        # Filter to only files within the current working directory and strip the prefix.
        REPO_SUBDIR="$(git rev-parse --show-prefix)"
        REPO_SUBDIR="${REPO_SUBDIR%/}"
        if [ -n "$REPO_SUBDIR" ]; then
            CHANGED_FILES="$(echo "$CHANGED_FILES" | grep "^${REPO_SUBDIR}/" | sed "s|^${REPO_SUBDIR}/||" || true)"
        fi

    "#});
    }

    // Helper invoked once per rule by the `check_pattern` lines appended in
    // the loop below.
    script.push_str(indoc::indoc! {r#"
        check_pattern() {
          local output_name="$1"
          local pattern="$2"
          local grep_arg="$3"

          echo "$CHANGED_FILES" | grep "$grep_arg" "$pattern" && \
            echo "${output_name}=true" >> "$GITHUB_OUTPUT" || \
            echo "${output_name}=false" >> "$GITHUB_OUTPUT"
        }

    "#});

    // Job outputs exposed to downstream jobs via the `needs` context.
    let mut outputs = IndexMap::new();

    if target == OrchestrateTarget::ZedRepo {
        // Derive a nextest filterset of changed packages (plus their reverse
        // dependencies). An empty `changed_packages` output means "run
        // everything".
        script.push_str(indoc::indoc! {r#"
        # Check for changes that require full rebuild (no filter)
        # Direct pushes to main/stable/preview always run full suite
        if [ -z "$GITHUB_BASE_REF" ]; then
          echo "Not a PR, running full test suite"
          echo "changed_packages=" >> "$GITHUB_OUTPUT"
        elif echo "$CHANGED_FILES" | grep -qP '^(rust-toolchain\.toml|\.cargo/|\.github/|Cargo\.(toml|lock)$)'; then
          echo "Toolchain, cargo config, or root Cargo files changed, will run all tests"
          echo "changed_packages=" >> "$GITHUB_OUTPUT"
        else
          # Extract changed directories from file paths
          CHANGED_DIRS=$(echo "$CHANGED_FILES" | \
            grep -oP '^(crates|tooling)/\K[^/]+' | \
            sort -u || true)

          # Build directory-to-package mapping using cargo metadata
          DIR_TO_PKG=$(cargo metadata --format-version=1 --no-deps 2>/dev/null | \
            jq -r '.packages[] | select(.manifest_path | test("crates/|tooling/")) | "\(.manifest_path | capture("(crates|tooling)/(?<dir>[^/]+)") | .dir)=\(.name)"')

          # Map directory names to package names
          FILE_CHANGED_PKGS=""
          for dir in $CHANGED_DIRS; do
            pkg=$(echo "$DIR_TO_PKG" | grep "^${dir}=" | cut -d= -f2 | head -1)
            if [ -n "$pkg" ]; then
              FILE_CHANGED_PKGS=$(printf '%s\n%s' "$FILE_CHANGED_PKGS" "$pkg")
            else
              # Fall back to directory name if no mapping found
              FILE_CHANGED_PKGS=$(printf '%s\n%s' "$FILE_CHANGED_PKGS" "$dir")
            fi
          done
          FILE_CHANGED_PKGS=$(echo "$FILE_CHANGED_PKGS" | grep -v '^$' | sort -u || true)

          # If assets/ changed, add crates that depend on those assets
          if echo "$CHANGED_FILES" | grep -qP '^assets/'; then
            FILE_CHANGED_PKGS=$(printf '%s\n%s\n%s' "$FILE_CHANGED_PKGS" "settings" "assets" | sort -u)
          fi

          # Combine all changed packages
          ALL_CHANGED_PKGS=$(echo "$FILE_CHANGED_PKGS" | grep -v '^$' || true)

          if [ -z "$ALL_CHANGED_PKGS" ]; then
            echo "No package changes detected, will run all tests"
            echo "changed_packages=" >> "$GITHUB_OUTPUT"
          else
            # Build nextest filterset with rdeps for each package
            FILTERSET=$(echo "$ALL_CHANGED_PKGS" | \
              sed 's/.*/rdeps(&)/' | \
              tr '\n' '|' | \
              sed 's/|$//')
            echo "Changed packages filterset: $FILTERSET"
            echo "changed_packages=$FILTERSET" >> "$GITHUB_OUTPUT"
          fi
        fi

    "#});

        outputs.insert(
            "changed_packages".to_owned(),
            format!("${{{{ steps.{}.outputs.changed_packages }}}}", step_name),
        );
    }

    for rule in rules {
        // Record on the rule which job sets its output; the assert catches a
        // rule being wired into more than one orchestrate job.
        assert!(
            rule.set_by_step
                .borrow_mut()
                .replace(name.clone())
                .is_none()
        );
        // Each rule name must be unique among the job outputs.
        assert!(
            outputs
                .insert(
                    rule.name.to_owned(),
                    format!("${{{{ steps.{}.outputs.{} }}}}", step_name, rule.name)
                )
                .is_none()
        );

        // Inverted rules report `true` when something OTHER than the pattern
        // changed (grep -v).
        let grep_arg = if rule.invert { "-qvP" } else { "-qP" };
        script.push_str(&format!(
            "check_pattern \"{}\" '{}' {}\n",
            rule.name, rule.pattern, grep_arg
        ));
    }

    if target == OrchestrateTarget::ZedRepo {
        // Append extension change detection and expose its JSON result.
        script.push_str(DETECT_CHANGED_EXTENSIONS_SCRIPT);
        script.push_str("echo \"changed_extensions=$EXTENSIONS_JSON\" >> \"$GITHUB_OUTPUT\"\n");

        outputs.insert(
            "changed_extensions".to_owned(),
            format!("${{{{ steps.{}.outputs.changed_extensions }}}}", step_name),
        );
    }

    let job = Job::default()
        .runs_on(runners::LINUX_SMALL)
        .with_repository_owner_guard()
        .outputs(outputs)
        .add_step(steps::checkout_repo().with_deep_history_on_non_main())
        .add_step(Step::new(step_name.clone()).run(script).id(step_name));

    NamedJob { name, job }
}
300
301pub fn tests_pass(jobs: &[NamedJob], extra_job_names: &[&str]) -> NamedJob {
302    let mut script = String::from(indoc::indoc! {r#"
303        set +x
304        EXIT_CODE=0
305
306        check_result() {
307          echo "* $1: $2"
308          if [[ "$2" != "skipped" && "$2" != "success" ]]; then EXIT_CODE=1; fi
309        }
310
311    "#});
312
313    let all_names: Vec<&str> = jobs
314        .iter()
315        .map(|job| job.name.as_str())
316        .chain(extra_job_names.iter().copied())
317        .collect();
318
319    let env_entries: Vec<_> = all_names
320        .iter()
321        .map(|name| {
322            let env_name = format!("RESULT_{}", name.to_uppercase());
323            let env_value = format!("${{{{ needs.{}.result }}}}", name);
324            (env_name, env_value)
325        })
326        .collect();
327
328    script.push_str(
329        &all_names
330            .iter()
331            .zip(env_entries.iter())
332            .map(|(name, (env_name, _))| format!("check_result \"{}\" \"${}\"", name, env_name))
333            .collect::<Vec<_>>()
334            .join("\n"),
335    );
336
337    script.push_str("\n\nexit $EXIT_CODE\n");
338
339    let job = Job::default()
340        .runs_on(runners::LINUX_SMALL)
341        .needs(
342            all_names
343                .iter()
344                .map(|name| name.to_string())
345                .collect::<Vec<String>>(),
346        )
347        .cond(repository_owner_guard_expression(true))
348        .add_step(
349            env_entries
350                .into_iter()
351                .fold(named::bash(&script), |step, env_item| {
352                    step.add_env(env_item)
353                }),
354        );
355
356    named::job(job)
357}
358
/// Bash script snippet that detects changed extension directories from `$CHANGED_FILES`.
/// Assumes `$CHANGED_FILES` is already set. Sets `$EXTENSIONS_JSON` to a JSON array of
/// changed extension paths. Callers are responsible for writing the result to `$GITHUB_OUTPUT`.
///
/// `extensions/workflows` is excluded, as is any extension directory whose
/// `extension.toml` no longer exists (i.e. deleted extensions). When nothing
/// matches, `$EXTENSIONS_JSON` is the empty array `[]`.
pub(crate) const DETECT_CHANGED_EXTENSIONS_SCRIPT: &str = indoc::indoc! {r#"
    # Detect changed extension directories (excluding extensions/workflows)
    CHANGED_EXTENSIONS=$(echo "$CHANGED_FILES" | grep -oP '^extensions/[^/]+(?=/)' | sort -u | grep -v '^extensions/workflows$' || true)
    # Filter out deleted extensions
    EXISTING_EXTENSIONS=""
    for ext in $CHANGED_EXTENSIONS; do
        if [ -f "$ext/extension.toml" ]; then
            EXISTING_EXTENSIONS=$(printf '%s\n%s' "$EXISTING_EXTENSIONS" "$ext")
        fi
    done
    CHANGED_EXTENSIONS=$(echo "$EXISTING_EXTENSIONS" | sed '/^$/d')
    if [ -n "$CHANGED_EXTENSIONS" ]; then
        EXTENSIONS_JSON=$(echo "$CHANGED_EXTENSIONS" | jq -R -s -c 'split("\n") | map(select(length > 0))')
    else
        EXTENSIONS_JSON="[]"
    fi
"#};
379
/// Release-asset filename of the prebuilt `ts_query_ls` Linux binary fetched in CI.
const TS_QUERY_LS_FILE: &str = "ts_query_ls-x86_64-unknown-linux-gnu.tar.gz";
/// Pinned `ts_query_ls` release tag used by [`fetch_ts_query_ls`].
const CI_TS_QUERY_RELEASE: &str = "tags/v3.15.1";
382
383pub(crate) fn fetch_ts_query_ls() -> Step<Use> {
384    named::uses(
385        "dsaltares",
386        "fetch-gh-release-asset",
387        "aa37ae5c44d3c9820bc12fe675e8670ecd93bd1c",
388    ) // v1.1.1
389    .add_with(("repo", "ribru17/ts_query_ls"))
390    .add_with(("version", CI_TS_QUERY_RELEASE))
391    .add_with(("file", TS_QUERY_LS_FILE))
392}
393
/// Step that unpacks the archive fetched by [`fetch_ts_query_ls`] and runs
/// `ts_query_ls format --check` over the repo, printing guidance (and
/// failing) when unformatted tree-sitter queries are found.
pub(crate) fn run_ts_query_ls() -> Step<Run> {
    named::bash(formatdoc!(
        r#"tar -xf "$GITHUB_WORKSPACE/{TS_QUERY_LS_FILE}" -C "$GITHUB_WORKSPACE"
        "$GITHUB_WORKSPACE/ts_query_ls" format --check . || {{
            echo "Found unformatted queries, please format them with ts_query_ls."
            echo "For easy use, install the Tree-sitter query extension:"
            echo "zed://extension/tree-sitter-query"
            false
        }}"#
    ))
}
405
/// Style-check job: prettier, `cargo fmt`, the check-todos/check-keymaps
/// scripts, typo detection, and tree-sitter query formatting.
fn check_style() -> NamedJob {
    // Typo checker action, pinned by commit SHA.
    fn check_for_typos() -> Step<Use> {
        named::uses(
            "crate-ci",
            "typos",
            "2d0ce569feab1f8752f1dde43cc2f2aa53236e06",
        ) // v1.40.0
        .with(("config", "./typos.toml"))
    }

    named::job(
        release_job(&[])
            .runs_on(runners::LINUX_MEDIUM)
            .add_step(steps::checkout_repo())
            .add_step(steps::cache_rust_dependencies_namespace())
            // pnpm is needed for the prettier step below.
            .add_step(steps::setup_pnpm())
            .add_step(steps::prettier())
            .add_step(steps::cargo_fmt())
            .add_step(steps::script("./script/check-todos"))
            .add_step(steps::script("./script/check-keymaps"))
            .add_step(check_for_typos())
            .add_step(fetch_ts_query_ls())
            .add_step(run_ts_query_ls()),
    )
}
431
/// Dependency-hygiene job: unused dependencies (cargo-machete), an up-to-date
/// `Cargo.lock`, and GitHub's dependency review for vulnerable dependencies.
fn check_dependencies() -> NamedJob {
    // taiki-e/install-action pinned by commit SHA; installs a pinned
    // cargo-machete.
    fn install_cargo_machete() -> Step<Use> {
        named::uses(
            "taiki-e",
            "install-action",
            "02cc5f8ca9f2301050c0c099055816a41ee05507",
        )
        .add_with(("tool", "cargo-machete@0.7.0"))
    }

    // Reports dependencies declared in Cargo.toml but never used.
    fn run_cargo_machete() -> Step<Run> {
        named::bash("cargo machete")
    }

    // `--locked` makes this fail if Cargo.lock is out of sync with the
    // workspace manifests, without actually updating anything.
    fn check_cargo_lock() -> Step<Run> {
        named::bash("cargo update --locked --workspace")
    }

    // dependency-review only works with PR events, hence the condition.
    fn check_vulnerable_dependencies() -> Step<Use> {
        named::uses(
            "actions",
            "dependency-review-action",
            "67d4f4bd7a9b17a0db54d2a7519187c65e339de8", // v4
        )
        .if_condition(Expression::new("github.event_name == 'pull_request'"))
        .with(("license-check", false))
    }

    named::job(use_clang(
        release_job(&[])
            .runs_on(runners::LINUX_SMALL)
            .add_step(steps::checkout_repo())
            .add_step(steps::cache_rust_dependencies_namespace())
            .add_step(install_cargo_machete())
            .add_step(run_cargo_machete())
            .add_step(check_cargo_lock())
            .add_step(check_vulnerable_dependencies()),
    ))
}
471
/// Job verifying that `gpui_platform` still compiles for
/// `wasm32-unknown-unknown` with atomics enabled, rebuilding std via
/// `-Zbuild-std`.
fn check_wasm() -> NamedJob {
    // rust-src is required so -Zbuild-std can rebuild std for the wasm target.
    fn install_nightly_wasm_toolchain() -> Step<Run> {
        named::bash(
            "rustup toolchain install nightly --component rust-src --target wasm32-unknown-unknown",
        )
    }

    fn cargo_check_wasm() -> Step<Run> {
        named::bash(concat!(
            "cargo -Zbuild-std=std,panic_abort ",
            "check --target wasm32-unknown-unknown -p gpui_platform",
        ))
        // std must be built with the same target features as the crate,
        // hence the -Zbuild-std flag on the command above.
        .add_env((
            "CARGO_TARGET_WASM32_UNKNOWN_UNKNOWN_RUSTFLAGS",
            "-C target-feature=+atomics,+bulk-memory,+mutable-globals",
        ))
        // NOTE(review): presumably set so the default toolchain accepts the
        // unstable -Zbuild-std flag even though `cargo +nightly` isn't used
        // explicitly — confirm.
        .add_env(("RUSTC_BOOTSTRAP", "1"))
    }

    named::job(
        release_job(&[])
            .runs_on(runners::LINUX_LARGE)
            .add_step(steps::checkout_repo())
            .add_step(steps::setup_cargo_config(Platform::Linux))
            .add_step(steps::cache_rust_dependencies_namespace())
            .add_step(install_nightly_wasm_toolchain())
            .add_step(steps::setup_sccache(Platform::Linux))
            .add_step(cargo_check_wasm())
            .add_step(steps::show_sccache_stats(Platform::Linux))
            .add_step(steps::cleanup_cargo_config(Platform::Linux)),
    )
}
504
505fn check_workspace_binaries() -> NamedJob {
506    named::job(use_clang(
507        release_job(&[])
508            .runs_on(runners::LINUX_LARGE)
509            .add_step(steps::checkout_repo())
510            .add_step(steps::setup_cargo_config(Platform::Linux))
511            .add_step(steps::cache_rust_dependencies_namespace())
512            .map(steps::install_linux_dependencies)
513            .add_step(steps::setup_sccache(Platform::Linux))
514            .add_step(steps::script("cargo build -p collab"))
515            .add_step(steps::script("cargo build --workspace --bins --examples"))
516            .add_step(steps::show_sccache_stats(Platform::Linux))
517            .add_step(steps::cleanup_cargo_config(Platform::Linux)),
518    ))
519}
520
521pub(crate) fn clippy(platform: Platform, arch: Option<Arch>) -> NamedJob {
522    let target = arch.map(|arch| match (platform, arch) {
523        (Platform::Mac, Arch::X86_64) => "x86_64-apple-darwin",
524        (Platform::Mac, Arch::AARCH64) => "aarch64-apple-darwin",
525        _ => unimplemented!("cross-arch clippy not supported for {platform}/{arch}"),
526    });
527    let runner = match platform {
528        Platform::Windows => runners::WINDOWS_DEFAULT,
529        Platform::Linux => runners::LINUX_DEFAULT,
530        Platform::Mac => runners::MAC_DEFAULT,
531    };
532    let mut job = release_job(&[])
533        .runs_on(runner)
534        .add_step(steps::checkout_repo())
535        .add_step(steps::setup_cargo_config(platform))
536        .when(
537            platform == Platform::Linux || platform == Platform::Mac,
538            |this| this.add_step(steps::cache_rust_dependencies_namespace()),
539        )
540        .when(
541            platform == Platform::Linux,
542            steps::install_linux_dependencies,
543        )
544        .when_some(target, |this, target| {
545            this.add_step(steps::install_rustup_target(target))
546        })
547        .add_step(steps::setup_sccache(platform))
548        .add_step(steps::clippy(platform, target))
549        .add_step(steps::show_sccache_stats(platform));
550    if platform == Platform::Linux {
551        job = use_clang(job);
552    }
553    let name = match arch {
554        Some(arch) => format!("clippy_{platform}_{arch}"),
555        None => format!("clippy_{platform}"),
556    };
557    NamedJob { name, job }
558}
559
/// Per-platform test job that restricts the nextest run to the
/// `changed_packages` filterset produced by the orchestrate job.
pub(crate) fn run_platform_tests(platform: Platform) -> NamedJob {
    run_platform_tests_impl(platform, true)
}

/// Per-platform test job that always runs the full, unfiltered test suite.
pub(crate) fn run_platform_tests_no_filter(platform: Platform) -> NamedJob {
    run_platform_tests_impl(platform, false)
}
567
/// Builds the `run_tests_<platform>` job. When `filter_packages` is true, the
/// nextest invocation is restricted by the orchestrate job's
/// `changed_packages` output; otherwise everything runs.
fn run_platform_tests_impl(platform: Platform, filter_packages: bool) -> NamedJob {
    let runner = match platform {
        Platform::Windows => runners::WINDOWS_DEFAULT,
        Platform::Linux => runners::LINUX_DEFAULT,
        Platform::Mac => runners::MAC_DEFAULT,
    };
    NamedJob {
        name: format!("run_tests_{platform}"),
        job: release_job(&[])
            .runs_on(runner)
            // Postgres service container, Linux only — presumably for the
            // database-backed (collab) tests; confirm against the test suite.
            .when(platform == Platform::Linux, |job| {
                job.add_service(
                    "postgres",
                    Container::new("postgres:15")
                        .add_env(("POSTGRES_HOST_AUTH_METHOD", "trust"))
                        .ports(vec![Port::Name("5432:5432".into())])
                        .options(
                            "--health-cmd pg_isready \
                             --health-interval 500ms \
                             --health-timeout 5s \
                             --health-retries 10",
                        ),
                )
            })
            .add_step(steps::checkout_repo())
            .add_step(steps::setup_cargo_config(platform))
            .when(platform == Platform::Mac, |this| {
                this.add_step(steps::cache_rust_dependencies_namespace())
            })
            // Linux additionally builds with clang (use_clang).
            .when(platform == Platform::Linux, |this| {
                use_clang(this.add_step(steps::cache_rust_dependencies_namespace()))
            })
            .when(
                platform == Platform::Linux,
                steps::install_linux_dependencies,
            )
            .add_step(steps::setup_node())
            // nextest is only installed on Linux/Mac here — NOTE(review):
            // presumably the Windows runner image ships it; confirm.
            .when(
                platform == Platform::Linux || platform == Platform::Mac,
                |job| job.add_step(steps::cargo_install_nextest()),
            )
            .add_step(steps::clear_target_dir_if_large(platform))
            .add_step(steps::setup_sccache(platform))
            .when(filter_packages, |job| {
                job.add_step(
                    steps::cargo_nextest(platform).with_changed_packages_filter("orchestrate"),
                )
            })
            .when(!filter_packages, |job| {
                job.add_step(steps::cargo_nextest(platform))
            })
            .add_step(steps::show_sccache_stats(platform))
            .add_step(steps::cleanup_cargo_config(platform)),
    }
}
623
624fn build_visual_tests_binary() -> NamedJob {
625    pub fn cargo_build_visual_tests() -> Step<Run> {
626        named::bash("cargo build -p zed --bin zed_visual_test_runner --features visual-tests")
627    }
628
629    named::job(
630        Job::default()
631            .runs_on(runners::MAC_DEFAULT)
632            .add_step(steps::checkout_repo())
633            .add_step(steps::setup_cargo_config(Platform::Mac))
634            .add_step(steps::cache_rust_dependencies_namespace())
635            .add_step(cargo_build_visual_tests())
636            .add_step(steps::cleanup_cargo_config(Platform::Mac)),
637    )
638}
639
/// Job running protobuf compatibility checks with `buf`: breaking-change
/// detection against the base branch, linting, and formatting of
/// `crates/proto/proto`.
///
/// NOTE(review): despite the function name, no postgres migration check is
/// visible in this job — possibly removed at some point; confirm.
pub(crate) fn check_postgres_and_protobuf_migrations() -> NamedJob {
    // Establishes $BUF_BASE_BRANCH: in a PR, merge the base branch into a
    // temp branch first so the comparison sees an up-to-date merge result;
    // on a direct push, compare against the merge-base with origin/main.
    fn ensure_fresh_merge() -> Step<Run> {
        named::bash(indoc::indoc! {r#"
            if [ -z "$GITHUB_BASE_REF" ];
            then
              echo "BUF_BASE_BRANCH=$(git merge-base origin/main HEAD)" >> "$GITHUB_ENV"
            else
              git checkout -B temp
              git merge -q "origin/$GITHUB_BASE_REF" -m "merge main into temp"
              echo "BUF_BASE_BRANCH=$GITHUB_BASE_REF" >> "$GITHUB_ENV"
            fi
        "#})
    }

    fn bufbuild_setup_action() -> Step<Use> {
        named::uses("bufbuild", "buf-setup-action", "v1")
            .add_with(("version", "v1.29.0"))
            .add_with(("github_token", vars::GITHUB_TOKEN))
    }

    // Fails on wire-breaking proto changes relative to $BUF_BASE_BRANCH.
    fn bufbuild_breaking_action() -> Step<Use> {
        named::uses("bufbuild", "buf-breaking-action", "v1").add_with(("input", "crates/proto/proto/"))
            .add_with(("against", "https://github.com/${GITHUB_REPOSITORY}.git#branch=${BUF_BASE_BRANCH},subdir=crates/proto/proto/"))
    }

    fn buf_lint() -> Step<Run> {
        named::bash("buf lint crates/proto/proto")
    }

    fn check_protobuf_formatting() -> Step<Run> {
        named::bash("buf format --diff --exit-code crates/proto/proto")
    }

    named::job(
        release_job(&[])
            .runs_on(runners::LINUX_DEFAULT)
            // Git identity for the merge commit created by ensure_fresh_merge.
            .add_env(("GIT_AUTHOR_NAME", "Protobuf Action"))
            .add_env(("GIT_AUTHOR_EMAIL", "ci@zed.dev"))
            .add_env(("GIT_COMMITTER_NAME", "Protobuf Action"))
            .add_env(("GIT_COMMITTER_EMAIL", "ci@zed.dev"))
            // Full history is needed for merge-base / merging the base branch.
            .add_step(steps::checkout_repo().with_full_history())
            .add_step(ensure_fresh_merge())
            .add_step(bufbuild_setup_action())
            .add_step(bufbuild_breaking_action())
            .add_step(buf_lint())
            .add_step(check_protobuf_formatting()),
    )
}
688
/// Job running rustdoc doctests for the whole workspace on Linux.
fn doctests() -> NamedJob {
    fn run_doctests() -> Step<Run> {
        // --no-fail-fast so every failing doctest is reported in one run.
        named::bash(indoc::indoc! {r#"
            cargo test --workspace --doc --no-fail-fast
        "#})
        .id("run_doctests")
    }

    named::job(use_clang(
        release_job(&[])
            .runs_on(runners::LINUX_DEFAULT)
            .add_step(steps::checkout_repo())
            .add_step(steps::cache_rust_dependencies_namespace())
            .map(steps::install_linux_dependencies)
            .add_step(steps::setup_cargo_config(Platform::Linux))
            .add_step(steps::setup_sccache(Platform::Linux))
            .add_step(run_doctests())
            .add_step(steps::show_sccache_stats(Platform::Linux))
            .add_step(steps::cleanup_cargo_config(Platform::Linux)),
    ))
}
710
711fn check_licenses() -> NamedJob {
712    named::job(
713        Job::default()
714            .runs_on(runners::LINUX_SMALL)
715            .add_step(steps::checkout_repo())
716            .add_step(steps::cache_rust_dependencies_namespace())
717            .add_step(steps::script("./script/check-licenses"))
718            .add_step(steps::script("./script/generate-licenses")),
719    )
720}
721
/// CI-infrastructure job: shellcheck over repo scripts, actionlint over the
/// workflow files, and a check that the generated workflows under `.github/`
/// match the output of `cargo xtask workflows`.
pub(crate) fn check_scripts() -> NamedJob {
    fn download_actionlint() -> Step<Run> {
        named::bash(
            "bash <(curl https://raw.githubusercontent.com/rhysd/actionlint/main/scripts/download-actionlint.bash)",
        )
    }

    // Runs the binary path exported by the download step (id "get_actionlint").
    fn run_actionlint() -> Step<Run> {
        named::bash(r#""$ACTIONLINT_BIN" -color"#).add_env((
            "ACTIONLINT_BIN",
            "${{ steps.get_actionlint.outputs.executable }}",
        ))
    }

    fn run_shellcheck() -> Step<Run> {
        named::bash("./script/shellcheck-scripts error")
    }

    // Regenerates the workflows and fails if the result differs from what is
    // committed under .github/.
    fn check_xtask_workflows() -> Step<Run> {
        named::bash(indoc::indoc! {r#"
            cargo xtask workflows
            if ! git diff --exit-code .github; then
              echo "Error: .github directory has uncommitted changes after running 'cargo xtask workflows'"
              echo "Please run 'cargo xtask workflows' locally and commit the changes"
              exit 1
            fi
        "#})
    }

    named::job(
        release_job(&[])
            .runs_on(runners::LINUX_SMALL)
            .add_step(steps::checkout_repo())
            .add_step(run_shellcheck())
            .add_step(download_actionlint().id("get_actionlint"))
            .add_step(run_actionlint())
            .add_step(cache_rust_dependencies_namespace())
            .add_step(check_xtask_workflows()),
    )
}
762
/// Matrix job that invokes the reusable `extension_tests.yml` workflow once
/// per extension reported as changed by the orchestrate job.
fn extension_tests() -> NamedJob<UsesJob> {
    let job = Job::default()
        .needs(vec!["orchestrate".to_owned()])
        // Skip entirely when no extensions changed.
        .cond(Expression::new(
            "needs.orchestrate.outputs.changed_extensions != '[]'",
        ))
        .permissions(Permissions::default().contents(Level::Read))
        .strategy(
            Strategy::default()
                .fail_fast(false)
                // TODO: Remove the limit. We currently need this to workaround the concurrency group issue
                // where different matrix jobs would be placed in the same concurrency group and thus cancelled.
                .max_parallel(1u32)
                .matrix(json!({
                    "extension": "${{ fromJson(needs.orchestrate.outputs.changed_extensions) }}"
                })),
        )
        .uses_local(".github/workflows/extension_tests.yml")
        .with(Input::default().add("working-directory", "${{ matrix.extension }}"));

    named::job(job)
}