run_tests.rs

  1use gh_workflow::{
  2    Container, Event, Expression, Input, Job, Level, MergeGroup, Permissions, Port, PullRequest,
  3    Push, Run, Step, Strategy, Use, UsesJob, Workflow,
  4};
  5use indexmap::IndexMap;
  6use indoc::formatdoc;
  7use serde_json::json;
  8
  9use crate::tasks::workflows::{
 10    steps::{
 11        CommonJobConditions, cache_rust_dependencies_namespace, repository_owner_guard_expression,
 12        use_clang,
 13    },
 14    vars::{self, PathCondition},
 15};
 16
 17use super::{
 18    runners::{self, Arch, Platform},
 19    steps::{self, FluentBuilder, NamedJob, named, release_job},
 20};
 21
/// Builds the main CI workflow: an `orchestrate` job inspects which paths
/// changed, and every downstream job is guarded on its outputs so that
/// docs-only or extension-only changes skip the expensive test matrix.
pub(crate) fn run_tests() -> Workflow {
    // Specify anything which should potentially skip full test suite in this regex:
    // - docs/
    // - script/update_top_ranking_issues/
    // - .github/ISSUE_TEMPLATE/
    // - .github/workflows/  (except .github/workflows/run_tests*, which must re-run tests)
    // - extensions/  (these have their own test workflow)
    let should_run_tests = PathCondition::inverted(
        "run_tests",
        r"^(docs/|script/update_top_ranking_issues/|\.github/(ISSUE_TEMPLATE|workflows/(?!run_tests))|extensions/)",
    );
    let should_check_docs = PathCondition::new("run_docs", r"^(docs/|crates/.*\.rs)");
    let should_check_scripts = PathCondition::new(
        "run_action_checks",
        r"^\.github/(workflows/|actions/|actionlint.yml)|tooling/xtask|script/",
    );
    let should_check_licences =
        PathCondition::new("run_licenses", r"^(Cargo.lock|script/.*licenses)");

    // The orchestrate job evaluates each PathCondition against the changed
    // files and publishes the result as a job output that `guard` consumes.
    let orchestrate = orchestrate(&[
        &should_check_scripts,
        &should_check_docs,
        &should_check_licences,
        &should_run_tests,
    ]);

    let mut jobs = vec![
        orchestrate,
        check_style(),
        should_run_tests
            .and_not_in_merge_queue()
            .guard(clippy(Platform::Windows, None)),
        should_run_tests
            .and_always()
            .guard(clippy(Platform::Linux, None)),
        should_run_tests
            .and_not_in_merge_queue()
            .guard(clippy(Platform::Mac, None)),
        should_run_tests
            .and_not_in_merge_queue()
            .guard(clippy(Platform::Mac, Some(Arch::X86_64))),
        should_run_tests
            .and_not_in_merge_queue()
            .guard(run_platform_tests(Platform::Windows)),
        should_run_tests
            .and_not_in_merge_queue()
            .guard(run_platform_tests(Platform::Linux)),
        should_run_tests
            .and_not_in_merge_queue()
            .guard(run_platform_tests(Platform::Mac)),
        should_run_tests.and_not_in_merge_queue().guard(doctests()),
        should_run_tests
            .and_not_in_merge_queue()
            .guard(check_workspace_binaries()),
        should_run_tests
            .and_not_in_merge_queue()
            .guard(check_wasm()),
        should_run_tests
            .and_not_in_merge_queue()
            .guard(check_dependencies()), // could be more specific here?
        should_check_docs.and_always().guard(check_docs()),
        should_check_licences
            .and_not_in_merge_queue()
            .guard(check_licenses()),
        should_check_scripts.and_always().guard(check_scripts()),
    ];
    let ext_tests = extension_tests();
    // NOTE(review): tests_pass only `needs` the jobs collected so far (plus
    // the extension tests); the job pushed below runs but does NOT block the
    // tests_pass status check — confirm this is intentional.
    let tests_pass = tests_pass(&jobs, &[&ext_tests.name]);

    // TODO: For merge queues, this should fail in the merge queue context
    jobs.push(
        should_run_tests
            .and_always()
            .guard(check_postgres_and_protobuf_migrations()),
    ); // could be more specific here?

    // Trigger on pushes to main/release branches, all PRs, and merge groups.
    named::workflow()
        .add_event(
            Event::default()
                .push(
                    Push::default()
                        .add_branch("main")
                        .add_branch("v[0-9]+.[0-9]+.x"),
                )
                .pull_request(PullRequest::default().add_branch("**"))
                .merge_group(MergeGroup::default()),
        )
        .concurrency(vars::one_workflow_per_non_main_branch())
        .add_env(("CARGO_TERM_COLOR", "always"))
        .add_env(("RUST_BACKTRACE", 1))
        .add_env(("CARGO_INCREMENTAL", 0))
        .map(|mut workflow| {
            for job in jobs {
                workflow = workflow.add_job(job.name, job.job)
            }
            workflow
        })
        .add_job(ext_tests.name, ext_tests.job)
        .add_job(tests_pass.name, tests_pass.job)
}
122
/// Controls which features `orchestrate_impl` includes in the generated script.
// PartialEq/Eq are derived because `orchestrate_impl` compares with `==`.
#[derive(PartialEq, Eq)]
enum OrchestrateTarget {
    /// For the main Zed repo: includes the cargo package filter and extension
    /// change detection, but no working-directory scoping.
    ZedRepo,
    /// For individual extension repos: scopes changed-file detection to the
    /// working directory, with no package filter or extension detection.
    Extension,
}
133
/// Generates a bash script that checks changed files against regex patterns
/// and sets GitHub output variables accordingly.
///
/// Variant for the main Zed repository: includes the cargo package filter
/// and extension change detection (see [`OrchestrateTarget::ZedRepo`]).
pub fn orchestrate(rules: &[&PathCondition]) -> NamedJob {
    orchestrate_impl(rules, OrchestrateTarget::ZedRepo)
}
139
/// Like [`orchestrate`], but for individual extension repositories: changed
/// file detection is scoped to the current working directory, and no cargo
/// package filter or extension detection is emitted
/// (see [`OrchestrateTarget::Extension`]).
pub fn orchestrate_for_extension(rules: &[&PathCondition]) -> NamedJob {
    orchestrate_impl(rules, OrchestrateTarget::Extension)
}
143
/// Builds the `orchestrate` job: a single bash step that diffs the change
/// set against a base revision, evaluates each [`PathCondition`] with grep,
/// and (for the Zed repo) computes a nextest package filterset and the list
/// of changed extensions. Every result is exposed as a step/job output.
fn orchestrate_impl(rules: &[&PathCondition], target: OrchestrateTarget) -> NamedJob {
    let name = "orchestrate".to_owned();
    let step_name = "filter".to_owned();
    let mut script = String::new();

    // Preamble: pick the comparison revision (merge-base for PRs, HEAD~1 for
    // direct pushes) and collect the changed file list into $CHANGED_FILES.
    script.push_str(indoc::indoc! {r#"
        set -euo pipefail
        if [ -z "$GITHUB_BASE_REF" ]; then
          echo "Not in a PR context (i.e., push to main/stable/preview)"
          COMPARE_REV="$(git rev-parse HEAD~1)"
        else
          echo "In a PR context comparing to pull_request.base.ref"
          git fetch origin "$GITHUB_BASE_REF" --depth=350
          COMPARE_REV="$(git merge-base "origin/${GITHUB_BASE_REF}" HEAD)"
        fi
        CHANGED_FILES="$(git diff --name-only "$COMPARE_REV" "$GITHUB_SHA")"

    "#});

    // Extension repos run this from a subdirectory: restrict and re-root the
    // changed file paths so the rule regexes see directory-relative paths.
    if target == OrchestrateTarget::Extension {
        script.push_str(indoc::indoc! {r#"
        # When running from a subdirectory, git diff returns repo-root-relative paths.
        # Filter to only files within the current working directory and strip the prefix.
        REPO_SUBDIR="$(git rev-parse --show-prefix)"
        REPO_SUBDIR="${REPO_SUBDIR%/}"
        if [ -n "$REPO_SUBDIR" ]; then
            CHANGED_FILES="$(echo "$CHANGED_FILES" | grep "^${REPO_SUBDIR}/" | sed "s|^${REPO_SUBDIR}/||" || true)"
        fi

    "#});
    }

    // Helper used by the per-rule lines appended below; writes
    // "<name>=true|false" to $GITHUB_OUTPUT depending on a grep match.
    script.push_str(indoc::indoc! {r#"
        check_pattern() {
          local output_name="$1"
          local pattern="$2"
          local grep_arg="$3"

          echo "$CHANGED_FILES" | grep "$grep_arg" "$pattern" && \
            echo "${output_name}=true" >> "$GITHUB_OUTPUT" || \
            echo "${output_name}=false" >> "$GITHUB_OUTPUT"
        }

    "#});

    // Job-level outputs, each forwarding the matching step output.
    let mut outputs = IndexMap::new();

    if target == OrchestrateTarget::ZedRepo {
        // Compute `changed_packages`: empty means "run everything", otherwise
        // a nextest filterset of rdeps(<pkg>) terms built from the changed
        // crates/ and tooling/ directories (mapped to package names via
        // `cargo metadata` + jq).
        script.push_str(indoc::indoc! {r#"
        # Check for changes that require full rebuild (no filter)
        # Direct pushes to main/stable/preview always run full suite
        if [ -z "$GITHUB_BASE_REF" ]; then
          echo "Not a PR, running full test suite"
          echo "changed_packages=" >> "$GITHUB_OUTPUT"
        elif echo "$CHANGED_FILES" | grep -qP '^(rust-toolchain\.toml|\.cargo/|\.github/|Cargo\.(toml|lock)$)'; then
          echo "Toolchain, cargo config, or root Cargo files changed, will run all tests"
          echo "changed_packages=" >> "$GITHUB_OUTPUT"
        else
          # Extract changed directories from file paths
          CHANGED_DIRS=$(echo "$CHANGED_FILES" | \
            grep -oP '^(crates|tooling)/\K[^/]+' | \
            sort -u || true)

          # Build directory-to-package mapping using cargo metadata
          DIR_TO_PKG=$(cargo metadata --format-version=1 --no-deps 2>/dev/null | \
            jq -r '.packages[] | select(.manifest_path | test("crates/|tooling/")) | "\(.manifest_path | capture("(crates|tooling)/(?<dir>[^/]+)") | .dir)=\(.name)"')

          # Map directory names to package names
          FILE_CHANGED_PKGS=""
          for dir in $CHANGED_DIRS; do
            pkg=$(echo "$DIR_TO_PKG" | grep "^${dir}=" | cut -d= -f2 | head -1)
            if [ -n "$pkg" ]; then
              FILE_CHANGED_PKGS=$(printf '%s\n%s' "$FILE_CHANGED_PKGS" "$pkg")
            else
              # Fall back to directory name if no mapping found
              FILE_CHANGED_PKGS=$(printf '%s\n%s' "$FILE_CHANGED_PKGS" "$dir")
            fi
          done
          FILE_CHANGED_PKGS=$(echo "$FILE_CHANGED_PKGS" | grep -v '^$' | sort -u || true)

          # If assets/ changed, add crates that depend on those assets
          if echo "$CHANGED_FILES" | grep -qP '^assets/'; then
            FILE_CHANGED_PKGS=$(printf '%s\n%s\n%s\n%s' "$FILE_CHANGED_PKGS" "settings" "storybook" "assets" | sort -u)
          fi

          # Combine all changed packages
          ALL_CHANGED_PKGS=$(echo "$FILE_CHANGED_PKGS" | grep -v '^$' || true)

          if [ -z "$ALL_CHANGED_PKGS" ]; then
            echo "No package changes detected, will run all tests"
            echo "changed_packages=" >> "$GITHUB_OUTPUT"
          else
            # Build nextest filterset with rdeps for each package
            FILTERSET=$(echo "$ALL_CHANGED_PKGS" | \
              sed 's/.*/rdeps(&)/' | \
              tr '\n' '|' | \
              sed 's/|$//')
            echo "Changed packages filterset: $FILTERSET"
            echo "changed_packages=$FILTERSET" >> "$GITHUB_OUTPUT"
          fi
        fi

    "#});

        outputs.insert(
            "changed_packages".to_owned(),
            format!("${{{{ steps.{}.outputs.changed_packages }}}}", step_name),
        );
    }

    // One check_pattern invocation (and one job output) per rule. The
    // asserts enforce that each rule is claimed by exactly one orchestrate
    // job and that rule names are unique.
    for rule in rules {
        assert!(
            rule.set_by_step
                .borrow_mut()
                .replace(name.clone())
                .is_none()
        );
        assert!(
            outputs
                .insert(
                    rule.name.to_owned(),
                    format!("${{{{ steps.{}.outputs.{} }}}}", step_name, rule.name)
                )
                .is_none()
        );

        // -v inverts the match for PathCondition::inverted rules; -q keeps
        // grep quiet, -P enables Perl regex (lookaheads are used above).
        let grep_arg = if rule.invert { "-qvP" } else { "-qP" };
        script.push_str(&format!(
            "check_pattern \"{}\" '{}' {}\n",
            rule.name, rule.pattern, grep_arg
        ));
    }

    // Zed repo only: detect changed extension directories and publish them
    // as a JSON array for the extension_tests matrix.
    if target == OrchestrateTarget::ZedRepo {
        script.push_str(DETECT_CHANGED_EXTENSIONS_SCRIPT);
        script.push_str("echo \"changed_extensions=$EXTENSIONS_JSON\" >> \"$GITHUB_OUTPUT\"\n");

        outputs.insert(
            "changed_extensions".to_owned(),
            format!("${{{{ steps.{}.outputs.changed_extensions }}}}", step_name),
        );
    }

    let job = Job::default()
        .runs_on(runners::LINUX_SMALL)
        .with_repository_owner_guard()
        .outputs(outputs)
        // Deep history is required so merge-base/HEAD~1 resolve above.
        .add_step(steps::checkout_repo().with_deep_history_on_non_main())
        .add_step(Step::new(step_name.clone()).run(script).id(step_name));

    NamedJob { name, job }
}
296
297pub fn tests_pass(jobs: &[NamedJob], extra_job_names: &[&str]) -> NamedJob {
298    let mut script = String::from(indoc::indoc! {r#"
299        set +x
300        EXIT_CODE=0
301
302        check_result() {
303          echo "* $1: $2"
304          if [[ "$2" != "skipped" && "$2" != "success" ]]; then EXIT_CODE=1; fi
305        }
306
307    "#});
308
309    let all_names: Vec<&str> = jobs
310        .iter()
311        .map(|job| job.name.as_str())
312        .chain(extra_job_names.iter().copied())
313        .collect();
314
315    let env_entries: Vec<_> = all_names
316        .iter()
317        .map(|name| {
318            let env_name = format!("RESULT_{}", name.to_uppercase());
319            let env_value = format!("${{{{ needs.{}.result }}}}", name);
320            (env_name, env_value)
321        })
322        .collect();
323
324    script.push_str(
325        &all_names
326            .iter()
327            .zip(env_entries.iter())
328            .map(|(name, (env_name, _))| format!("check_result \"{}\" \"${}\"", name, env_name))
329            .collect::<Vec<_>>()
330            .join("\n"),
331    );
332
333    script.push_str("\n\nexit $EXIT_CODE\n");
334
335    let job = Job::default()
336        .runs_on(runners::LINUX_SMALL)
337        .needs(
338            all_names
339                .iter()
340                .map(|name| name.to_string())
341                .collect::<Vec<String>>(),
342        )
343        .cond(repository_owner_guard_expression(true))
344        .add_step(
345            env_entries
346                .into_iter()
347                .fold(named::bash(&script), |step, env_item| {
348                    step.add_env(env_item)
349                }),
350        );
351
352    named::job(job)
353}
354
/// Bash script snippet that detects changed extension directories from `$CHANGED_FILES`.
/// Assumes `$CHANGED_FILES` is already set. Sets `$EXTENSIONS_JSON` to a JSON array of
/// changed extension paths. Callers are responsible for writing the result to `$GITHUB_OUTPUT`.
///
/// `extensions/workflows` is excluded because it holds shared workflow
/// definitions rather than an extension; when nothing matches, the result is
/// an empty JSON array (`[]`).
pub(crate) const DETECT_CHANGED_EXTENSIONS_SCRIPT: &str = indoc::indoc! {r#"
    # Detect changed extension directories (excluding extensions/workflows)
    CHANGED_EXTENSIONS=$(echo "$CHANGED_FILES" | grep -oP '^extensions/[^/]+(?=/)' | sort -u | grep -v '^extensions/workflows$' || true)
    if [ -n "$CHANGED_EXTENSIONS" ]; then
        EXTENSIONS_JSON=$(echo "$CHANGED_EXTENSIONS" | jq -R -s -c 'split("\n") | map(select(length > 0))')
    else
        EXTENSIONS_JSON="[]"
    fi
"#};
367
// Release asset name of the prebuilt ts_query_ls binary (Linux x86_64).
const TS_QUERY_LS_FILE: &str = "ts_query_ls-x86_64-unknown-linux-gnu.tar.gz";
// Pinned ts_query_ls release tag fetched in CI.
const CI_TS_QUERY_RELEASE: &str = "tags/v3.15.1";
370
/// Downloads the pinned `ts_query_ls` release asset from GitHub so CI can
/// check tree-sitter query formatting without building the tool from source.
pub(crate) fn fetch_ts_query_ls() -> Step<Use> {
    named::uses(
        "dsaltares",
        "fetch-gh-release-asset",
        "aa37ae5c44d3c9820bc12fe675e8670ecd93bd1c",
    ) // v1.1.1
    .add_with(("repo", "ribru17/ts_query_ls"))
    .add_with(("version", CI_TS_QUERY_RELEASE))
    .add_with(("file", TS_QUERY_LS_FILE))
}
381
/// Unpacks the fetched `ts_query_ls` archive and runs `format --check` over
/// the repo, printing remediation instructions before failing when any
/// queries are unformatted.
pub(crate) fn run_ts_query_ls() -> Step<Run> {
    named::bash(formatdoc!(
        r#"tar -xf "$GITHUB_WORKSPACE/{TS_QUERY_LS_FILE}" -C "$GITHUB_WORKSPACE"
        "$GITHUB_WORKSPACE/ts_query_ls" format --check . || {{
            echo "Found unformatted queries, please format them with ts_query_ls."
            echo "For easy use, install the Tree-sitter query extension:"
            echo "zed://extension/tree-sitter-query"
            false
        }}"#
    ))
}
393
/// Style checks: prettier, rustfmt, TODO/keymap scripts, typo detection, and
/// tree-sitter query formatting.
// NOTE(review): the `named::*` helpers appear to derive step/job names from
// the enclosing function name — confirm before inlining these nested fns.
fn check_style() -> NamedJob {
    // Typo scan over the repo, configured by ./typos.toml.
    fn check_for_typos() -> Step<Use> {
        named::uses(
            "crate-ci",
            "typos",
            "2d0ce569feab1f8752f1dde43cc2f2aa53236e06",
        ) // v1.40.0
        .with(("config", "./typos.toml"))
    }

    named::job(
        release_job(&[])
            .runs_on(runners::LINUX_MEDIUM)
            .add_step(steps::checkout_repo())
            .add_step(steps::cache_rust_dependencies_namespace())
            .add_step(steps::setup_pnpm())
            .add_step(steps::prettier())
            .add_step(steps::cargo_fmt())
            .add_step(steps::script("./script/check-todos"))
            .add_step(steps::script("./script/check-keymaps"))
            .add_step(check_for_typos())
            .add_step(fetch_ts_query_ls())
            .add_step(run_ts_query_ls()),
    )
}
419
/// Dependency hygiene: unused-dependency detection (cargo-machete), a
/// `Cargo.lock` freshness check, and GitHub's dependency review on PRs.
fn check_dependencies() -> NamedJob {
    // Installs a pinned cargo-machete via the rs-cargo action.
    fn install_cargo_machete() -> Step<Use> {
        named::uses(
            "clechasseur",
            "rs-cargo",
            "8435b10f6e71c2e3d4d3b7573003a8ce4bfc6386", // v2
        )
        .add_with(("command", "install"))
        .add_with(("args", "cargo-machete@0.7.0"))
    }

    // Runs `cargo machete` to flag unused dependencies.
    fn run_cargo_machete() -> Step<Use> {
        named::uses(
            "clechasseur",
            "rs-cargo",
            "8435b10f6e71c2e3d4d3b7573003a8ce4bfc6386", // v2
        )
        .add_with(("command", "machete"))
    }

    // `--locked` makes cargo fail if Cargo.lock is out of date.
    fn check_cargo_lock() -> Step<Run> {
        named::bash("cargo update --locked --workspace")
    }

    // GitHub dependency review; only meaningful (and only run) on PRs.
    // License checking is disabled here — handled by check_licenses.
    fn check_vulnerable_dependencies() -> Step<Use> {
        named::uses(
            "actions",
            "dependency-review-action",
            "67d4f4bd7a9b17a0db54d2a7519187c65e339de8", // v4
        )
        .if_condition(Expression::new("github.event_name == 'pull_request'"))
        .with(("license-check", false))
    }

    named::job(use_clang(
        release_job(&[])
            .runs_on(runners::LINUX_SMALL)
            .add_step(steps::checkout_repo())
            .add_step(steps::cache_rust_dependencies_namespace())
            .add_step(install_cargo_machete())
            .add_step(run_cargo_machete())
            .add_step(check_cargo_lock())
            .add_step(check_vulnerable_dependencies()),
    ))
}
465
/// Verifies that `gpui_platform` type-checks for wasm32-unknown-unknown,
/// using a nightly toolchain with `-Zbuild-std` (std must be rebuilt with
/// the atomics/bulk-memory target features enabled).
fn check_wasm() -> NamedJob {
    // Nightly is required for -Zbuild-std; rust-src provides std's sources.
    fn install_nightly_wasm_toolchain() -> Step<Run> {
        named::bash(
            "rustup toolchain install nightly --component rust-src --target wasm32-unknown-unknown",
        )
    }

    // cargo check only — no wasm artifacts are produced or uploaded.
    fn cargo_check_wasm() -> Step<Run> {
        named::bash(concat!(
            "cargo +nightly -Zbuild-std=std,panic_abort ",
            "check --target wasm32-unknown-unknown -p gpui_platform",
        ))
        .add_env((
            "CARGO_TARGET_WASM32_UNKNOWN_UNKNOWN_RUSTFLAGS",
            "-C target-feature=+atomics,+bulk-memory,+mutable-globals",
        ))
    }

    named::job(
        release_job(&[])
            .runs_on(runners::LINUX_LARGE)
            .add_step(steps::checkout_repo())
            .add_step(steps::setup_cargo_config(Platform::Linux))
            .add_step(steps::cache_rust_dependencies_namespace())
            .add_step(install_nightly_wasm_toolchain())
            .add_step(steps::setup_sccache(Platform::Linux))
            .add_step(cargo_check_wasm())
            .add_step(steps::show_sccache_stats(Platform::Linux))
            .add_step(steps::cleanup_cargo_config(Platform::Linux)),
    )
}
497
/// Builds collab plus every workspace binary and example on Linux, to catch
/// compile breakage in targets the test jobs don't exercise.
fn check_workspace_binaries() -> NamedJob {
    named::job(use_clang(
        release_job(&[])
            .runs_on(runners::LINUX_LARGE)
            .add_step(steps::checkout_repo())
            .add_step(steps::setup_cargo_config(Platform::Linux))
            .add_step(steps::cache_rust_dependencies_namespace())
            .map(steps::install_linux_dependencies)
            .add_step(steps::setup_sccache(Platform::Linux))
            // collab is built separately before the workspace-wide build.
            .add_step(steps::script("cargo build -p collab"))
            .add_step(steps::script("cargo build --workspace --bins --examples"))
            .add_step(steps::show_sccache_stats(Platform::Linux))
            .add_step(steps::cleanup_cargo_config(Platform::Linux)),
    ))
}
513
514pub(crate) fn clippy(platform: Platform, arch: Option<Arch>) -> NamedJob {
515    let target = arch.map(|arch| match (platform, arch) {
516        (Platform::Mac, Arch::X86_64) => "x86_64-apple-darwin",
517        (Platform::Mac, Arch::AARCH64) => "aarch64-apple-darwin",
518        _ => unimplemented!("cross-arch clippy not supported for {platform}/{arch}"),
519    });
520    let runner = match platform {
521        Platform::Windows => runners::WINDOWS_DEFAULT,
522        Platform::Linux => runners::LINUX_DEFAULT,
523        Platform::Mac => runners::MAC_DEFAULT,
524    };
525    let mut job = release_job(&[])
526        .runs_on(runner)
527        .add_step(steps::checkout_repo())
528        .add_step(steps::setup_cargo_config(platform))
529        .when(
530            platform == Platform::Linux || platform == Platform::Mac,
531            |this| this.add_step(steps::cache_rust_dependencies_namespace()),
532        )
533        .when(
534            platform == Platform::Linux,
535            steps::install_linux_dependencies,
536        )
537        .when_some(target, |this, target| {
538            this.add_step(steps::install_rustup_target(target))
539        })
540        .add_step(steps::setup_sccache(platform))
541        .add_step(steps::clippy(platform, target))
542        .add_step(steps::show_sccache_stats(platform));
543    if platform == Platform::Linux {
544        job = use_clang(job);
545    }
546    let name = match arch {
547        Some(arch) => format!("clippy_{platform}_{arch}"),
548        None => format!("clippy_{platform}"),
549    };
550    NamedJob { name, job }
551}
552
/// Per-platform test job that filters the suite to packages affected by the
/// change set (via the `orchestrate` job's `changed_packages` output).
pub(crate) fn run_platform_tests(platform: Platform) -> NamedJob {
    run_platform_tests_impl(platform, true)
}
556
/// Per-platform test job that always runs the full suite, ignoring the
/// changed-packages filter.
pub(crate) fn run_platform_tests_no_filter(platform: Platform) -> NamedJob {
    run_platform_tests_impl(platform, false)
}
560
/// Shared builder for the per-platform test jobs.
///
/// When `filter_packages` is true the nextest invocation is restricted to
/// the filterset published by the `orchestrate` job; otherwise the whole
/// suite runs.
fn run_platform_tests_impl(platform: Platform, filter_packages: bool) -> NamedJob {
    let runner = match platform {
        Platform::Windows => runners::WINDOWS_DEFAULT,
        Platform::Linux => runners::LINUX_DEFAULT,
        Platform::Mac => runners::MAC_DEFAULT,
    };
    NamedJob {
        name: format!("run_tests_{platform}"),
        job: release_job(&[])
            .runs_on(runner)
            // A postgres service container is only needed on Linux.
            .when(platform == Platform::Linux, |job| {
                job.add_service(
                    "postgres",
                    Container::new("postgres:15")
                        .add_env(("POSTGRES_HOST_AUTH_METHOD", "trust"))
                        .ports(vec![Port::Name("5432:5432".into())])
                        .options(
                            "--health-cmd pg_isready \
                             --health-interval 500ms \
                             --health-timeout 5s \
                             --health-retries 10",
                        ),
                )
            })
            .add_step(steps::checkout_repo())
            .add_step(steps::setup_cargo_config(platform))
            .when(platform == Platform::Mac, |this| {
                this.add_step(steps::cache_rust_dependencies_namespace())
            })
            // Linux additionally compiles with clang (use_clang wraps the job).
            .when(platform == Platform::Linux, |this| {
                use_clang(this.add_step(steps::cache_rust_dependencies_namespace()))
            })
            .when(
                platform == Platform::Linux,
                steps::install_linux_dependencies,
            )
            .add_step(steps::setup_node())
            // nextest is installed here on Linux/macOS only.
            .when(
                platform == Platform::Linux || platform == Platform::Mac,
                |job| job.add_step(steps::cargo_install_nextest()),
            )
            .add_step(steps::clear_target_dir_if_large(platform))
            .add_step(steps::setup_sccache(platform))
            // Exactly one of these two nextest steps is added, depending on
            // whether the orchestrate changed-packages filter should apply.
            .when(filter_packages, |job| {
                job.add_step(
                    steps::cargo_nextest(platform).with_changed_packages_filter("orchestrate"),
                )
            })
            .when(!filter_packages, |job| {
                job.add_step(steps::cargo_nextest(platform))
            })
            .add_step(steps::show_sccache_stats(platform))
            .add_step(steps::cleanup_cargo_config(platform)),
    }
}
616
/// Checks protobuf backwards-compatibility and formatting with buf,
/// comparing the PR's protos against the base branch.
pub(crate) fn check_postgres_and_protobuf_migrations() -> NamedJob {
    // Determines the comparison ref for buf: the merge-base with main for
    // direct pushes, or the PR base branch after merging it into a temp
    // branch (so breaking checks run against an up-to-date merge result).
    fn ensure_fresh_merge() -> Step<Run> {
        named::bash(indoc::indoc! {r#"
            if [ -z "$GITHUB_BASE_REF" ];
            then
              echo "BUF_BASE_BRANCH=$(git merge-base origin/main HEAD)" >> "$GITHUB_ENV"
            else
              git checkout -B temp
              git merge -q "origin/$GITHUB_BASE_REF" -m "merge main into temp"
              echo "BUF_BASE_BRANCH=$GITHUB_BASE_REF" >> "$GITHUB_ENV"
            fi
        "#})
    }

    // Installs a pinned buf CLI.
    fn bufbuild_setup_action() -> Step<Use> {
        named::uses("bufbuild", "buf-setup-action", "v1")
            .add_with(("version", "v1.29.0"))
            .add_with(("github_token", vars::GITHUB_TOKEN))
    }

    // Breaking-change check against the base branch's protos.
    // NOTE(review): `${GITHUB_REPOSITORY}`/`${BUF_BASE_BRANCH}` rely on the
    // action expanding env vars in its inputs; plain YAML `with:` does not
    // expand shell-style variables — confirm the action supports this.
    fn bufbuild_breaking_action() -> Step<Use> {
        named::uses("bufbuild", "buf-breaking-action", "v1").add_with(("input", "crates/proto/proto/"))
            .add_with(("against", "https://github.com/${GITHUB_REPOSITORY}.git#branch=${BUF_BASE_BRANCH},subdir=crates/proto/proto/"))
    }

    // Lints proto style rules.
    fn buf_lint() -> Step<Run> {
        named::bash("buf lint crates/proto/proto")
    }

    // Fails if protos are not buf-formatted.
    fn check_protobuf_formatting() -> Step<Run> {
        named::bash("buf format --diff --exit-code crates/proto/proto")
    }

    named::job(
        release_job(&[])
            .runs_on(runners::LINUX_DEFAULT)
            // Identity for the temp merge commit created by ensure_fresh_merge.
            .add_env(("GIT_AUTHOR_NAME", "Protobuf Action"))
            .add_env(("GIT_AUTHOR_EMAIL", "ci@zed.dev"))
            .add_env(("GIT_COMMITTER_NAME", "Protobuf Action"))
            .add_env(("GIT_COMMITTER_EMAIL", "ci@zed.dev"))
            // Full history needed for merge-base / merging the base branch.
            .add_step(steps::checkout_repo().with_full_history())
            .add_step(ensure_fresh_merge())
            .add_step(bufbuild_setup_action())
            .add_step(bufbuild_breaking_action())
            .add_step(buf_lint())
            .add_step(check_protobuf_formatting()),
    )
}
665
/// Runs the workspace's documentation tests on Linux.
fn doctests() -> NamedJob {
    // --no-fail-fast so all doctest failures are reported in one run.
    fn run_doctests() -> Step<Run> {
        named::bash(indoc::indoc! {r#"
            cargo test --workspace --doc --no-fail-fast
        "#})
        .id("run_doctests")
    }

    named::job(use_clang(
        release_job(&[])
            .runs_on(runners::LINUX_DEFAULT)
            .add_step(steps::checkout_repo())
            .add_step(steps::cache_rust_dependencies_namespace())
            .map(steps::install_linux_dependencies)
            .add_step(steps::setup_cargo_config(Platform::Linux))
            .add_step(steps::setup_sccache(Platform::Linux))
            .add_step(run_doctests())
            .add_step(steps::show_sccache_stats(Platform::Linux))
            .add_step(steps::cleanup_cargo_config(Platform::Linux)),
    ))
}
687
688fn check_licenses() -> NamedJob {
689    named::job(
690        Job::default()
691            .runs_on(runners::LINUX_SMALL)
692            .add_step(steps::checkout_repo())
693            .add_step(steps::cache_rust_dependencies_namespace())
694            .add_step(steps::script("./script/check-licenses"))
695            .add_step(steps::script("./script/generate-licenses")),
696    )
697}
698
/// Builds the mdBook documentation and checks internal links, both in the
/// markdown sources and in the generated HTML.
fn check_docs() -> NamedJob {
    // Link check via lychee; external (http) links are excluded so the job
    // stays deterministic and offline-safe.
    fn lychee_link_check(dir: &str) -> Step<Use> {
        named::uses(
            "lycheeverse",
            "lychee-action",
            "82202e5e9c2f4ef1a55a3d02563e1cb6041e5332",
        ) // v2.4.1
        .add_with(("args", format!("--no-progress --exclude '^http' '{dir}'")))
        .add_with(("fail", true))
        .add_with(("jobSummary", false))
    }

    // Installs a pinned mdBook.
    fn install_mdbook() -> Step<Use> {
        named::uses(
            "peaceiris",
            "actions-mdbook",
            "ee69d230fe19748b7abf22df32acaa93833fad08", // v2
        )
        .with(("mdbook-version", "0.4.37"))
    }

    // Renders the book into target/deploy/docs (dest-dir is relative to
    // the book root, hence the ../ prefix).
    fn build_docs() -> Step<Run> {
        named::bash(indoc::indoc! {r#"
            mkdir -p target/deploy
            mdbook build ./docs --dest-dir=../target/deploy/docs/
        "#})
    }

    named::job(use_clang(
        release_job(&[])
            .runs_on(runners::LINUX_LARGE)
            .add_step(steps::checkout_repo())
            .add_step(steps::setup_cargo_config(Platform::Linux))
            // todo(ci): un-inline build_docs/action.yml here
            .add_step(steps::cache_rust_dependencies_namespace())
            .add_step(
                lychee_link_check("./docs/src/**/*"), // check markdown links
            )
            .map(steps::install_linux_dependencies)
            .add_step(steps::script("./script/generate-action-metadata"))
            .add_step(install_mdbook())
            .add_step(build_docs())
            .add_step(
                lychee_link_check("target/deploy/docs"), // check links in generated html
            ),
    ))
}
746
/// Lints the automation itself: shellcheck on scripts, actionlint on
/// workflows, and a check that the generated workflow files are in sync with
/// `cargo xtask workflows`.
pub(crate) fn check_scripts() -> NamedJob {
    // Downloads the actionlint binary; its step id (`get_actionlint`) is
    // read by run_actionlint below to locate the executable.
    fn download_actionlint() -> Step<Run> {
        named::bash(
            "bash <(curl https://raw.githubusercontent.com/rhysd/actionlint/main/scripts/download-actionlint.bash)",
        )
    }

    // Runs the binary downloaded by the `get_actionlint` step.
    fn run_actionlint() -> Step<Run> {
        named::bash(r#""$ACTIONLINT_BIN" -color"#).add_env((
            "ACTIONLINT_BIN",
            "${{ steps.get_actionlint.outputs.executable }}",
        ))
    }

    fn run_shellcheck() -> Step<Run> {
        named::bash("./script/shellcheck-scripts error")
    }

    // Regenerates the workflows and fails if .github has uncommitted diffs,
    // i.e. someone edited the YAML without updating the xtask source.
    fn check_xtask_workflows() -> Step<Run> {
        named::bash(indoc::indoc! {r#"
            cargo xtask workflows
            if ! git diff --exit-code .github; then
              echo "Error: .github directory has uncommitted changes after running 'cargo xtask workflows'"
              echo "Please run 'cargo xtask workflows' locally and commit the changes"
              exit 1
            fi
        "#})
    }

    named::job(
        release_job(&[])
            .runs_on(runners::LINUX_SMALL)
            .add_step(steps::checkout_repo())
            .add_step(run_shellcheck())
            .add_step(download_actionlint().id("get_actionlint"))
            .add_step(run_actionlint())
            .add_step(cache_rust_dependencies_namespace())
            .add_step(check_xtask_workflows()),
    )
}
787
/// Dispatches the reusable extension test workflow once per changed
/// extension directory, as reported by the `orchestrate` job's
/// `changed_extensions` output.
fn extension_tests() -> NamedJob<UsesJob> {
    let job = Job::default()
        .needs(vec!["orchestrate".to_owned()])
        // Skip the whole matrix when no extension directories changed.
        .cond(Expression::new(
            "needs.orchestrate.outputs.changed_extensions != '[]'",
        ))
        .permissions(Permissions::default().contents(Level::Read))
        .strategy(
            Strategy::default()
                .fail_fast(false)
                // TODO: Remove the limit. We currently need this to workaround the concurrency group issue
                // where different matrix jobs would be placed in the same concurrency group and thus cancelled.
                .max_parallel(1u32)
                .matrix(json!({
                    "extension": "${{ fromJson(needs.orchestrate.outputs.changed_extensions) }}"
                })),
        )
        // Each matrix entry runs the shared workflow scoped to its directory.
        .uses_local(".github/workflows/extension_tests.yml")
        .with(Input::default().add("working-directory", "${{ matrix.extension }}"));

    named::job(job)
}