run_tests.rs

  1use gh_workflow::{
  2    Concurrency, Container, Event, Expression, Input, Job, Level, Permissions, Port, PullRequest,
  3    Push, Run, Step, Strategy, Use, UsesJob, Workflow,
  4};
  5use indexmap::IndexMap;
  6use indoc::formatdoc;
  7use serde_json::json;
  8
  9use crate::tasks::workflows::{
 10    steps::{
 11        CommonJobConditions, cache_rust_dependencies_namespace, repository_owner_guard_expression,
 12        use_clang,
 13    },
 14    vars::{self, PathCondition},
 15};
 16
 17use super::{
 18    runners::{self, Platform},
 19    steps::{self, FluentBuilder, NamedJob, named, release_job},
 20};
 21
 22pub(crate) fn run_tests() -> Workflow {
 23    // Specify anything which should potentially skip full test suite in this regex:
 24    // - docs/
 25    // - script/update_top_ranking_issues/
 26    // - .github/ISSUE_TEMPLATE/
 27    // - .github/workflows/  (except .github/workflows/ci.yml)
 28    // - extensions/  (these have their own test workflow)
 29    let should_run_tests = PathCondition::inverted(
 30        "run_tests",
 31        r"^(docs/|script/update_top_ranking_issues/|\.github/(ISSUE_TEMPLATE|workflows/(?!run_tests))|extensions/)",
 32    );
 33    let should_check_docs = PathCondition::new("run_docs", r"^(docs/|crates/.*\.rs)");
 34    let should_check_scripts = PathCondition::new(
 35        "run_action_checks",
 36        r"^\.github/(workflows/|actions/|actionlint.yml)|tooling/xtask|script/",
 37    );
 38    let should_check_licences =
 39        PathCondition::new("run_licenses", r"^(Cargo.lock|script/.*licenses)");
 40
 41    let orchestrate = orchestrate(&[
 42        &should_check_scripts,
 43        &should_check_docs,
 44        &should_check_licences,
 45        &should_run_tests,
 46    ]);
 47
 48    let mut jobs = vec![
 49        orchestrate,
 50        check_style(),
 51        should_run_tests.guard(clippy(Platform::Windows)),
 52        should_run_tests.guard(clippy(Platform::Linux)),
 53        should_run_tests.guard(clippy(Platform::Mac)),
 54        should_run_tests.guard(run_platform_tests(Platform::Windows)),
 55        should_run_tests.guard(run_platform_tests(Platform::Linux)),
 56        should_run_tests.guard(run_platform_tests(Platform::Mac)),
 57        should_run_tests.guard(doctests()),
 58        should_run_tests.guard(check_workspace_binaries()),
 59        should_run_tests.guard(check_wasm()),
 60        should_run_tests.guard(check_dependencies()), // could be more specific here?
 61        should_check_docs.guard(check_docs()),
 62        should_check_licences.guard(check_licenses()),
 63        should_check_scripts.guard(check_scripts()),
 64    ];
 65    let ext_tests = extension_tests();
 66    let tests_pass = tests_pass(&jobs, &[&ext_tests.name]);
 67
 68    jobs.push(should_run_tests.guard(check_postgres_and_protobuf_migrations())); // could be more specific here?
 69
 70    named::workflow()
 71        .add_event(
 72            Event::default()
 73                .push(
 74                    Push::default()
 75                        .add_branch("main")
 76                        .add_branch("v[0-9]+.[0-9]+.x"),
 77                )
 78                .pull_request(PullRequest::default().add_branch("**")),
 79        )
 80        .concurrency(
 81            Concurrency::default()
 82                .group(concat!(
 83                    "${{ github.workflow }}-${{ github.ref_name }}-",
 84                    "${{ github.ref_name == 'main' && github.sha || 'anysha' }}"
 85                ))
 86                .cancel_in_progress(true),
 87        )
 88        .add_env(("CARGO_TERM_COLOR", "always"))
 89        .add_env(("RUST_BACKTRACE", 1))
 90        .add_env(("CARGO_INCREMENTAL", 0))
 91        .map(|mut workflow| {
 92            for job in jobs {
 93                workflow = workflow.add_job(job.name, job.job)
 94            }
 95            workflow
 96        })
 97        .add_job(ext_tests.name, ext_tests.job)
 98        .add_job(tests_pass.name, tests_pass.job)
 99}
100
/// Controls which features `orchestrate_impl` includes in the generated script.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum OrchestrateTarget {
    /// For the main Zed repo: includes the cargo package filter and extension
    /// change detection, but no working-directory scoping.
    ZedRepo,
    /// For individual extension repos: scopes changed-file detection to the
    /// working directory, with no package filter or extension detection.
    Extension,
}
111
// Generates a bash script that checks changed files against regex patterns
// and sets GitHub output variables accordingly
/// Orchestrate job for the main Zed repo: includes the cargo package filter
/// and extension change detection on top of the per-rule path checks.
pub fn orchestrate(rules: &[&PathCondition]) -> NamedJob {
    orchestrate_impl(rules, OrchestrateTarget::ZedRepo)
}
117
/// Orchestrate job for individual extension repos: changed-file detection is
/// scoped to the working directory; no package filter or extension detection.
pub fn orchestrate_for_extension(rules: &[&PathCondition]) -> NamedJob {
    orchestrate_impl(rules, OrchestrateTarget::Extension)
}
121
/// Builds the shared `orchestrate` job: a single bash step (`filter`) that
/// computes `CHANGED_FILES` (diff against the PR merge-base, or `HEAD~1` for
/// direct pushes), evaluates each [`PathCondition`] against it, and writes
/// one `true`/`false` job output per rule. For
/// [`OrchestrateTarget::ZedRepo`] it additionally emits a `changed_packages`
/// nextest filterset and a `changed_extensions` JSON array.
///
/// Side effect: stores this job's name into each rule's `set_by_step` cell
/// so guarded jobs know which job's outputs to read.
///
/// # Panics
/// If a rule was already registered with another orchestrate job, or if two
/// rules share an output name.
fn orchestrate_impl(rules: &[&PathCondition], target: OrchestrateTarget) -> NamedJob {
    let name = "orchestrate".to_owned();
    let step_name = "filter".to_owned();
    let mut script = String::new();

    // Compute COMPARE_REV and CHANGED_FILES.
    script.push_str(indoc::indoc! {r#"
        set -euo pipefail
        if [ -z "$GITHUB_BASE_REF" ]; then
          echo "Not in a PR context (i.e., push to main/stable/preview)"
          COMPARE_REV="$(git rev-parse HEAD~1)"
        else
          echo "In a PR context comparing to pull_request.base.ref"
          git fetch origin "$GITHUB_BASE_REF" --depth=350
          COMPARE_REV="$(git merge-base "origin/${GITHUB_BASE_REF}" HEAD)"
        fi
        CHANGED_FILES="$(git diff --name-only "$COMPARE_REV" "$GITHUB_SHA")"

    "#});

    // Extension repos run this from a subdirectory: re-root the diff paths.
    if target == OrchestrateTarget::Extension {
        script.push_str(indoc::indoc! {r#"
        # When running from a subdirectory, git diff returns repo-root-relative paths.
        # Filter to only files within the current working directory and strip the prefix.
        REPO_SUBDIR="$(git rev-parse --show-prefix)"
        REPO_SUBDIR="${REPO_SUBDIR%/}"
        if [ -n "$REPO_SUBDIR" ]; then
            CHANGED_FILES="$(echo "$CHANGED_FILES" | grep "^${REPO_SUBDIR}/" | sed "s|^${REPO_SUBDIR}/||" || true)"
        fi

    "#});
    }

    // Helper used once per rule below: writes `<name>=true|false` to
    // $GITHUB_OUTPUT depending on whether the pattern matched.
    script.push_str(indoc::indoc! {r#"
        check_pattern() {
          local output_name="$1"
          local pattern="$2"
          local grep_arg="$3"

          echo "$CHANGED_FILES" | grep "$grep_arg" "$pattern" && \
            echo "${output_name}=true" >> "$GITHUB_OUTPUT" || \
            echo "${output_name}=false" >> "$GITHUB_OUTPUT"
        }

    "#});

    let mut outputs = IndexMap::new();

    // Zed repo only: derive a nextest filterset of changed packages. An
    // empty `changed_packages` output means "run the full suite".
    if target == OrchestrateTarget::ZedRepo {
        script.push_str(indoc::indoc! {r#"
        # Check for changes that require full rebuild (no filter)
        # Direct pushes to main/stable/preview always run full suite
        if [ -z "$GITHUB_BASE_REF" ]; then
          echo "Not a PR, running full test suite"
          echo "changed_packages=" >> "$GITHUB_OUTPUT"
        elif echo "$CHANGED_FILES" | grep -qP '^(rust-toolchain\.toml|\.cargo/|\.github/|Cargo\.(toml|lock)$)'; then
          echo "Toolchain, cargo config, or root Cargo files changed, will run all tests"
          echo "changed_packages=" >> "$GITHUB_OUTPUT"
        else
          # Extract changed directories from file paths
          CHANGED_DIRS=$(echo "$CHANGED_FILES" | \
            grep -oP '^(crates|tooling)/\K[^/]+' | \
            sort -u || true)

          # Build directory-to-package mapping using cargo metadata
          DIR_TO_PKG=$(cargo metadata --format-version=1 --no-deps 2>/dev/null | \
            jq -r '.packages[] | select(.manifest_path | test("crates/|tooling/")) | "\(.manifest_path | capture("(crates|tooling)/(?<dir>[^/]+)") | .dir)=\(.name)"')

          # Map directory names to package names
          FILE_CHANGED_PKGS=""
          for dir in $CHANGED_DIRS; do
            pkg=$(echo "$DIR_TO_PKG" | grep "^${dir}=" | cut -d= -f2 | head -1)
            if [ -n "$pkg" ]; then
              FILE_CHANGED_PKGS=$(printf '%s\n%s' "$FILE_CHANGED_PKGS" "$pkg")
            else
              # Fall back to directory name if no mapping found
              FILE_CHANGED_PKGS=$(printf '%s\n%s' "$FILE_CHANGED_PKGS" "$dir")
            fi
          done
          FILE_CHANGED_PKGS=$(echo "$FILE_CHANGED_PKGS" | grep -v '^$' | sort -u || true)

          # If assets/ changed, add crates that depend on those assets
          if echo "$CHANGED_FILES" | grep -qP '^assets/'; then
            FILE_CHANGED_PKGS=$(printf '%s\n%s\n%s\n%s' "$FILE_CHANGED_PKGS" "settings" "storybook" "assets" | sort -u)
          fi

          # Combine all changed packages
          ALL_CHANGED_PKGS=$(echo "$FILE_CHANGED_PKGS" | grep -v '^$' || true)

          if [ -z "$ALL_CHANGED_PKGS" ]; then
            echo "No package changes detected, will run all tests"
            echo "changed_packages=" >> "$GITHUB_OUTPUT"
          else
            # Build nextest filterset with rdeps for each package
            FILTERSET=$(echo "$ALL_CHANGED_PKGS" | \
              sed 's/.*/rdeps(&)/' | \
              tr '\n' '|' | \
              sed 's/|$//')
            echo "Changed packages filterset: $FILTERSET"
            echo "changed_packages=$FILTERSET" >> "$GITHUB_OUTPUT"
          fi
        fi

    "#});

        outputs.insert(
            "changed_packages".to_owned(),
            format!("${{{{ steps.{}.outputs.changed_packages }}}}", step_name),
        );
    }

    for rule in rules {
        // Record which job sets this rule's output; a rule may only be
        // registered with a single orchestrate job.
        assert!(
            rule.set_by_step
                .borrow_mut()
                .replace(name.clone())
                .is_none()
        );
        // Output names must be unique across rules.
        assert!(
            outputs
                .insert(
                    rule.name.to_owned(),
                    format!("${{{{ steps.{}.outputs.{} }}}}", step_name, rule.name)
                )
                .is_none()
        );

        // Inverted rules match "any changed file OUTSIDE the pattern" via
        // grep's -v flag.
        let grep_arg = if rule.invert { "-qvP" } else { "-qP" };
        script.push_str(&format!(
            "check_pattern \"{}\" '{}' {}\n",
            rule.name, rule.pattern, grep_arg
        ));
    }

    // Zed repo only: publish the changed-extensions JSON consumed by the
    // matrixed extension test job.
    if target == OrchestrateTarget::ZedRepo {
        script.push_str(DETECT_CHANGED_EXTENSIONS_SCRIPT);
        script.push_str("echo \"changed_extensions=$EXTENSIONS_JSON\" >> \"$GITHUB_OUTPUT\"\n");

        outputs.insert(
            "changed_extensions".to_owned(),
            format!("${{{{ steps.{}.outputs.changed_extensions }}}}", step_name),
        );
    }

    let job = Job::default()
        .runs_on(runners::LINUX_SMALL)
        .with_repository_owner_guard()
        .outputs(outputs)
        // Deep history off-main so merge-base / HEAD~1 resolve.
        .add_step(steps::checkout_repo().with_deep_history_on_non_main())
        .add_step(Step::new(step_name.clone()).run(script).id(step_name));

    NamedJob { name, job }
}
274
275pub fn tests_pass(jobs: &[NamedJob], extra_job_names: &[&str]) -> NamedJob {
276    let mut script = String::from(indoc::indoc! {r#"
277        set +x
278        EXIT_CODE=0
279
280        check_result() {
281          echo "* $1: $2"
282          if [[ "$2" != "skipped" && "$2" != "success" ]]; then EXIT_CODE=1; fi
283        }
284
285    "#});
286
287    let all_names: Vec<&str> = jobs
288        .iter()
289        .map(|job| job.name.as_str())
290        .chain(extra_job_names.iter().copied())
291        .collect();
292
293    let env_entries: Vec<_> = all_names
294        .iter()
295        .map(|name| {
296            let env_name = format!("RESULT_{}", name.to_uppercase());
297            let env_value = format!("${{{{ needs.{}.result }}}}", name);
298            (env_name, env_value)
299        })
300        .collect();
301
302    script.push_str(
303        &all_names
304            .iter()
305            .zip(env_entries.iter())
306            .map(|(name, (env_name, _))| format!("check_result \"{}\" \"${}\"", name, env_name))
307            .collect::<Vec<_>>()
308            .join("\n"),
309    );
310
311    script.push_str("\n\nexit $EXIT_CODE\n");
312
313    let job = Job::default()
314        .runs_on(runners::LINUX_SMALL)
315        .needs(
316            all_names
317                .iter()
318                .map(|name| name.to_string())
319                .collect::<Vec<String>>(),
320        )
321        .cond(repository_owner_guard_expression(true))
322        .add_step(
323            env_entries
324                .into_iter()
325                .fold(named::bash(&script), |step, env_item| {
326                    step.add_env(env_item)
327                }),
328        );
329
330    named::job(job)
331}
332
/// Bash script snippet that detects changed extension directories from `$CHANGED_FILES`.
/// Assumes `$CHANGED_FILES` is already set. Sets `$EXTENSIONS_JSON` to a JSON array of
/// changed extension paths (`[]` when nothing matched); `extensions/workflows` is
/// excluded. Callers are responsible for writing the result to `$GITHUB_OUTPUT`.
pub(crate) const DETECT_CHANGED_EXTENSIONS_SCRIPT: &str = indoc::indoc! {r#"
    # Detect changed extension directories (excluding extensions/workflows)
    CHANGED_EXTENSIONS=$(echo "$CHANGED_FILES" | grep -oP '^extensions/[^/]+(?=/)' | sort -u | grep -v '^extensions/workflows$' || true)
    if [ -n "$CHANGED_EXTENSIONS" ]; then
        EXTENSIONS_JSON=$(echo "$CHANGED_EXTENSIONS" | jq -R -s -c 'split("\n") | map(select(length > 0))')
    else
        EXTENSIONS_JSON="[]"
    fi
"#};
345
/// File name of the prebuilt `ts_query_ls` Linux release asset fetched in CI.
const TS_QUERY_LS_FILE: &str = "ts_query_ls-x86_64-unknown-linux-gnu.tar.gz";
/// Pinned `ts_query_ls` release tag used by [`fetch_ts_query_ls`].
const CI_TS_QUERY_RELEASE: &str = "tags/v3.15.1";
348
/// Step that downloads the pinned `ts_query_ls` release asset
/// ([`CI_TS_QUERY_RELEASE`] / [`TS_QUERY_LS_FILE`]) from the
/// `ribru17/ts_query_ls` repo via the `fetch-gh-release-asset` action.
pub(crate) fn fetch_ts_query_ls() -> Step<Use> {
    named::uses(
        "dsaltares",
        "fetch-gh-release-asset",
        "aa37ae5c44d3c9820bc12fe675e8670ecd93bd1c",
    ) // v1.1.1
    .add_with(("repo", "ribru17/ts_query_ls"))
    .add_with(("version", CI_TS_QUERY_RELEASE))
    .add_with(("file", TS_QUERY_LS_FILE))
}
359
/// Step that unpacks the downloaded `ts_query_ls` tarball into the workspace
/// and runs `format --check` over the repo's tree-sitter queries, printing a
/// remediation hint before failing when queries are unformatted.
pub(crate) fn run_ts_query_ls() -> Step<Run> {
    named::bash(formatdoc!(
        r#"tar -xf "$GITHUB_WORKSPACE/{TS_QUERY_LS_FILE}" -C "$GITHUB_WORKSPACE"
        "$GITHUB_WORKSPACE/ts_query_ls" format --check . || {{
            echo "Found unformatted queries, please format them with ts_query_ls."
            echo "For easy use, install the Tree-sitter query extension:"
            echo "zed://extension/tree-sitter-query"
            false
        }}"#
    ))
}
371
/// Style job: prettier, `cargo fmt`, TODO/keymap checks, a typo scan, and
/// tree-sitter query formatting via `ts_query_ls`.
fn check_style() -> NamedJob {
    /// Typo scan over the repo using the pinned `typos` action with the
    /// repo-local config.
    fn check_for_typos() -> Step<Use> {
        named::uses(
            "crate-ci",
            "typos",
            "2d0ce569feab1f8752f1dde43cc2f2aa53236e06",
        ) // v1.40.0
        .with(("config", "./typos.toml"))
    }

    named::job(
        release_job(&[])
            .runs_on(runners::LINUX_MEDIUM)
            .add_step(steps::checkout_repo())
            .add_step(steps::cache_rust_dependencies_namespace())
            .add_step(steps::setup_pnpm())
            .add_step(steps::prettier())
            .add_step(steps::cargo_fmt())
            .add_step(steps::script("./script/check-todos"))
            .add_step(steps::script("./script/check-keymaps"))
            .add_step(check_for_typos())
            .add_step(fetch_ts_query_ls())
            .add_step(run_ts_query_ls()),
    )
}
397
/// Dependency hygiene job: unused dependencies (cargo-machete), lockfile
/// freshness, and GitHub's dependency review for vulnerable additions.
fn check_dependencies() -> NamedJob {
    /// Installs a pinned cargo-machete via the rs-cargo action.
    fn install_cargo_machete() -> Step<Use> {
        named::uses(
            "clechasseur",
            "rs-cargo",
            "8435b10f6e71c2e3d4d3b7573003a8ce4bfc6386", // v2
        )
        .add_with(("command", "install"))
        .add_with(("args", "cargo-machete@0.7.0"))
    }

    /// Runs `cargo machete` to flag unused dependencies.
    fn run_cargo_machete() -> Step<Use> {
        named::uses(
            "clechasseur",
            "rs-cargo",
            "8435b10f6e71c2e3d4d3b7573003a8ce4bfc6386", // v2
        )
        .add_with(("command", "machete"))
    }

    /// Fails if Cargo.lock is out of date with the workspace manifests.
    fn check_cargo_lock() -> Step<Run> {
        named::bash("cargo update --locked --workspace")
    }

    /// Dependency review (vulnerability scan) — PR-only, license check off.
    fn check_vulnerable_dependencies() -> Step<Use> {
        named::uses(
            "actions",
            "dependency-review-action",
            "67d4f4bd7a9b17a0db54d2a7519187c65e339de8", // v4
        )
        .if_condition(Expression::new("github.event_name == 'pull_request'"))
        .with(("license-check", false))
    }

    named::job(use_clang(
        release_job(&[])
            .runs_on(runners::LINUX_SMALL)
            .add_step(steps::checkout_repo())
            .add_step(steps::cache_rust_dependencies_namespace())
            .add_step(install_cargo_machete())
            .add_step(run_cargo_machete())
            .add_step(check_cargo_lock())
            .add_step(check_vulnerable_dependencies()),
    ))
}
443
/// Job that type-checks `gpui_platform` for `wasm32-unknown-unknown` on a
/// nightly toolchain, rebuilding std (`-Zbuild-std`) because the RUSTFLAGS
/// enable atomics/bulk-memory/mutable-globals target features.
fn check_wasm() -> NamedJob {
    /// Installs nightly with rust-src (needed by build-std) and the wasm target.
    fn install_nightly_wasm_toolchain() -> Step<Run> {
        named::bash(
            "rustup toolchain install nightly --component rust-src --target wasm32-unknown-unknown",
        )
    }

    /// `cargo check` of the gpui_platform crate for wasm with the custom RUSTFLAGS.
    fn cargo_check_wasm() -> Step<Run> {
        named::bash(concat!(
            "cargo +nightly -Zbuild-std=std,panic_abort ",
            "check --target wasm32-unknown-unknown -p gpui_platform",
        ))
        .add_env((
            "CARGO_TARGET_WASM32_UNKNOWN_UNKNOWN_RUSTFLAGS",
            "-C target-feature=+atomics,+bulk-memory,+mutable-globals",
        ))
    }

    named::job(
        release_job(&[])
            .runs_on(runners::LINUX_LARGE)
            .add_step(steps::checkout_repo())
            .add_step(steps::setup_cargo_config(Platform::Linux))
            .add_step(steps::cache_rust_dependencies_namespace())
            .add_step(install_nightly_wasm_toolchain())
            .add_step(steps::setup_sccache(Platform::Linux))
            .add_step(cargo_check_wasm())
            .add_step(steps::show_sccache_stats(Platform::Linux))
            .add_step(steps::cleanup_cargo_config(Platform::Linux)),
    )
}
475
/// Job that builds `collab` and then every workspace binary and example on
/// Linux, to catch compile breakage outside the tested crates.
fn check_workspace_binaries() -> NamedJob {
    named::job(use_clang(
        release_job(&[])
            .runs_on(runners::LINUX_LARGE)
            .add_step(steps::checkout_repo())
            .add_step(steps::setup_cargo_config(Platform::Linux))
            .add_step(steps::cache_rust_dependencies_namespace())
            .map(steps::install_linux_dependencies)
            .add_step(steps::setup_sccache(Platform::Linux))
            .add_step(steps::script("cargo build -p collab"))
            .add_step(steps::script("cargo build --workspace --bins --examples"))
            .add_step(steps::show_sccache_stats(Platform::Linux))
            .add_step(steps::cleanup_cargo_config(Platform::Linux)),
    ))
}
491
492pub(crate) fn clippy(platform: Platform) -> NamedJob {
493    let runner = match platform {
494        Platform::Windows => runners::WINDOWS_DEFAULT,
495        Platform::Linux => runners::LINUX_DEFAULT,
496        Platform::Mac => runners::MAC_DEFAULT,
497    };
498    let mut job = release_job(&[])
499        .runs_on(runner)
500        .add_step(steps::checkout_repo())
501        .add_step(steps::setup_cargo_config(platform))
502        .when(
503            platform == Platform::Linux || platform == Platform::Mac,
504            |this| this.add_step(steps::cache_rust_dependencies_namespace()),
505        )
506        .when(
507            platform == Platform::Linux,
508            steps::install_linux_dependencies,
509        )
510        .add_step(steps::setup_sccache(platform))
511        .add_step(steps::clippy(platform))
512        .add_step(steps::show_sccache_stats(platform));
513    if platform == Platform::Linux {
514        job = use_clang(job);
515    }
516    NamedJob {
517        name: format!("clippy_{platform}"),
518        job,
519    }
520}
521
/// Platform test job with the changed-packages nextest filter enabled (reads
/// the `orchestrate` job's `changed_packages` output).
pub(crate) fn run_platform_tests(platform: Platform) -> NamedJob {
    run_platform_tests_impl(platform, true)
}
525
/// Platform test job that always runs the full suite (no package filter).
pub(crate) fn run_platform_tests_no_filter(platform: Platform) -> NamedJob {
    run_platform_tests_impl(platform, false)
}
529
/// Builds the `run_tests_<platform>` job. Linux additionally gets a postgres
/// service container, clang, and the extra system dependencies; Linux/Mac
/// use the Namespace cache and nextest. When `filter_packages` is set the
/// nextest invocation is restricted to the `orchestrate` job's
/// changed-packages filterset.
fn run_platform_tests_impl(platform: Platform, filter_packages: bool) -> NamedJob {
    let runner = match platform {
        Platform::Windows => runners::WINDOWS_DEFAULT,
        Platform::Linux => runners::LINUX_DEFAULT,
        Platform::Mac => runners::MAC_DEFAULT,
    };
    NamedJob {
        name: format!("run_tests_{platform}"),
        job: release_job(&[])
            .runs_on(runner)
            // Linux-only postgres service with trust auth for the collab tests.
            .when(platform == Platform::Linux, |job| {
                job.add_service(
                    "postgres",
                    Container::new("postgres:15")
                        .add_env(("POSTGRES_HOST_AUTH_METHOD", "trust"))
                        .ports(vec![Port::Name("5432:5432".into())])
                        .options(
                            "--health-cmd pg_isready \
                             --health-interval 500ms \
                             --health-timeout 5s \
                             --health-retries 10",
                        ),
                )
            })
            .add_step(steps::checkout_repo())
            .add_step(steps::setup_cargo_config(platform))
            .when(platform == Platform::Mac, |this| {
                this.add_step(steps::cache_rust_dependencies_namespace())
            })
            // Linux gets the cache too, plus the clang wrapper around the job.
            .when(platform == Platform::Linux, |this| {
                use_clang(this.add_step(steps::cache_rust_dependencies_namespace()))
            })
            .when(
                platform == Platform::Linux,
                steps::install_linux_dependencies,
            )
            .add_step(steps::setup_node())
            // nextest is installed on Linux/Mac; Windows presumably has it
            // preinstalled on the runner image — TODO confirm.
            .when(
                platform == Platform::Linux || platform == Platform::Mac,
                |job| job.add_step(steps::cargo_install_nextest()),
            )
            .add_step(steps::clear_target_dir_if_large(platform))
            .add_step(steps::setup_sccache(platform))
            // Exactly one of the two nextest variants is added.
            .when(filter_packages, |job| {
                job.add_step(
                    steps::cargo_nextest(platform).with_changed_packages_filter("orchestrate"),
                )
            })
            .when(!filter_packages, |job| {
                job.add_step(steps::cargo_nextest(platform))
            })
            .add_step(steps::show_sccache_stats(platform))
            .add_step(steps::cleanup_cargo_config(platform)),
    }
}
585
/// Job that checks protobuf backwards-compatibility (buf breaking/lint/
/// format) against the PR base branch, merging it in first so the diff is
/// computed from a fresh merge.
pub(crate) fn check_postgres_and_protobuf_migrations() -> NamedJob {
    /// Outside a PR, compares against the merge-base with origin/main;
    /// inside a PR, merges the base branch into a temp branch and exports
    /// BUF_BASE_BRANCH for the breaking-change check.
    fn ensure_fresh_merge() -> Step<Run> {
        named::bash(indoc::indoc! {r#"
            if [ -z "$GITHUB_BASE_REF" ];
            then
              echo "BUF_BASE_BRANCH=$(git merge-base origin/main HEAD)" >> "$GITHUB_ENV"
            else
              git checkout -B temp
              git merge -q "origin/$GITHUB_BASE_REF" -m "merge main into temp"
              echo "BUF_BASE_BRANCH=$GITHUB_BASE_REF" >> "$GITHUB_ENV"
            fi
        "#})
    }

    /// Installs a pinned `buf` CLI.
    fn bufbuild_setup_action() -> Step<Use> {
        named::uses("bufbuild", "buf-setup-action", "v1")
            .add_with(("version", "v1.29.0"))
            .add_with(("github_token", vars::GITHUB_TOKEN))
    }

    /// Breaking-change check of crates/proto/proto against BUF_BASE_BRANCH.
    fn bufbuild_breaking_action() -> Step<Use> {
        named::uses("bufbuild", "buf-breaking-action", "v1").add_with(("input", "crates/proto/proto/"))
            .add_with(("against", "https://github.com/${GITHUB_REPOSITORY}.git#branch=${BUF_BASE_BRANCH},subdir=crates/proto/proto/"))
    }

    /// Lints the proto definitions.
    fn buf_lint() -> Step<Run> {
        named::bash("buf lint crates/proto/proto")
    }

    /// Fails if the proto files are not `buf format` clean.
    fn check_protobuf_formatting() -> Step<Run> {
        named::bash("buf format --diff --exit-code crates/proto/proto")
    }

    named::job(
        release_job(&[])
            .runs_on(runners::LINUX_DEFAULT)
            // Identity for the merge commit created by ensure_fresh_merge.
            .add_env(("GIT_AUTHOR_NAME", "Protobuf Action"))
            .add_env(("GIT_AUTHOR_EMAIL", "ci@zed.dev"))
            .add_env(("GIT_COMMITTER_NAME", "Protobuf Action"))
            .add_env(("GIT_COMMITTER_EMAIL", "ci@zed.dev"))
            // Full history needed for merge-base / merging the base branch.
            .add_step(steps::checkout_repo().with_full_history())
            .add_step(ensure_fresh_merge())
            .add_step(bufbuild_setup_action())
            .add_step(bufbuild_breaking_action())
            .add_step(buf_lint())
            .add_step(check_protobuf_formatting()),
    )
}
634
/// Job that runs all workspace documentation tests on Linux (doctests are
/// not covered by nextest, hence a dedicated `cargo test --doc` job).
fn doctests() -> NamedJob {
    /// Runs the workspace doctests without stopping at the first failure.
    fn run_doctests() -> Step<Run> {
        named::bash(indoc::indoc! {r#"
            cargo test --workspace --doc --no-fail-fast
        "#})
        .id("run_doctests")
    }

    named::job(use_clang(
        release_job(&[])
            .runs_on(runners::LINUX_DEFAULT)
            .add_step(steps::checkout_repo())
            .add_step(steps::cache_rust_dependencies_namespace())
            .map(steps::install_linux_dependencies)
            .add_step(steps::setup_cargo_config(Platform::Linux))
            .add_step(steps::setup_sccache(Platform::Linux))
            .add_step(run_doctests())
            .add_step(steps::show_sccache_stats(Platform::Linux))
            .add_step(steps::cleanup_cargo_config(Platform::Linux)),
    ))
}
656
/// License audit job: runs the repo's check-licenses and generate-licenses
/// scripts.
///
/// NOTE(review): unlike the sibling jobs this uses a bare `Job::default()`
/// rather than `release_job(&[])` — confirm the difference is intentional.
fn check_licenses() -> NamedJob {
    named::job(
        Job::default()
            .runs_on(runners::LINUX_SMALL)
            .add_step(steps::checkout_repo())
            .add_step(steps::cache_rust_dependencies_namespace())
            .add_step(steps::script("./script/check-licenses"))
            .add_step(steps::script("./script/generate-licenses")),
    )
}
667
/// Docs job: checks markdown links in docs/src, builds the mdBook docs into
/// target/deploy, then re-checks links in the generated HTML.
fn check_docs() -> NamedJob {
    /// Offline link check (external `http` links excluded) over `dir`.
    fn lychee_link_check(dir: &str) -> Step<Use> {
        named::uses(
            "lycheeverse",
            "lychee-action",
            "82202e5e9c2f4ef1a55a3d02563e1cb6041e5332",
        ) // v2.4.1
        .add_with(("args", format!("--no-progress --exclude '^http' '{dir}'")))
        .add_with(("fail", true))
        .add_with(("jobSummary", false))
    }

    /// Installs a pinned mdBook.
    fn install_mdbook() -> Step<Use> {
        named::uses(
            "peaceiris",
            "actions-mdbook",
            "ee69d230fe19748b7abf22df32acaa93833fad08", // v2
        )
        .with(("mdbook-version", "0.4.37"))
    }

    /// Builds the docs book into target/deploy/docs.
    fn build_docs() -> Step<Run> {
        named::bash(indoc::indoc! {r#"
            mkdir -p target/deploy
            mdbook build ./docs --dest-dir=../target/deploy/docs/
        "#})
    }

    named::job(use_clang(
        release_job(&[])
            .runs_on(runners::LINUX_LARGE)
            .add_step(steps::checkout_repo())
            .add_step(steps::setup_cargo_config(Platform::Linux))
            // todo(ci): un-inline build_docs/action.yml here
            .add_step(steps::cache_rust_dependencies_namespace())
            .add_step(
                lychee_link_check("./docs/src/**/*"), // check markdown links
            )
            .map(steps::install_linux_dependencies)
            .add_step(steps::script("./script/generate-action-metadata"))
            .add_step(install_mdbook())
            .add_step(build_docs())
            .add_step(
                lychee_link_check("target/deploy/docs"), // check links in generated html
            ),
    ))
}
715
/// CI-tooling lint job: shellcheck over repo scripts, actionlint over
/// workflow files, and a check that `cargo xtask workflows` output is
/// committed (i.e. this generator and .github are in sync).
pub(crate) fn check_scripts() -> NamedJob {
    /// Downloads the actionlint binary via its official install script; the
    /// step is given id `get_actionlint` so the next step can locate the
    /// executable from its output.
    fn download_actionlint() -> Step<Run> {
        named::bash(
            "bash <(curl https://raw.githubusercontent.com/rhysd/actionlint/main/scripts/download-actionlint.bash)",
        )
    }

    /// Runs the downloaded actionlint binary.
    fn run_actionlint() -> Step<Run> {
        named::bash(r#""$ACTIONLINT_BIN" -color"#).add_env((
            "ACTIONLINT_BIN",
            "${{ steps.get_actionlint.outputs.executable }}",
        ))
    }

    /// Runs the repo's shellcheck wrapper at error severity.
    fn run_shellcheck() -> Step<Run> {
        named::bash("./script/shellcheck-scripts error")
    }

    /// Regenerates the workflows and fails if .github has a diff.
    fn check_xtask_workflows() -> Step<Run> {
        named::bash(indoc::indoc! {r#"
            cargo xtask workflows
            if ! git diff --exit-code .github; then
              echo "Error: .github directory has uncommitted changes after running 'cargo xtask workflows'"
              echo "Please run 'cargo xtask workflows' locally and commit the changes"
              exit 1
            fi
        "#})
    }

    named::job(
        release_job(&[])
            .runs_on(runners::LINUX_SMALL)
            .add_step(steps::checkout_repo())
            .add_step(run_shellcheck())
            .add_step(download_actionlint().id("get_actionlint"))
            .add_step(run_actionlint())
            .add_step(cache_rust_dependencies_namespace())
            .add_step(check_xtask_workflows()),
    )
}
756
/// Dispatches the reusable extension_tests workflow once per changed
/// extension directory (from `orchestrate`'s `changed_extensions` output),
/// skipping entirely when that output is the empty JSON array.
fn extension_tests() -> NamedJob<UsesJob> {
    let job = Job::default()
        .needs(vec!["orchestrate".to_owned()])
        // Skip when orchestrate reported no changed extensions.
        .cond(Expression::new(
            "needs.orchestrate.outputs.changed_extensions != '[]'",
        ))
        .permissions(Permissions::default().contents(Level::Read))
        .strategy(
            Strategy::default()
                .fail_fast(false)
                // TODO: Remove the limit. We currently need this to workaround the concurrency group issue
                // where different matrix jobs would be placed in the same concurrency group and thus cancelled.
                .max_parallel(1u32)
                .matrix(json!({
                    "extension": "${{ fromJson(needs.orchestrate.outputs.changed_extensions) }}"
                })),
        )
        .uses_local(".github/workflows/extension_tests.yml")
        .with(Input::default().add("working-directory", "${{ matrix.extension }}"));

    named::job(job)
}