run_tests.rs

  1use gh_workflow::{
  2    Concurrency, Container, Event, Expression, Input, Job, Level, Permissions, Port, PullRequest,
  3    Push, Run, Step, Strategy, Use, UsesJob, Workflow,
  4};
  5use indexmap::IndexMap;
  6use indoc::formatdoc;
  7use serde_json::json;
  8
  9use crate::tasks::workflows::{
 10    steps::{
 11        CommonJobConditions, cache_rust_dependencies_namespace, repository_owner_guard_expression,
 12        use_clang,
 13    },
 14    vars::{self, PathCondition},
 15};
 16
 17use super::{
 18    runners::{self, Arch, Platform},
 19    steps::{self, FluentBuilder, NamedJob, named, release_job},
 20};
 21
/// Builds the `run_tests` CI workflow: an `orchestrate` job inspects the
/// changed files and sets one output flag per path rule, and each downstream
/// job is guarded by the relevant flag so unrelated changes skip expensive work.
pub(crate) fn run_tests() -> Workflow {
    // Specify anything which should potentially skip full test suite in this regex:
    // - docs/
    // - script/update_top_ranking_issues/
    // - .github/ISSUE_TEMPLATE/
    // - .github/workflows/  (except the run_tests workflow itself, via the
    //   `(?!run_tests)` negative lookahead below)
    // - extensions/  (these have their own test workflow)
    let should_run_tests = PathCondition::inverted(
        "run_tests",
        r"^(docs/|script/update_top_ranking_issues/|\.github/(ISSUE_TEMPLATE|workflows/(?!run_tests))|extensions/)",
    );
    let should_check_docs = PathCondition::new("run_docs", r"^(docs/|crates/.*\.rs)");
    let should_check_scripts = PathCondition::new(
        "run_action_checks",
        r"^\.github/(workflows/|actions/|actionlint.yml)|tooling/xtask|script/",
    );
    let should_check_licences =
        PathCondition::new("run_licenses", r"^(Cargo.lock|script/.*licenses)");

    // The orchestrate job owns every rule's output; rules must be registered
    // here before `.guard(..)` is used below.
    let orchestrate = orchestrate(&[
        &should_check_scripts,
        &should_check_docs,
        &should_check_licences,
        &should_run_tests,
    ]);

    let mut jobs = vec![
        orchestrate,
        check_style(),
        should_run_tests.guard(clippy(Platform::Windows, None)),
        should_run_tests.guard(clippy(Platform::Linux, None)),
        should_run_tests.guard(clippy(Platform::Mac, None)),
        should_run_tests.guard(clippy(Platform::Mac, Some(Arch::X86_64))),
        should_run_tests.guard(run_platform_tests(Platform::Windows)),
        should_run_tests.guard(run_platform_tests(Platform::Linux)),
        should_run_tests.guard(run_platform_tests(Platform::Mac)),
        should_run_tests.guard(doctests()),
        should_run_tests.guard(check_workspace_binaries()),
        should_run_tests.guard(check_wasm()),
        should_run_tests.guard(check_dependencies()), // could be more specific here?
        should_check_docs.guard(check_docs()),
        should_check_licences.guard(check_licenses()),
        should_check_scripts.guard(check_scripts()),
    ];
    let ext_tests = extension_tests();
    let tests_pass = tests_pass(&jobs, &[&ext_tests.name]);

    // NOTE(review): this job is pushed after `tests_pass` was computed above,
    // so `tests_pass` does not wait on it — confirm that is intentional.
    jobs.push(should_run_tests.guard(check_postgres_and_protobuf_migrations())); // could be more specific here?

    named::workflow()
        .add_event(
            Event::default()
                .push(
                    Push::default()
                        .add_branch("main")
                        .add_branch("v[0-9]+.[0-9]+.x"),
                )
                .pull_request(PullRequest::default().add_branch("**")),
        )
        // On main the group key includes the sha, so runs never cancel each
        // other; on other refs the constant 'anysha' makes a newer push
        // cancel the in-flight run for the same ref.
        .concurrency(
            Concurrency::default()
                .group(concat!(
                    "${{ github.workflow }}-${{ github.ref_name }}-",
                    "${{ github.ref_name == 'main' && github.sha || 'anysha' }}"
                ))
                .cancel_in_progress(true),
        )
        .add_env(("CARGO_TERM_COLOR", "always"))
        .add_env(("RUST_BACKTRACE", 1))
        .add_env(("CARGO_INCREMENTAL", 0))
        .map(|mut workflow| {
            for job in jobs {
                workflow = workflow.add_job(job.name, job.job)
            }
            workflow
        })
        .add_job(ext_tests.name, ext_tests.job)
        .add_job(tests_pass.name, tests_pass.job)
}
101
/// Controls which features `orchestrate_impl` includes in the generated script.
///
/// Fieldless two-variant enum; derives `Clone`/`Copy` so values can be passed
/// and compared freely, and `Debug` for diagnostics.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
enum OrchestrateTarget {
    /// For the main Zed repo: includes the cargo package filter and extension
    /// change detection, but no working-directory scoping.
    ZedRepo,
    /// For individual extension repos: scopes changed-file detection to the
    /// working directory, with no package filter or extension detection.
    Extension,
}
112
/// Generates the `orchestrate` job for the main Zed repo: a bash script that
/// checks changed files against each rule's regex pattern and sets GitHub
/// output variables accordingly (plus package and extension change detection).
pub fn orchestrate(rules: &[&PathCondition]) -> NamedJob {
    orchestrate_impl(rules, OrchestrateTarget::ZedRepo)
}
118
/// Like [`orchestrate`], but for individual extension repos: changed-file
/// detection is scoped to the working directory, with no cargo package filter
/// or extension change detection.
pub fn orchestrate_for_extension(rules: &[&PathCondition]) -> NamedJob {
    orchestrate_impl(rules, OrchestrateTarget::Extension)
}
122
/// Shared implementation behind [`orchestrate`] / [`orchestrate_for_extension`]:
/// builds a single-step job whose bash script diffs against the PR merge base
/// (or `HEAD~1` outside PR context) and writes one boolean output per rule.
fn orchestrate_impl(rules: &[&PathCondition], target: OrchestrateTarget) -> NamedJob {
    let name = "orchestrate".to_owned();
    let step_name = "filter".to_owned();
    let mut script = String::new();

    // Compute $CHANGED_FILES relative to the PR base branch, or to the
    // previous commit when not running in a PR context.
    script.push_str(indoc::indoc! {r#"
        set -euo pipefail
        if [ -z "$GITHUB_BASE_REF" ]; then
          echo "Not in a PR context (i.e., push to main/stable/preview)"
          COMPARE_REV="$(git rev-parse HEAD~1)"
        else
          echo "In a PR context comparing to pull_request.base.ref"
          git fetch origin "$GITHUB_BASE_REF" --depth=350
          COMPARE_REV="$(git merge-base "origin/${GITHUB_BASE_REF}" HEAD)"
        fi
        CHANGED_FILES="$(git diff --name-only "$COMPARE_REV" "$GITHUB_SHA")"

    "#});

    if target == OrchestrateTarget::Extension {
        script.push_str(indoc::indoc! {r#"
        # When running from a subdirectory, git diff returns repo-root-relative paths.
        # Filter to only files within the current working directory and strip the prefix.
        REPO_SUBDIR="$(git rev-parse --show-prefix)"
        REPO_SUBDIR="${REPO_SUBDIR%/}"
        if [ -n "$REPO_SUBDIR" ]; then
            CHANGED_FILES="$(echo "$CHANGED_FILES" | grep "^${REPO_SUBDIR}/" | sed "s|^${REPO_SUBDIR}/||" || true)"
        fi

    "#});
    }

    // check_pattern writes `<name>=true|false` to $GITHUB_OUTPUT depending on
    // whether $CHANGED_FILES matches the pattern; the grep flag argument
    // controls quiet/Perl-regex/inverted matching.
    script.push_str(indoc::indoc! {r#"
        check_pattern() {
          local output_name="$1"
          local pattern="$2"
          local grep_arg="$3"

          echo "$CHANGED_FILES" | grep "$grep_arg" "$pattern" && \
            echo "${output_name}=true" >> "$GITHUB_OUTPUT" || \
            echo "${output_name}=false" >> "$GITHUB_OUTPUT"
        }

    "#});

    let mut outputs = IndexMap::new();

    // Main-repo only: map changed files to a nextest `changed_packages`
    // filterset, falling back to an empty filter (= run everything) on
    // toolchain/cargo-config/root-manifest changes or outside PRs.
    if target == OrchestrateTarget::ZedRepo {
        script.push_str(indoc::indoc! {r#"
        # Check for changes that require full rebuild (no filter)
        # Direct pushes to main/stable/preview always run full suite
        if [ -z "$GITHUB_BASE_REF" ]; then
          echo "Not a PR, running full test suite"
          echo "changed_packages=" >> "$GITHUB_OUTPUT"
        elif echo "$CHANGED_FILES" | grep -qP '^(rust-toolchain\.toml|\.cargo/|\.github/|Cargo\.(toml|lock)$)'; then
          echo "Toolchain, cargo config, or root Cargo files changed, will run all tests"
          echo "changed_packages=" >> "$GITHUB_OUTPUT"
        else
          # Extract changed directories from file paths
          CHANGED_DIRS=$(echo "$CHANGED_FILES" | \
            grep -oP '^(crates|tooling)/\K[^/]+' | \
            sort -u || true)

          # Build directory-to-package mapping using cargo metadata
          DIR_TO_PKG=$(cargo metadata --format-version=1 --no-deps 2>/dev/null | \
            jq -r '.packages[] | select(.manifest_path | test("crates/|tooling/")) | "\(.manifest_path | capture("(crates|tooling)/(?<dir>[^/]+)") | .dir)=\(.name)"')

          # Map directory names to package names
          FILE_CHANGED_PKGS=""
          for dir in $CHANGED_DIRS; do
            pkg=$(echo "$DIR_TO_PKG" | grep "^${dir}=" | cut -d= -f2 | head -1)
            if [ -n "$pkg" ]; then
              FILE_CHANGED_PKGS=$(printf '%s\n%s' "$FILE_CHANGED_PKGS" "$pkg")
            else
              # Fall back to directory name if no mapping found
              FILE_CHANGED_PKGS=$(printf '%s\n%s' "$FILE_CHANGED_PKGS" "$dir")
            fi
          done
          FILE_CHANGED_PKGS=$(echo "$FILE_CHANGED_PKGS" | grep -v '^$' | sort -u || true)

          # If assets/ changed, add crates that depend on those assets
          if echo "$CHANGED_FILES" | grep -qP '^assets/'; then
            FILE_CHANGED_PKGS=$(printf '%s\n%s\n%s\n%s' "$FILE_CHANGED_PKGS" "settings" "storybook" "assets" | sort -u)
          fi

          # Combine all changed packages
          ALL_CHANGED_PKGS=$(echo "$FILE_CHANGED_PKGS" | grep -v '^$' || true)

          if [ -z "$ALL_CHANGED_PKGS" ]; then
            echo "No package changes detected, will run all tests"
            echo "changed_packages=" >> "$GITHUB_OUTPUT"
          else
            # Build nextest filterset with rdeps for each package
            FILTERSET=$(echo "$ALL_CHANGED_PKGS" | \
              sed 's/.*/rdeps(&)/' | \
              tr '\n' '|' | \
              sed 's/|$//')
            echo "Changed packages filterset: $FILTERSET"
            echo "changed_packages=$FILTERSET" >> "$GITHUB_OUTPUT"
          fi
        fi

    "#});

        outputs.insert(
            "changed_packages".to_owned(),
            format!("${{{{ steps.{}.outputs.changed_packages }}}}", step_name),
        );
    }

    for rule in rules {
        // Record that this orchestrate job produces the rule's output;
        // panics if a rule is registered with more than one orchestrate job.
        assert!(
            rule.set_by_step
                .borrow_mut()
                .replace(name.clone())
                .is_none()
        );
        // Each rule becomes a job output; duplicate rule names would shadow
        // each other, so assert uniqueness.
        assert!(
            outputs
                .insert(
                    rule.name.to_owned(),
                    format!("${{{{ steps.{}.outputs.{} }}}}", step_name, rule.name)
                )
                .is_none()
        );

        // `-v` inverts the match for rules built with `PathCondition::inverted`.
        let grep_arg = if rule.invert { "-qvP" } else { "-qP" };
        script.push_str(&format!(
            "check_pattern \"{}\" '{}' {}\n",
            rule.name, rule.pattern, grep_arg
        ));
    }

    // Main-repo only: report changed extension directories as a JSON array.
    if target == OrchestrateTarget::ZedRepo {
        script.push_str(DETECT_CHANGED_EXTENSIONS_SCRIPT);
        script.push_str("echo \"changed_extensions=$EXTENSIONS_JSON\" >> \"$GITHUB_OUTPUT\"\n");

        outputs.insert(
            "changed_extensions".to_owned(),
            format!("${{{{ steps.{}.outputs.changed_extensions }}}}", step_name),
        );
    }

    let job = Job::default()
        .runs_on(runners::LINUX_SMALL)
        .with_repository_owner_guard()
        .outputs(outputs)
        // Deep history is needed so merge-base/HEAD~1 comparisons work.
        .add_step(steps::checkout_repo().with_deep_history_on_non_main())
        .add_step(Step::new(step_name.clone()).run(script).id(step_name));

    NamedJob { name, job }
}
275
276pub fn tests_pass(jobs: &[NamedJob], extra_job_names: &[&str]) -> NamedJob {
277    let mut script = String::from(indoc::indoc! {r#"
278        set +x
279        EXIT_CODE=0
280
281        check_result() {
282          echo "* $1: $2"
283          if [[ "$2" != "skipped" && "$2" != "success" ]]; then EXIT_CODE=1; fi
284        }
285
286    "#});
287
288    let all_names: Vec<&str> = jobs
289        .iter()
290        .map(|job| job.name.as_str())
291        .chain(extra_job_names.iter().copied())
292        .collect();
293
294    let env_entries: Vec<_> = all_names
295        .iter()
296        .map(|name| {
297            let env_name = format!("RESULT_{}", name.to_uppercase());
298            let env_value = format!("${{{{ needs.{}.result }}}}", name);
299            (env_name, env_value)
300        })
301        .collect();
302
303    script.push_str(
304        &all_names
305            .iter()
306            .zip(env_entries.iter())
307            .map(|(name, (env_name, _))| format!("check_result \"{}\" \"${}\"", name, env_name))
308            .collect::<Vec<_>>()
309            .join("\n"),
310    );
311
312    script.push_str("\n\nexit $EXIT_CODE\n");
313
314    let job = Job::default()
315        .runs_on(runners::LINUX_SMALL)
316        .needs(
317            all_names
318                .iter()
319                .map(|name| name.to_string())
320                .collect::<Vec<String>>(),
321        )
322        .cond(repository_owner_guard_expression(true))
323        .add_step(
324            env_entries
325                .into_iter()
326                .fold(named::bash(&script), |step, env_item| {
327                    step.add_env(env_item)
328                }),
329        );
330
331    named::job(job)
332}
333
/// Bash script snippet that detects changed extension directories from `$CHANGED_FILES`.
/// Assumes `$CHANGED_FILES` is already set. Sets `$EXTENSIONS_JSON` to a JSON array of
/// changed extension paths (e.g. `["extensions/foo"]`), excluding `extensions/workflows`.
/// Callers are responsible for writing the result to `$GITHUB_OUTPUT`.
pub(crate) const DETECT_CHANGED_EXTENSIONS_SCRIPT: &str = indoc::indoc! {r#"
    # Detect changed extension directories (excluding extensions/workflows)
    CHANGED_EXTENSIONS=$(echo "$CHANGED_FILES" | grep -oP '^extensions/[^/]+(?=/)' | sort -u | grep -v '^extensions/workflows$' || true)
    if [ -n "$CHANGED_EXTENSIONS" ]; then
        EXTENSIONS_JSON=$(echo "$CHANGED_EXTENSIONS" | jq -R -s -c 'split("\n") | map(select(length > 0))')
    else
        EXTENSIONS_JSON="[]"
    fi
"#};
346
/// Release-asset filename of the prebuilt Linux `ts_query_ls` binary.
const TS_QUERY_LS_FILE: &str = "ts_query_ls-x86_64-unknown-linux-gnu.tar.gz";
/// Pinned GitHub release tag of `ribru17/ts_query_ls` fetched in CI.
const CI_TS_QUERY_RELEASE: &str = "tags/v3.15.1";
349
/// Step that downloads the pinned `ts_query_ls` release asset
/// ([`CI_TS_QUERY_RELEASE`] / [`TS_QUERY_LS_FILE`]) from GitHub.
pub(crate) fn fetch_ts_query_ls() -> Step<Use> {
    named::uses(
        "dsaltares",
        "fetch-gh-release-asset",
        "aa37ae5c44d3c9820bc12fe675e8670ecd93bd1c",
    ) // v1.1.1
    .add_with(("repo", "ribru17/ts_query_ls"))
    .add_with(("version", CI_TS_QUERY_RELEASE))
    .add_with(("file", TS_QUERY_LS_FILE))
}
360
/// Step that unpacks the fetched `ts_query_ls` tarball into the workspace and
/// runs `format --check .`, printing install hints when formatting fails.
pub(crate) fn run_ts_query_ls() -> Step<Run> {
    named::bash(formatdoc!(
        r#"tar -xf "$GITHUB_WORKSPACE/{TS_QUERY_LS_FILE}" -C "$GITHUB_WORKSPACE"
        "$GITHUB_WORKSPACE/ts_query_ls" format --check . || {{
            echo "Found unformatted queries, please format them with ts_query_ls."
            echo "For easy use, install the Tree-sitter query extension:"
            echo "zed://extension/tree-sitter-query"
            false
        }}"#
    ))
}
372
373fn check_style() -> NamedJob {
374    fn check_for_typos() -> Step<Use> {
375        named::uses(
376            "crate-ci",
377            "typos",
378            "2d0ce569feab1f8752f1dde43cc2f2aa53236e06",
379        ) // v1.40.0
380        .with(("config", "./typos.toml"))
381    }
382
383    named::job(
384        release_job(&[])
385            .runs_on(runners::LINUX_MEDIUM)
386            .add_step(steps::checkout_repo())
387            .add_step(steps::cache_rust_dependencies_namespace())
388            .add_step(steps::setup_pnpm())
389            .add_step(steps::prettier())
390            .add_step(steps::cargo_fmt())
391            .add_step(steps::script("./script/check-todos"))
392            .add_step(steps::script("./script/check-keymaps"))
393            .add_step(check_for_typos())
394            .add_step(fetch_ts_query_ls())
395            .add_step(run_ts_query_ls()),
396    )
397}
398
/// Dependency hygiene job: unused-dependency detection (cargo-machete),
/// `Cargo.lock` freshness, and GitHub's dependency review on PRs.
fn check_dependencies() -> NamedJob {
    fn install_cargo_machete() -> Step<Use> {
        // Installs a pinned cargo-machete via the rs-cargo action.
        named::uses(
            "clechasseur",
            "rs-cargo",
            "8435b10f6e71c2e3d4d3b7573003a8ce4bfc6386", // v2
        )
        .add_with(("command", "install"))
        .add_with(("args", "cargo-machete@0.7.0"))
    }

    fn run_cargo_machete() -> Step<Use> {
        named::uses(
            "clechasseur",
            "rs-cargo",
            "8435b10f6e71c2e3d4d3b7573003a8ce4bfc6386", // v2
        )
        .add_with(("command", "machete"))
    }

    fn check_cargo_lock() -> Step<Run> {
        // `--locked` makes cargo fail if Cargo.lock is out of date.
        named::bash("cargo update --locked --workspace")
    }

    fn check_vulnerable_dependencies() -> Step<Use> {
        // Dependency review compares against a PR base, hence the condition.
        named::uses(
            "actions",
            "dependency-review-action",
            "67d4f4bd7a9b17a0db54d2a7519187c65e339de8", // v4
        )
        .if_condition(Expression::new("github.event_name == 'pull_request'"))
        .with(("license-check", false))
    }

    named::job(use_clang(
        release_job(&[])
            .runs_on(runners::LINUX_SMALL)
            .add_step(steps::checkout_repo())
            .add_step(steps::cache_rust_dependencies_namespace())
            .add_step(install_cargo_machete())
            .add_step(run_cargo_machete())
            .add_step(check_cargo_lock())
            .add_step(check_vulnerable_dependencies()),
    ))
}
444
/// Verifies that `gpui_platform` type-checks for `wasm32-unknown-unknown`
/// using a nightly toolchain with `-Zbuild-std`.
fn check_wasm() -> NamedJob {
    fn install_nightly_wasm_toolchain() -> Step<Run> {
        named::bash(
            "rustup toolchain install nightly --component rust-src --target wasm32-unknown-unknown",
        )
    }

    fn cargo_check_wasm() -> Step<Run> {
        // `-Zbuild-std` rebuilds std/panic_abort with the target features
        // set via the RUSTFLAGS env below.
        named::bash(concat!(
            "cargo +nightly -Zbuild-std=std,panic_abort ",
            "check --target wasm32-unknown-unknown -p gpui_platform",
        ))
        .add_env((
            "CARGO_TARGET_WASM32_UNKNOWN_UNKNOWN_RUSTFLAGS",
            "-C target-feature=+atomics,+bulk-memory,+mutable-globals",
        ))
    }

    named::job(
        release_job(&[])
            .runs_on(runners::LINUX_LARGE)
            .add_step(steps::checkout_repo())
            .add_step(steps::setup_cargo_config(Platform::Linux))
            .add_step(steps::cache_rust_dependencies_namespace())
            .add_step(install_nightly_wasm_toolchain())
            .add_step(steps::setup_sccache(Platform::Linux))
            .add_step(cargo_check_wasm())
            .add_step(steps::show_sccache_stats(Platform::Linux))
            .add_step(steps::cleanup_cargo_config(Platform::Linux)),
    )
}
476
477fn check_workspace_binaries() -> NamedJob {
478    named::job(use_clang(
479        release_job(&[])
480            .runs_on(runners::LINUX_LARGE)
481            .add_step(steps::checkout_repo())
482            .add_step(steps::setup_cargo_config(Platform::Linux))
483            .add_step(steps::cache_rust_dependencies_namespace())
484            .map(steps::install_linux_dependencies)
485            .add_step(steps::setup_sccache(Platform::Linux))
486            .add_step(steps::script("cargo build -p collab"))
487            .add_step(steps::script("cargo build --workspace --bins --examples"))
488            .add_step(steps::show_sccache_stats(Platform::Linux))
489            .add_step(steps::cleanup_cargo_config(Platform::Linux)),
490    ))
491}
492
493pub(crate) fn clippy(platform: Platform, arch: Option<Arch>) -> NamedJob {
494    let target = arch.map(|arch| match (platform, arch) {
495        (Platform::Mac, Arch::X86_64) => "x86_64-apple-darwin",
496        (Platform::Mac, Arch::AARCH64) => "aarch64-apple-darwin",
497        _ => unimplemented!("cross-arch clippy not supported for {platform}/{arch}"),
498    });
499    let runner = match platform {
500        Platform::Windows => runners::WINDOWS_DEFAULT,
501        Platform::Linux => runners::LINUX_DEFAULT,
502        Platform::Mac => runners::MAC_DEFAULT,
503    };
504    let mut job = release_job(&[])
505        .runs_on(runner)
506        .add_step(steps::checkout_repo())
507        .add_step(steps::setup_cargo_config(platform))
508        .when(
509            platform == Platform::Linux || platform == Platform::Mac,
510            |this| this.add_step(steps::cache_rust_dependencies_namespace()),
511        )
512        .when(
513            platform == Platform::Linux,
514            steps::install_linux_dependencies,
515        )
516        .when_some(target, |this, target| {
517            this.add_step(steps::install_rustup_target(target))
518        })
519        .add_step(steps::setup_sccache(platform))
520        .add_step(steps::clippy(platform, target))
521        .add_step(steps::show_sccache_stats(platform));
522    if platform == Platform::Linux {
523        job = use_clang(job);
524    }
525    let name = match arch {
526        Some(arch) => format!("clippy_{platform}_{arch}"),
527        None => format!("clippy_{platform}"),
528    };
529    NamedJob { name, job }
530}
531
/// Platform test job with the changed-packages nextest filter applied (the
/// filterset comes from the orchestrate job's `changed_packages` output).
pub(crate) fn run_platform_tests(platform: Platform) -> NamedJob {
    run_platform_tests_impl(platform, true)
}
535
/// Platform test job that always runs the full suite, with no
/// changed-packages filter.
pub(crate) fn run_platform_tests_no_filter(platform: Platform) -> NamedJob {
    run_platform_tests_impl(platform, false)
}
539
/// Builds the `run_tests_<platform>` job. When `filter_packages` is true the
/// nextest invocation is restricted by the orchestrate job's
/// `changed_packages` filterset; otherwise the full suite runs.
fn run_platform_tests_impl(platform: Platform, filter_packages: bool) -> NamedJob {
    let runner = match platform {
        Platform::Windows => runners::WINDOWS_DEFAULT,
        Platform::Linux => runners::LINUX_DEFAULT,
        Platform::Mac => runners::MAC_DEFAULT,
    };
    NamedJob {
        name: format!("run_tests_{platform}"),
        job: release_job(&[])
            .runs_on(runner)
            // Only the Linux job gets a Postgres service container.
            .when(platform == Platform::Linux, |job| {
                job.add_service(
                    "postgres",
                    Container::new("postgres:15")
                        .add_env(("POSTGRES_HOST_AUTH_METHOD", "trust"))
                        .ports(vec![Port::Name("5432:5432".into())])
                        .options(
                            "--health-cmd pg_isready \
                             --health-interval 500ms \
                             --health-timeout 5s \
                             --health-retries 10",
                        ),
                )
            })
            .add_step(steps::checkout_repo())
            .add_step(steps::setup_cargo_config(platform))
            .when(platform == Platform::Mac, |this| {
                this.add_step(steps::cache_rust_dependencies_namespace())
            })
            // Linux also wraps the job with `use_clang` while adding the cache.
            .when(platform == Platform::Linux, |this| {
                use_clang(this.add_step(steps::cache_rust_dependencies_namespace()))
            })
            .when(
                platform == Platform::Linux,
                steps::install_linux_dependencies,
            )
            .add_step(steps::setup_node())
            // nextest is only installed on Linux and Mac here.
            .when(
                platform == Platform::Linux || platform == Platform::Mac,
                |job| job.add_step(steps::cargo_install_nextest()),
            )
            .add_step(steps::clear_target_dir_if_large(platform))
            .add_step(steps::setup_sccache(platform))
            // Exactly one of the next two `when`s applies: filtered run using
            // the "orchestrate" job's output, or an unfiltered full run.
            .when(filter_packages, |job| {
                job.add_step(
                    steps::cargo_nextest(platform).with_changed_packages_filter("orchestrate"),
                )
            })
            .when(!filter_packages, |job| {
                job.add_step(steps::cargo_nextest(platform))
            })
            .add_step(steps::show_sccache_stats(platform))
            .add_step(steps::cleanup_cargo_config(platform)),
    }
}
595
/// Checks protobuf compatibility against the base branch with buf
/// (breaking-change detection, lint, and formatting).
///
/// NOTE(review): despite the function name, the visible steps only cover
/// protobuf — no postgres migration check appears in this block; confirm the
/// name is still accurate.
pub(crate) fn check_postgres_and_protobuf_migrations() -> NamedJob {
    fn ensure_fresh_merge() -> Step<Run> {
        // Sets BUF_BASE_BRANCH: merge-base with main outside PRs; in PRs,
        // merges the base branch into a temp branch and compares against it.
        named::bash(indoc::indoc! {r#"
            if [ -z "$GITHUB_BASE_REF" ];
            then
              echo "BUF_BASE_BRANCH=$(git merge-base origin/main HEAD)" >> "$GITHUB_ENV"
            else
              git checkout -B temp
              git merge -q "origin/$GITHUB_BASE_REF" -m "merge main into temp"
              echo "BUF_BASE_BRANCH=$GITHUB_BASE_REF" >> "$GITHUB_ENV"
            fi
        "#})
    }

    fn bufbuild_setup_action() -> Step<Use> {
        named::uses("bufbuild", "buf-setup-action", "v1")
            .add_with(("version", "v1.29.0"))
            .add_with(("github_token", vars::GITHUB_TOKEN))
    }

    fn bufbuild_breaking_action() -> Step<Use> {
        named::uses("bufbuild", "buf-breaking-action", "v1").add_with(("input", "crates/proto/proto/"))
            .add_with(("against", "https://github.com/${GITHUB_REPOSITORY}.git#branch=${BUF_BASE_BRANCH},subdir=crates/proto/proto/"))
    }

    fn buf_lint() -> Step<Run> {
        named::bash("buf lint crates/proto/proto")
    }

    fn check_protobuf_formatting() -> Step<Run> {
        named::bash("buf format --diff --exit-code crates/proto/proto")
    }

    named::job(
        release_job(&[])
            .runs_on(runners::LINUX_DEFAULT)
            // Git identity for the merge commit created in ensure_fresh_merge.
            .add_env(("GIT_AUTHOR_NAME", "Protobuf Action"))
            .add_env(("GIT_AUTHOR_EMAIL", "ci@zed.dev"))
            .add_env(("GIT_COMMITTER_NAME", "Protobuf Action"))
            .add_env(("GIT_COMMITTER_EMAIL", "ci@zed.dev"))
            // Full history is needed for merge-base / branch comparisons.
            .add_step(steps::checkout_repo().with_full_history())
            .add_step(ensure_fresh_merge())
            .add_step(bufbuild_setup_action())
            .add_step(bufbuild_breaking_action())
            .add_step(buf_lint())
            .add_step(check_protobuf_formatting()),
    )
}
644
/// Runs workspace documentation tests (`cargo test --doc`) on Linux.
fn doctests() -> NamedJob {
    fn run_doctests() -> Step<Run> {
        named::bash(indoc::indoc! {r#"
            cargo test --workspace --doc --no-fail-fast
        "#})
        .id("run_doctests")
    }

    named::job(use_clang(
        release_job(&[])
            .runs_on(runners::LINUX_DEFAULT)
            .add_step(steps::checkout_repo())
            .add_step(steps::cache_rust_dependencies_namespace())
            .map(steps::install_linux_dependencies)
            .add_step(steps::setup_cargo_config(Platform::Linux))
            .add_step(steps::setup_sccache(Platform::Linux))
            .add_step(run_doctests())
            .add_step(steps::show_sccache_stats(Platform::Linux))
            .add_step(steps::cleanup_cargo_config(Platform::Linux)),
    ))
}
666
667fn check_licenses() -> NamedJob {
668    named::job(
669        Job::default()
670            .runs_on(runners::LINUX_SMALL)
671            .add_step(steps::checkout_repo())
672            .add_step(steps::cache_rust_dependencies_namespace())
673            .add_step(steps::script("./script/check-licenses"))
674            .add_step(steps::script("./script/generate-licenses")),
675    )
676}
677
/// Docs job: link-checks the markdown sources, builds the mdBook docs, and
/// link-checks the generated HTML.
fn check_docs() -> NamedJob {
    fn lychee_link_check(dir: &str) -> Step<Use> {
        // Pinned lychee action; `--exclude '^http'` skips external URLs so
        // only local/relative links are verified.
        named::uses(
            "lycheeverse",
            "lychee-action",
            "82202e5e9c2f4ef1a55a3d02563e1cb6041e5332",
        ) // v2.4.1
        .add_with(("args", format!("--no-progress --exclude '^http' '{dir}'")))
        .add_with(("fail", true))
        .add_with(("jobSummary", false))
    }

    fn install_mdbook() -> Step<Use> {
        named::uses(
            "peaceiris",
            "actions-mdbook",
            "ee69d230fe19748b7abf22df32acaa93833fad08", // v2
        )
        .with(("mdbook-version", "0.4.37"))
    }

    fn build_docs() -> Step<Run> {
        named::bash(indoc::indoc! {r#"
            mkdir -p target/deploy
            mdbook build ./docs --dest-dir=../target/deploy/docs/
        "#})
    }

    named::job(use_clang(
        release_job(&[])
            .runs_on(runners::LINUX_LARGE)
            .add_step(steps::checkout_repo())
            .add_step(steps::setup_cargo_config(Platform::Linux))
            // todo(ci): un-inline build_docs/action.yml here
            .add_step(steps::cache_rust_dependencies_namespace())
            .add_step(
                lychee_link_check("./docs/src/**/*"), // check markdown links
            )
            .map(steps::install_linux_dependencies)
            .add_step(steps::script("./script/generate-action-metadata"))
            .add_step(install_mdbook())
            .add_step(build_docs())
            .add_step(
                lychee_link_check("target/deploy/docs"), // check links in generated html
            ),
    ))
}
725
/// CI-tooling checks: shellcheck over repo scripts, actionlint over workflow
/// files, and verification that `cargo xtask workflows` output is committed.
pub(crate) fn check_scripts() -> NamedJob {
    fn download_actionlint() -> Step<Run> {
        // Upstream bootstrap script; the step is given id "get_actionlint"
        // below so its `executable` output can be referenced.
        named::bash(
            "bash <(curl https://raw.githubusercontent.com/rhysd/actionlint/main/scripts/download-actionlint.bash)",
        )
    }

    fn run_actionlint() -> Step<Run> {
        // Reads the binary path from the download step's `executable` output.
        named::bash(r#""$ACTIONLINT_BIN" -color"#).add_env((
            "ACTIONLINT_BIN",
            "${{ steps.get_actionlint.outputs.executable }}",
        ))
    }

    fn run_shellcheck() -> Step<Run> {
        named::bash("./script/shellcheck-scripts error")
    }

    fn check_xtask_workflows() -> Step<Run> {
        // Regenerates the workflows and fails if .github differs from HEAD.
        named::bash(indoc::indoc! {r#"
            cargo xtask workflows
            if ! git diff --exit-code .github; then
              echo "Error: .github directory has uncommitted changes after running 'cargo xtask workflows'"
              echo "Please run 'cargo xtask workflows' locally and commit the changes"
              exit 1
            fi
        "#})
    }

    named::job(
        release_job(&[])
            .runs_on(runners::LINUX_SMALL)
            .add_step(steps::checkout_repo())
            .add_step(run_shellcheck())
            .add_step(download_actionlint().id("get_actionlint"))
            .add_step(run_actionlint())
            .add_step(cache_rust_dependencies_namespace())
            .add_step(check_xtask_workflows()),
    )
}
766
767fn extension_tests() -> NamedJob<UsesJob> {
768    let job = Job::default()
769        .needs(vec!["orchestrate".to_owned()])
770        .cond(Expression::new(
771            "needs.orchestrate.outputs.changed_extensions != '[]'",
772        ))
773        .permissions(Permissions::default().contents(Level::Read))
774        .strategy(
775            Strategy::default()
776                .fail_fast(false)
777                // TODO: Remove the limit. We currently need this to workaround the concurrency group issue
778                // where different matrix jobs would be placed in the same concurrency group and thus cancelled.
779                .max_parallel(1u32)
780                .matrix(json!({
781                    "extension": "${{ fromJson(needs.orchestrate.outputs.changed_extensions) }}"
782                })),
783        )
784        .uses_local(".github/workflows/extension_tests.yml")
785        .with(Input::default().add("working-directory", "${{ matrix.extension }}"));
786
787    named::job(job)
788}