run_tests.rs

  1use gh_workflow::{
  2    Concurrency, Container, Event, Expression, Input, Job, Level, Permissions, Port, PullRequest,
  3    Push, Run, Step, Strategy, Use, UsesJob, Workflow,
  4};
  5use indexmap::IndexMap;
  6use indoc::formatdoc;
  7use serde_json::json;
  8
  9use crate::tasks::workflows::{
 10    steps::{
 11        CommonJobConditions, cache_rust_dependencies_namespace, repository_owner_guard_expression,
 12        use_clang,
 13    },
 14    vars::{self, PathCondition},
 15};
 16
 17use super::{
 18    runners::{self, Arch, Platform},
 19    steps::{self, FluentBuilder, NamedJob, named, release_job},
 20};
 21
/// Assembles the main CI workflow: an `orchestrate` job that computes
/// path-based filters from the changed files, the style/lint/test jobs gated
/// on those filters, per-extension tests, and a final `tests_pass`
/// aggregation job suitable for branch protection.
pub(crate) fn run_tests() -> Workflow {
    // Specify anything which should potentially skip full test suite in this regex:
    // - docs/
    // - script/update_top_ranking_issues/
    // - .github/ISSUE_TEMPLATE/
    // - .github/workflows/  (except .github/workflows/run_tests*)
    // - extensions/  (these have their own test workflow)
    let should_run_tests = PathCondition::inverted(
        "run_tests",
        r"^(docs/|script/update_top_ranking_issues/|\.github/(ISSUE_TEMPLATE|workflows/(?!run_tests))|extensions/)",
    );
    let should_check_docs = PathCondition::new("run_docs", r"^(docs/|crates/.*\.rs)");
    let should_check_scripts = PathCondition::new(
        "run_action_checks",
        r"^\.github/(workflows/|actions/|actionlint.yml)|tooling/xtask|script/",
    );
    let should_check_licences =
        PathCondition::new("run_licenses", r"^(Cargo.lock|script/.*licenses)");

    // The orchestrate job evaluates each PathCondition against the changed
    // files and exposes the results as job outputs for the `guard`ed jobs below.
    let orchestrate = orchestrate(&[
        &should_check_scripts,
        &should_check_docs,
        &should_check_licences,
        &should_run_tests,
    ]);

    let mut jobs = vec![
        orchestrate,
        check_style(),
        should_run_tests.guard(clippy(Platform::Windows, None)),
        should_run_tests.guard(clippy(Platform::Linux, None)),
        should_run_tests.guard(clippy(Platform::Mac, None)),
        should_run_tests.guard(clippy(Platform::Mac, Some(Arch::X86_64))),
        should_run_tests.guard(run_platform_tests(Platform::Windows)),
        should_run_tests.guard(run_platform_tests(Platform::Linux)),
        should_run_tests.guard(run_platform_tests(Platform::Mac)),
        should_run_tests.guard(doctests()),
        should_run_tests.guard(check_workspace_binaries()),
        should_run_tests.guard(build_visual_tests_binary()),
        should_run_tests.guard(check_wasm()),
        should_run_tests.guard(check_dependencies()), // could be more specific here?
        should_check_docs.guard(check_docs()),
        should_check_licences.guard(check_licenses()),
        should_check_scripts.guard(check_scripts()),
    ];
    let ext_tests = extension_tests();
    // NOTE(review): `tests_pass` captures its `needs` list from `jobs` here,
    // so the migrations job pushed below is NOT part of the tests_pass gate —
    // confirm that is intentional.
    let tests_pass = tests_pass(&jobs, &[&ext_tests.name]);

    jobs.push(should_run_tests.guard(check_postgres_and_protobuf_migrations())); // could be more specific here?

    named::workflow()
        .add_event(
            Event::default()
                .push(
                    Push::default()
                        .add_branch("main")
                        .add_branch("v[0-9]+.[0-9]+.x"),
                )
                .pull_request(PullRequest::default().add_branch("**")),
        )
        // Cancel superseded runs per ref; pushes to main use the commit sha in
        // the group so every main push gets its own (never-cancelled) group.
        .concurrency(
            Concurrency::default()
                .group(concat!(
                    "${{ github.workflow }}-${{ github.ref_name }}-",
                    "${{ github.ref_name == 'main' && github.sha || 'anysha' }}"
                ))
                .cancel_in_progress(true),
        )
        .add_env(("CARGO_TERM_COLOR", "always"))
        .add_env(("RUST_BACKTRACE", 1))
        .add_env(("CARGO_INCREMENTAL", 0))
        .map(|mut workflow| {
            for job in jobs {
                workflow = workflow.add_job(job.name, job.job)
            }
            workflow
        })
        .add_job(ext_tests.name, ext_tests.job)
        .add_job(tests_pass.name, tests_pass.job)
}
102
/// Controls which features `orchestrate_impl` includes in the generated script.
// `PartialEq`/`Eq` are required for the `target == …` comparisons in
// `orchestrate_impl`.
#[derive(PartialEq, Eq)]
enum OrchestrateTarget {
    /// For the main Zed repo: includes the cargo package filter and extension
    /// change detection, but no working-directory scoping.
    ZedRepo,
    /// For individual extension repos: scopes changed-file detection to the
    /// working directory, with no package filter or extension detection.
    Extension,
}
113
114// Generates a bash script that checks changed files against regex patterns
115// and sets GitHub output variables accordingly
116pub fn orchestrate(rules: &[&PathCondition]) -> NamedJob {
117    orchestrate_impl(rules, OrchestrateTarget::ZedRepo)
118}
119
120pub fn orchestrate_for_extension(rules: &[&PathCondition]) -> NamedJob {
121    orchestrate_impl(rules, OrchestrateTarget::Extension)
122}
123
/// Builds the `orchestrate` job: a single bash step that diffs the changed
/// files against the merge base (or `HEAD~1` outside a PR) and publishes one
/// boolean output per [`PathCondition`] — plus, for the Zed repo, a nextest
/// package filterset and the list of changed extensions.
fn orchestrate_impl(rules: &[&PathCondition], target: OrchestrateTarget) -> NamedJob {
    let name = "orchestrate".to_owned();
    // All outputs are published from this single step id.
    let step_name = "filter".to_owned();
    let mut script = String::new();

    // Determine the revision to diff against: the merge base with the PR base
    // branch when in a PR, otherwise the previous commit (direct pushes).
    script.push_str(indoc::indoc! {r#"
        set -euo pipefail
        if [ -z "$GITHUB_BASE_REF" ]; then
          echo "Not in a PR context (i.e., push to main/stable/preview)"
          COMPARE_REV="$(git rev-parse HEAD~1)"
        else
          echo "In a PR context comparing to pull_request.base.ref"
          git fetch origin "$GITHUB_BASE_REF" --depth=350
          COMPARE_REV="$(git merge-base "origin/${GITHUB_BASE_REF}" HEAD)"
        fi
        CHANGED_FILES="$(git diff --name-only "$COMPARE_REV" "$GITHUB_SHA")"

    "#});

    if target == OrchestrateTarget::Extension {
        // Extension repos run this from a subdirectory; re-scope the diff
        // output to paths inside (and relative to) that subdirectory.
        script.push_str(indoc::indoc! {r#"
        # When running from a subdirectory, git diff returns repo-root-relative paths.
        # Filter to only files within the current working directory and strip the prefix.
        REPO_SUBDIR="$(git rev-parse --show-prefix)"
        REPO_SUBDIR="${REPO_SUBDIR%/}"
        if [ -n "$REPO_SUBDIR" ]; then
            CHANGED_FILES="$(echo "$CHANGED_FILES" | grep "^${REPO_SUBDIR}/" | sed "s|^${REPO_SUBDIR}/||" || true)"
        fi

    "#});
    }

    // Helper that writes `<name>=true|false` to $GITHUB_OUTPUT depending on
    // whether the changed files match the pattern (per the grep flags passed).
    script.push_str(indoc::indoc! {r#"
        check_pattern() {
          local output_name="$1"
          local pattern="$2"
          local grep_arg="$3"

          echo "$CHANGED_FILES" | grep "$grep_arg" "$pattern" && \
            echo "${output_name}=true" >> "$GITHUB_OUTPUT" || \
            echo "${output_name}=false" >> "$GITHUB_OUTPUT"
        }

    "#});

    let mut outputs = IndexMap::new();

    if target == OrchestrateTarget::ZedRepo {
        // Compute a nextest filterset (`rdeps(pkg)|…`) from the changed
        // crates/tooling directories; an empty `changed_packages` output means
        // "run everything".
        script.push_str(indoc::indoc! {r#"
        # Check for changes that require full rebuild (no filter)
        # Direct pushes to main/stable/preview always run full suite
        if [ -z "$GITHUB_BASE_REF" ]; then
          echo "Not a PR, running full test suite"
          echo "changed_packages=" >> "$GITHUB_OUTPUT"
        elif echo "$CHANGED_FILES" | grep -qP '^(rust-toolchain\.toml|\.cargo/|\.github/|Cargo\.(toml|lock)$)'; then
          echo "Toolchain, cargo config, or root Cargo files changed, will run all tests"
          echo "changed_packages=" >> "$GITHUB_OUTPUT"
        else
          # Extract changed directories from file paths
          CHANGED_DIRS=$(echo "$CHANGED_FILES" | \
            grep -oP '^(crates|tooling)/\K[^/]+' | \
            sort -u || true)

          # Build directory-to-package mapping using cargo metadata
          DIR_TO_PKG=$(cargo metadata --format-version=1 --no-deps 2>/dev/null | \
            jq -r '.packages[] | select(.manifest_path | test("crates/|tooling/")) | "\(.manifest_path | capture("(crates|tooling)/(?<dir>[^/]+)") | .dir)=\(.name)"')

          # Map directory names to package names
          FILE_CHANGED_PKGS=""
          for dir in $CHANGED_DIRS; do
            pkg=$(echo "$DIR_TO_PKG" | grep "^${dir}=" | cut -d= -f2 | head -1)
            if [ -n "$pkg" ]; then
              FILE_CHANGED_PKGS=$(printf '%s\n%s' "$FILE_CHANGED_PKGS" "$pkg")
            else
              # Fall back to directory name if no mapping found
              FILE_CHANGED_PKGS=$(printf '%s\n%s' "$FILE_CHANGED_PKGS" "$dir")
            fi
          done
          FILE_CHANGED_PKGS=$(echo "$FILE_CHANGED_PKGS" | grep -v '^$' | sort -u || true)

          # If assets/ changed, add crates that depend on those assets
          if echo "$CHANGED_FILES" | grep -qP '^assets/'; then
            FILE_CHANGED_PKGS=$(printf '%s\n%s\n%s' "$FILE_CHANGED_PKGS" "settings" "assets" | sort -u)
          fi

          # Combine all changed packages
          ALL_CHANGED_PKGS=$(echo "$FILE_CHANGED_PKGS" | grep -v '^$' || true)

          if [ -z "$ALL_CHANGED_PKGS" ]; then
            echo "No package changes detected, will run all tests"
            echo "changed_packages=" >> "$GITHUB_OUTPUT"
          else
            # Build nextest filterset with rdeps for each package
            FILTERSET=$(echo "$ALL_CHANGED_PKGS" | \
              sed 's/.*/rdeps(&)/' | \
              tr '\n' '|' | \
              sed 's/|$//')
            echo "Changed packages filterset: $FILTERSET"
            echo "changed_packages=$FILTERSET" >> "$GITHUB_OUTPUT"
          fi
        fi

    "#});

        outputs.insert(
            "changed_packages".to_owned(),
            format!("${{{{ steps.{}.outputs.changed_packages }}}}", step_name),
        );
    }

    for rule in rules {
        // Record which job sets this condition; a rule may only be wired to
        // one orchestrate job (replace() must find nothing there).
        assert!(
            rule.set_by_step
                .borrow_mut()
                .replace(name.clone())
                .is_none()
        );
        // Each rule becomes a distinct step output; duplicate names are a bug.
        assert!(
            outputs
                .insert(
                    rule.name.to_owned(),
                    format!("${{{{ steps.{}.outputs.{} }}}}", step_name, rule.name)
                )
                .is_none()
        );

        // Inverted rules emit true when at least one changed file does NOT
        // match the pattern (grep -v); normal rules when one does match.
        let grep_arg = if rule.invert { "-qvP" } else { "-qP" };
        script.push_str(&format!(
            "check_pattern \"{}\" '{}' {}\n",
            rule.name, rule.pattern, grep_arg
        ));
    }

    if target == OrchestrateTarget::ZedRepo {
        // Detect changed extension directories and publish them as JSON.
        script.push_str(DETECT_CHANGED_EXTENSIONS_SCRIPT);
        script.push_str("echo \"changed_extensions=$EXTENSIONS_JSON\" >> \"$GITHUB_OUTPUT\"\n");

        outputs.insert(
            "changed_extensions".to_owned(),
            format!("${{{{ steps.{}.outputs.changed_extensions }}}}", step_name),
        );
    }

    let job = Job::default()
        .runs_on(runners::LINUX_SMALL)
        .with_repository_owner_guard()
        .outputs(outputs)
        // Deep history is needed so the merge-base lookup succeeds on PRs.
        .add_step(steps::checkout_repo().with_deep_history_on_non_main())
        .add_step(Step::new(step_name.clone()).run(script).id(step_name));

    NamedJob { name, job }
}
276
277pub fn tests_pass(jobs: &[NamedJob], extra_job_names: &[&str]) -> NamedJob {
278    let mut script = String::from(indoc::indoc! {r#"
279        set +x
280        EXIT_CODE=0
281
282        check_result() {
283          echo "* $1: $2"
284          if [[ "$2" != "skipped" && "$2" != "success" ]]; then EXIT_CODE=1; fi
285        }
286
287    "#});
288
289    let all_names: Vec<&str> = jobs
290        .iter()
291        .map(|job| job.name.as_str())
292        .chain(extra_job_names.iter().copied())
293        .collect();
294
295    let env_entries: Vec<_> = all_names
296        .iter()
297        .map(|name| {
298            let env_name = format!("RESULT_{}", name.to_uppercase());
299            let env_value = format!("${{{{ needs.{}.result }}}}", name);
300            (env_name, env_value)
301        })
302        .collect();
303
304    script.push_str(
305        &all_names
306            .iter()
307            .zip(env_entries.iter())
308            .map(|(name, (env_name, _))| format!("check_result \"{}\" \"${}\"", name, env_name))
309            .collect::<Vec<_>>()
310            .join("\n"),
311    );
312
313    script.push_str("\n\nexit $EXIT_CODE\n");
314
315    let job = Job::default()
316        .runs_on(runners::LINUX_SMALL)
317        .needs(
318            all_names
319                .iter()
320                .map(|name| name.to_string())
321                .collect::<Vec<String>>(),
322        )
323        .cond(repository_owner_guard_expression(true))
324        .add_step(
325            env_entries
326                .into_iter()
327                .fold(named::bash(&script), |step, env_item| {
328                    step.add_env(env_item)
329                }),
330        );
331
332    named::job(job)
333}
334
/// Bash script snippet that detects changed extension directories from `$CHANGED_FILES`.
/// Assumes `$CHANGED_FILES` is already set. Sets `$EXTENSIONS_JSON` to a JSON array of
/// changed extension paths (deleted extensions — those without an
/// `extension.toml` on disk — are filtered out; no changes yields `[]`).
/// Callers are responsible for writing the result to `$GITHUB_OUTPUT`.
pub(crate) const DETECT_CHANGED_EXTENSIONS_SCRIPT: &str = indoc::indoc! {r#"
    # Detect changed extension directories (excluding extensions/workflows)
    CHANGED_EXTENSIONS=$(echo "$CHANGED_FILES" | grep -oP '^extensions/[^/]+(?=/)' | sort -u | grep -v '^extensions/workflows$' || true)
    # Filter out deleted extensions
    EXISTING_EXTENSIONS=""
    for ext in $CHANGED_EXTENSIONS; do
        if [ -f "$ext/extension.toml" ]; then
            EXISTING_EXTENSIONS=$(printf '%s\n%s' "$EXISTING_EXTENSIONS" "$ext")
        fi
    done
    CHANGED_EXTENSIONS=$(echo "$EXISTING_EXTENSIONS" | sed '/^$/d')
    if [ -n "$CHANGED_EXTENSIONS" ]; then
        EXTENSIONS_JSON=$(echo "$CHANGED_EXTENSIONS" | jq -R -s -c 'split("\n") | map(select(length > 0))')
    else
        EXTENSIONS_JSON="[]"
    fi
"#};
355
/// Release-asset filename of the prebuilt `ts_query_ls` Linux binary.
const TS_QUERY_LS_FILE: &str = "ts_query_ls-x86_64-unknown-linux-gnu.tar.gz";
/// Pinned `ts_query_ls` release tag fetched in CI.
const CI_TS_QUERY_RELEASE: &str = "tags/v3.15.1";
358
359pub(crate) fn fetch_ts_query_ls() -> Step<Use> {
360    named::uses(
361        "dsaltares",
362        "fetch-gh-release-asset",
363        "aa37ae5c44d3c9820bc12fe675e8670ecd93bd1c",
364    ) // v1.1.1
365    .add_with(("repo", "ribru17/ts_query_ls"))
366    .add_with(("version", CI_TS_QUERY_RELEASE))
367    .add_with(("file", TS_QUERY_LS_FILE))
368}
369
/// Step that extracts the fetched `ts_query_ls` tarball into the workspace
/// and runs its formatter in check mode, printing remediation hints (and
/// failing the step via `false`) if any queries are unformatted.
pub(crate) fn run_ts_query_ls() -> Step<Run> {
    named::bash(formatdoc!(
        r#"tar -xf "$GITHUB_WORKSPACE/{TS_QUERY_LS_FILE}" -C "$GITHUB_WORKSPACE"
        "$GITHUB_WORKSPACE/ts_query_ls" format --check . || {{
            echo "Found unformatted queries, please format them with ts_query_ls."
            echo "For easy use, install the Tree-sitter query extension:"
            echo "zed://extension/tree-sitter-query"
            false
        }}"#
    ))
}
381
/// Style-check job: prettier, `cargo fmt`, TODO/keymap scripts, typo
/// detection, and tree-sitter query formatting.
fn check_style() -> NamedJob {
    // crate-ci/typos pinned to a commit, configured via typos.toml.
    fn check_for_typos() -> Step<Use> {
        named::uses(
            "crate-ci",
            "typos",
            "2d0ce569feab1f8752f1dde43cc2f2aa53236e06",
        ) // v1.40.0
        .with(("config", "./typos.toml"))
    }

    named::job(
        release_job(&[])
            .runs_on(runners::LINUX_MEDIUM)
            .add_step(steps::checkout_repo())
            .add_step(steps::cache_rust_dependencies_namespace())
            .add_step(steps::setup_pnpm())
            .add_step(steps::prettier())
            .add_step(steps::cargo_fmt())
            .add_step(steps::script("./script/check-todos"))
            .add_step(steps::script("./script/check-keymaps"))
            .add_step(check_for_typos())
            // ts_query_ls verifies tree-sitter query files are formatted.
            .add_step(fetch_ts_query_ls())
            .add_step(run_ts_query_ls()),
    )
}
407
/// Dependency hygiene job: unused dependencies (cargo-machete), a
/// still-consistent `Cargo.lock`, and GitHub's vulnerable-dependency review.
fn check_dependencies() -> NamedJob {
    // Installs cargo-machete via taiki-e/install-action (pinned commit).
    fn install_cargo_machete() -> Step<Use> {
        named::uses(
            "taiki-e",
            "install-action",
            "02cc5f8ca9f2301050c0c099055816a41ee05507",
        )
        .add_with(("tool", "cargo-machete@0.7.0"))
    }

    // Fails if any workspace crate declares a dependency it never uses.
    fn run_cargo_machete() -> Step<Run> {
        named::bash("cargo machete")
    }

    // `--locked` fails if Cargo.lock is out of date with the manifests.
    fn check_cargo_lock() -> Step<Run> {
        named::bash("cargo update --locked --workspace")
    }

    // Dependency review only makes sense against a PR diff, hence the guard;
    // license checking is handled separately by check_licenses.
    fn check_vulnerable_dependencies() -> Step<Use> {
        named::uses(
            "actions",
            "dependency-review-action",
            "67d4f4bd7a9b17a0db54d2a7519187c65e339de8", // v4
        )
        .if_condition(Expression::new("github.event_name == 'pull_request'"))
        .with(("license-check", false))
    }

    named::job(use_clang(
        release_job(&[])
            .runs_on(runners::LINUX_SMALL)
            .add_step(steps::checkout_repo())
            .add_step(steps::cache_rust_dependencies_namespace())
            .add_step(install_cargo_machete())
            .add_step(run_cargo_machete())
            .add_step(check_cargo_lock())
            .add_step(check_vulnerable_dependencies()),
    ))
}
447
/// Job that type-checks `gpui_platform` for `wasm32-unknown-unknown` using a
/// nightly toolchain (needed for `-Zbuild-std`).
fn check_wasm() -> NamedJob {
    // rust-src is required so -Zbuild-std can rebuild std for the target.
    fn install_nightly_wasm_toolchain() -> Step<Run> {
        named::bash(
            "rustup toolchain install nightly --component rust-src --target wasm32-unknown-unknown",
        )
    }

    // Check only (no codegen artifacts kept); the target features enable
    // threads-style wasm (atomics + bulk memory + mutable globals).
    fn cargo_check_wasm() -> Step<Run> {
        named::bash(concat!(
            "cargo +nightly -Zbuild-std=std,panic_abort ",
            "check --target wasm32-unknown-unknown -p gpui_platform",
        ))
        .add_env((
            "CARGO_TARGET_WASM32_UNKNOWN_UNKNOWN_RUSTFLAGS",
            "-C target-feature=+atomics,+bulk-memory,+mutable-globals",
        ))
    }

    named::job(
        release_job(&[])
            .runs_on(runners::LINUX_LARGE)
            .add_step(steps::checkout_repo())
            .add_step(steps::setup_cargo_config(Platform::Linux))
            .add_step(steps::cache_rust_dependencies_namespace())
            .add_step(install_nightly_wasm_toolchain())
            .add_step(steps::setup_sccache(Platform::Linux))
            .add_step(cargo_check_wasm())
            .add_step(steps::show_sccache_stats(Platform::Linux))
            .add_step(steps::cleanup_cargo_config(Platform::Linux)),
    )
}
479
480fn check_workspace_binaries() -> NamedJob {
481    named::job(use_clang(
482        release_job(&[])
483            .runs_on(runners::LINUX_LARGE)
484            .add_step(steps::checkout_repo())
485            .add_step(steps::setup_cargo_config(Platform::Linux))
486            .add_step(steps::cache_rust_dependencies_namespace())
487            .map(steps::install_linux_dependencies)
488            .add_step(steps::setup_sccache(Platform::Linux))
489            .add_step(steps::script("cargo build -p collab"))
490            .add_step(steps::script("cargo build --workspace --bins --examples"))
491            .add_step(steps::show_sccache_stats(Platform::Linux))
492            .add_step(steps::cleanup_cargo_config(Platform::Linux)),
493    ))
494}
495
/// Clippy job for `platform`, optionally cross-compiling for `arch`
/// (currently only x86_64 builds on macOS hosts are supported).
pub(crate) fn clippy(platform: Platform, arch: Option<Arch>) -> NamedJob {
    // Map (platform, arch) to a rustup target triple; only the Mac
    // combinations are implemented.
    let target = arch.map(|arch| match (platform, arch) {
        (Platform::Mac, Arch::X86_64) => "x86_64-apple-darwin",
        (Platform::Mac, Arch::AARCH64) => "aarch64-apple-darwin",
        _ => unimplemented!("cross-arch clippy not supported for {platform}/{arch}"),
    });
    let runner = match platform {
        Platform::Windows => runners::WINDOWS_DEFAULT,
        Platform::Linux => runners::LINUX_DEFAULT,
        Platform::Mac => runners::MAC_DEFAULT,
    };
    let mut job = release_job(&[])
        .runs_on(runner)
        .add_step(steps::checkout_repo())
        .add_step(steps::setup_cargo_config(platform))
        // Dependency caching is only wired up for Linux and Mac here.
        .when(
            platform == Platform::Linux || platform == Platform::Mac,
            |this| this.add_step(steps::cache_rust_dependencies_namespace()),
        )
        .when(
            platform == Platform::Linux,
            steps::install_linux_dependencies,
        )
        // Cross builds need the extra rustup target installed first.
        .when_some(target, |this, target| {
            this.add_step(steps::install_rustup_target(target))
        })
        .add_step(steps::setup_sccache(platform))
        .add_step(steps::clippy(platform, target))
        .add_step(steps::show_sccache_stats(platform));
    if platform == Platform::Linux {
        job = use_clang(job);
    }
    // Cross-arch jobs get the arch in the name to keep job ids unique.
    let name = match arch {
        Some(arch) => format!("clippy_{platform}_{arch}"),
        None => format!("clippy_{platform}"),
    };
    NamedJob { name, job }
}
534
535pub(crate) fn run_platform_tests(platform: Platform) -> NamedJob {
536    run_platform_tests_impl(platform, true)
537}
538
539pub(crate) fn run_platform_tests_no_filter(platform: Platform) -> NamedJob {
540    run_platform_tests_impl(platform, false)
541}
542
/// Builds the per-platform test job. On Linux it also provisions a Postgres
/// service container. When `filter_packages` is true, nextest is restricted
/// to the `changed_packages` filterset published by the `orchestrate` job.
fn run_platform_tests_impl(platform: Platform, filter_packages: bool) -> NamedJob {
    let runner = match platform {
        Platform::Windows => runners::WINDOWS_DEFAULT,
        Platform::Linux => runners::LINUX_DEFAULT,
        Platform::Mac => runners::MAC_DEFAULT,
    };
    NamedJob {
        name: format!("run_tests_{platform}"),
        job: release_job(&[])
            .runs_on(runner)
            // Linux tests need a Postgres service (trust auth, default port).
            .when(platform == Platform::Linux, |job| {
                job.add_service(
                    "postgres",
                    Container::new("postgres:15")
                        .add_env(("POSTGRES_HOST_AUTH_METHOD", "trust"))
                        .ports(vec![Port::Name("5432:5432".into())])
                        .options(
                            "--health-cmd pg_isready \
                             --health-interval 500ms \
                             --health-timeout 5s \
                             --health-retries 10",
                        ),
                )
            })
            .add_step(steps::checkout_repo())
            .add_step(steps::setup_cargo_config(platform))
            .when(platform == Platform::Mac, |this| {
                this.add_step(steps::cache_rust_dependencies_namespace())
            })
            // Linux additionally switches the job's compiler to clang.
            .when(platform == Platform::Linux, |this| {
                use_clang(this.add_step(steps::cache_rust_dependencies_namespace()))
            })
            .when(
                platform == Platform::Linux,
                steps::install_linux_dependencies,
            )
            .add_step(steps::setup_node())
            // NOTE(review): nextest is only installed on Linux/Mac here —
            // presumably the Windows image ships it; confirm.
            .when(
                platform == Platform::Linux || platform == Platform::Mac,
                |job| job.add_step(steps::cargo_install_nextest()),
            )
            .add_step(steps::clear_target_dir_if_large(platform))
            .add_step(steps::setup_sccache(platform))
            // Exactly one of the two nextest variants is added.
            .when(filter_packages, |job| {
                job.add_step(
                    steps::cargo_nextest(platform).with_changed_packages_filter("orchestrate"),
                )
            })
            .when(!filter_packages, |job| {
                job.add_step(steps::cargo_nextest(platform))
            })
            .add_step(steps::show_sccache_stats(platform))
            .add_step(steps::cleanup_cargo_config(platform)),
    }
}
598
599fn build_visual_tests_binary() -> NamedJob {
600    pub fn cargo_build_visual_tests() -> Step<Run> {
601        named::bash("cargo build -p zed --bin zed_visual_test_runner --features visual-tests")
602    }
603
604    named::job(
605        Job::default()
606            .runs_on(runners::MAC_DEFAULT)
607            .add_step(steps::checkout_repo())
608            .add_step(steps::setup_cargo_config(Platform::Mac))
609            .add_step(steps::cache_rust_dependencies_namespace())
610            .add_step(cargo_build_visual_tests())
611            .add_step(steps::cleanup_cargo_config(Platform::Mac)),
612    )
613}
614
/// Job that checks protobuf backwards-compatibility (buf breaking/lint/format)
/// against the PR base branch, merging it in first so the comparison is fresh.
pub(crate) fn check_postgres_and_protobuf_migrations() -> NamedJob {
    // Outside a PR, compare against the merge base with main; inside a PR,
    // merge the base branch into a temp branch and compare against the base.
    fn ensure_fresh_merge() -> Step<Run> {
        named::bash(indoc::indoc! {r#"
            if [ -z "$GITHUB_BASE_REF" ];
            then
              echo "BUF_BASE_BRANCH=$(git merge-base origin/main HEAD)" >> "$GITHUB_ENV"
            else
              git checkout -B temp
              git merge -q "origin/$GITHUB_BASE_REF" -m "merge main into temp"
              echo "BUF_BASE_BRANCH=$GITHUB_BASE_REF" >> "$GITHUB_ENV"
            fi
        "#})
    }

    // Installs a pinned buf CLI version.
    fn bufbuild_setup_action() -> Step<Use> {
        named::uses("bufbuild", "buf-setup-action", "v1")
            .add_with(("version", "v1.29.0"))
            .add_with(("github_token", vars::GITHUB_TOKEN))
    }

    // Compares crates/proto/proto against the same dir on $BUF_BASE_BRANCH.
    fn bufbuild_breaking_action() -> Step<Use> {
        named::uses("bufbuild", "buf-breaking-action", "v1").add_with(("input", "crates/proto/proto/"))
            .add_with(("against", "https://github.com/${GITHUB_REPOSITORY}.git#branch=${BUF_BASE_BRANCH},subdir=crates/proto/proto/"))
    }

    fn buf_lint() -> Step<Run> {
        named::bash("buf lint crates/proto/proto")
    }

    fn check_protobuf_formatting() -> Step<Run> {
        named::bash("buf format --diff --exit-code crates/proto/proto")
    }

    named::job(
        release_job(&[])
            .runs_on(runners::LINUX_DEFAULT)
            // Git identity is required for the merge commit in ensure_fresh_merge.
            .add_env(("GIT_AUTHOR_NAME", "Protobuf Action"))
            .add_env(("GIT_AUTHOR_EMAIL", "ci@zed.dev"))
            .add_env(("GIT_COMMITTER_NAME", "Protobuf Action"))
            .add_env(("GIT_COMMITTER_EMAIL", "ci@zed.dev"))
            // Full history so merge-base/merge operations succeed.
            .add_step(steps::checkout_repo().with_full_history())
            .add_step(ensure_fresh_merge())
            .add_step(bufbuild_setup_action())
            .add_step(bufbuild_breaking_action())
            .add_step(buf_lint())
            .add_step(check_protobuf_formatting()),
    )
}
663
/// Job that runs every workspace doctest (`cargo test --doc`) on Linux.
fn doctests() -> NamedJob {
    // --no-fail-fast so all doctest failures are reported in one run.
    fn run_doctests() -> Step<Run> {
        named::bash(indoc::indoc! {r#"
            cargo test --workspace --doc --no-fail-fast
        "#})
        .id("run_doctests")
    }

    named::job(use_clang(
        release_job(&[])
            .runs_on(runners::LINUX_DEFAULT)
            .add_step(steps::checkout_repo())
            .add_step(steps::cache_rust_dependencies_namespace())
            .map(steps::install_linux_dependencies)
            .add_step(steps::setup_cargo_config(Platform::Linux))
            .add_step(steps::setup_sccache(Platform::Linux))
            .add_step(run_doctests())
            .add_step(steps::show_sccache_stats(Platform::Linux))
            .add_step(steps::cleanup_cargo_config(Platform::Linux)),
    ))
}
685
686fn check_licenses() -> NamedJob {
687    named::job(
688        Job::default()
689            .runs_on(runners::LINUX_SMALL)
690            .add_step(steps::checkout_repo())
691            .add_step(steps::cache_rust_dependencies_namespace())
692            .add_step(steps::script("./script/check-licenses"))
693            .add_step(steps::script("./script/generate-licenses")),
694    )
695}
696
/// Docs job: link-checks the markdown sources, builds the mdBook site, then
/// link-checks the generated HTML as well.
fn check_docs() -> NamedJob {
    // lychee link checker, pinned; external http(s) links are excluded.
    fn lychee_link_check(dir: &str) -> Step<Use> {
        named::uses(
            "lycheeverse",
            "lychee-action",
            "82202e5e9c2f4ef1a55a3d02563e1cb6041e5332",
        ) // v2.4.1
        .add_with(("args", format!("--no-progress --exclude '^http' '{dir}'")))
        .add_with(("fail", true))
        .add_with(("jobSummary", false))
    }

    // Installs a pinned mdbook version.
    fn install_mdbook() -> Step<Use> {
        named::uses(
            "peaceiris",
            "actions-mdbook",
            "ee69d230fe19748b7abf22df32acaa93833fad08", // v2
        )
        .with(("mdbook-version", "0.4.37"))
    }

    // Renders the book into target/deploy/docs.
    fn build_docs() -> Step<Run> {
        named::bash(indoc::indoc! {r#"
            mkdir -p target/deploy
            mdbook build ./docs --dest-dir=../target/deploy/docs/
        "#})
    }

    named::job(use_clang(
        release_job(&[])
            .runs_on(runners::LINUX_LARGE)
            .add_step(steps::checkout_repo())
            .add_step(steps::setup_cargo_config(Platform::Linux))
            // todo(ci): un-inline build_docs/action.yml here
            .add_step(steps::cache_rust_dependencies_namespace())
            .add_step(
                lychee_link_check("./docs/src/**/*"), // check markdown links
            )
            .map(steps::install_linux_dependencies)
            .add_step(steps::script("./script/generate-action-metadata"))
            .add_step(install_mdbook())
            .add_step(build_docs())
            .add_step(
                lychee_link_check("target/deploy/docs"), // check links in generated html
            ),
    ))
}
744
/// Scripts/workflow-hygiene job: shellcheck, actionlint, and a check that the
/// generated `.github` workflows are in sync with `cargo xtask workflows`.
pub(crate) fn check_scripts() -> NamedJob {
    // Downloads actionlint; the installer script reports the binary path as a
    // step output (consumed below via the step id `get_actionlint`).
    fn download_actionlint() -> Step<Run> {
        named::bash(
            "bash <(curl https://raw.githubusercontent.com/rhysd/actionlint/main/scripts/download-actionlint.bash)",
        )
    }

    fn run_actionlint() -> Step<Run> {
        named::bash(r#""$ACTIONLINT_BIN" -color"#).add_env((
            "ACTIONLINT_BIN",
            "${{ steps.get_actionlint.outputs.executable }}",
        ))
    }

    fn run_shellcheck() -> Step<Run> {
        named::bash("./script/shellcheck-scripts error")
    }

    // Regenerates the workflows and fails if that leaves the working tree dirty,
    // i.e. the checked-in .github files were edited by hand.
    fn check_xtask_workflows() -> Step<Run> {
        named::bash(indoc::indoc! {r#"
            cargo xtask workflows
            if ! git diff --exit-code .github; then
              echo "Error: .github directory has uncommitted changes after running 'cargo xtask workflows'"
              echo "Please run 'cargo xtask workflows' locally and commit the changes"
              exit 1
            fi
        "#})
    }

    named::job(
        release_job(&[])
            .runs_on(runners::LINUX_SMALL)
            .add_step(steps::checkout_repo())
            .add_step(run_shellcheck())
            .add_step(download_actionlint().id("get_actionlint"))
            .add_step(run_actionlint())
            .add_step(cache_rust_dependencies_namespace())
            .add_step(check_xtask_workflows()),
    )
}
785
/// Matrix job that invokes the reusable extension-tests workflow once per
/// changed extension reported by the `orchestrate` job; skipped entirely when
/// no extensions changed.
fn extension_tests() -> NamedJob<UsesJob> {
    let job = Job::default()
        .needs(vec!["orchestrate".to_owned()])
        // Only run when orchestrate reported a non-empty extension list.
        .cond(Expression::new(
            "needs.orchestrate.outputs.changed_extensions != '[]'",
        ))
        .permissions(Permissions::default().contents(Level::Read))
        .strategy(
            Strategy::default()
                .fail_fast(false)
                // TODO: Remove the limit. We currently need this to workaround the concurrency group issue
                // where different matrix jobs would be placed in the same concurrency group and thus cancelled.
                .max_parallel(1u32)
                // The matrix is expanded at runtime from the JSON output.
                .matrix(json!({
                    "extension": "${{ fromJson(needs.orchestrate.outputs.changed_extensions) }}"
                })),
        )
        .uses_local(".github/workflows/extension_tests.yml")
        .with(Input::default().add("working-directory", "${{ matrix.extension }}"));

    named::job(job)
}