// run_tests.rs

  1use gh_workflow::{
  2    Concurrency, Container, Event, Expression, Job, Port, PullRequest, Push, Run, Step, Use,
  3    Workflow,
  4};
  5use indexmap::IndexMap;
  6
  7use crate::tasks::workflows::{
  8    nix_build::build_nix,
  9    runners::Arch,
 10    steps::{CommonJobConditions, repository_owner_guard_expression},
 11    vars::{self, PathCondition},
 12};
 13
 14use super::{
 15    runners::{self, Platform},
 16    steps::{self, FluentBuilder, NamedJob, named, release_job},
 17};
 18
/// Assembles the `run_tests` CI workflow.
///
/// An `orchestrate` job diffs the changed files against a set of
/// `PathCondition` regexes and publishes one boolean output per condition;
/// each downstream job is wrapped in `PathCondition::guard` so it only runs
/// when its condition fired. A final `tests_pass` job fans in on the other
/// jobs so branch protection can require a single status check.
pub(crate) fn run_tests() -> Workflow {
    // Specify anything which should potentially skip full test suite in this regex:
    // - docs/
    // - script/update_top_ranking_issues/
    // - .github/ISSUE_TEMPLATE/
    // - .github/workflows/  (except .github/workflows/run_tests*, which the
    //   negative lookahead keeps out of the exclusion so edits to this very
    //   workflow still run the full suite)
    let should_run_tests = PathCondition::inverted(
        "run_tests",
        r"^(docs/|script/update_top_ranking_issues/|\.github/(ISSUE_TEMPLATE|workflows/(?!run_tests)))",
    );
    let should_check_docs = PathCondition::new("run_docs", r"^(docs/|crates/.*\.rs)");
    let should_check_scripts = PathCondition::new(
        "run_action_checks",
        r"^\.github/(workflows/|actions/|actionlint.yml)|tooling/xtask|script/",
    );
    let should_check_licences =
        PathCondition::new("run_licenses", r"^(Cargo.lock|script/.*licenses)");
    let should_build_nix = PathCondition::new(
        "run_nix",
        r"^(nix/|flake\.|Cargo\.|rust-toolchain.toml|\.cargo/config.toml)",
    );

    // The orchestrate job evaluates every condition once over the diff and
    // exposes the results as job outputs consumed by the guards below.
    let orchestrate = orchestrate(&[
        &should_check_scripts,
        &should_check_docs,
        &should_check_licences,
        &should_build_nix,
        &should_run_tests,
    ]);

    let mut jobs = vec![
        orchestrate,
        check_style(),
        should_run_tests.guard(clippy(Platform::Windows)),
        should_run_tests.guard(clippy(Platform::Linux)),
        should_run_tests.guard(clippy(Platform::Mac)),
        should_run_tests.guard(run_platform_tests(Platform::Windows)),
        should_run_tests.guard(run_platform_tests(Platform::Linux)),
        should_run_tests.guard(run_platform_tests(Platform::Mac)),
        should_run_tests.guard(doctests()),
        should_run_tests.guard(check_workspace_binaries()),
        should_run_tests.guard(check_dependencies()), // could be more specific here?
        should_check_docs.guard(check_docs()),
        should_check_licences.guard(check_licenses()),
        should_check_scripts.guard(check_scripts()),
        should_build_nix.guard(build_nix(
            Platform::Linux,
            Arch::X86_64,
            "debug",
            // *don't* cache the built output
            Some("-zed-editor-[0-9.]*-nightly"),
            &[],
        )),
        should_build_nix.guard(build_nix(
            Platform::Mac,
            Arch::AARCH64,
            "debug",
            // *don't* cache the built output
            Some("-zed-editor-[0-9.]*-nightly"),
            &[],
        )),
    ];
    // `tests_pass` depends on (and checks the result of) every job pushed so far.
    let tests_pass = tests_pass(&jobs);

    // NOTE(review): pushed *after* `tests_pass` was computed, so this job is
    // NOT in `tests_pass`'s needs/result checks — confirm the migrations
    // check is intentionally non-blocking.
    jobs.push(should_run_tests.guard(check_postgres_and_protobuf_migrations())); // could be more specific here?

    named::workflow()
        .add_event(
            Event::default()
                .push(
                    Push::default()
                        .add_branch("main")
                        .add_branch("v[0-9]+.[0-9]+.x"),
                )
                .pull_request(PullRequest::default().add_branch("**")),
        )
        // On main every push gets a unique group (the sha), so runs are never
        // cancelled; on any other ref the constant 'anysha' makes newer runs
        // cancel in-flight ones for the same ref.
        .concurrency(
            Concurrency::default()
                .group(concat!(
                    "${{ github.workflow }}-${{ github.ref_name }}-",
                    "${{ github.ref_name == 'main' && github.sha || 'anysha' }}"
                ))
                .cancel_in_progress(true),
        )
        .add_env(("CARGO_TERM_COLOR", "always"))
        .add_env(("RUST_BACKTRACE", 1))
        .add_env(("CARGO_INCREMENTAL", 0))
        .map(|mut workflow| {
            for job in jobs {
                workflow = workflow.add_job(job.name, job.job)
            }
            workflow
        })
        .add_job(tests_pass.name, tests_pass.job)
}
114
115// Generates a bash script that checks changed files against regex patterns
116// and sets GitHub output variables accordingly
// Generates a bash script that checks changed files against regex patterns
// and sets GitHub output variables accordingly.
/// Builds the `orchestrate` job *with* the changed-packages output, which
/// downstream test jobs use as a nextest filterset to narrow what they run.
pub fn orchestrate(rules: &[&PathCondition]) -> NamedJob {
    orchestrate_impl(rules, true)
}
120
/// Builds the `orchestrate` job *without* the changed-packages output, for
/// workflows that always run their full test set.
pub fn orchestrate_without_package_filter(rules: &[&PathCondition]) -> NamedJob {
    orchestrate_impl(rules, false)
}
124
/// Builds the `orchestrate` job: a small Linux job that diffs the changed
/// files against each `PathCondition` and publishes one boolean output per
/// condition (plus, optionally, a nextest filterset of changed packages).
///
/// Side effect: records this job's name in each rule's `set_by_step` so
/// guarded jobs know which job produces their output. Panics (at workflow
/// generation time, not in CI) if a rule is claimed twice or two rules share
/// a name.
fn orchestrate_impl(rules: &[&PathCondition], include_package_filter: bool) -> NamedJob {
    let name = "orchestrate".to_owned();
    let step_name = "filter".to_owned();
    let mut script = String::new();

    // Preamble: compute COMPARE_REV (merge-base with the PR base branch, or
    // HEAD~1 for direct pushes), collect CHANGED_FILES, and define the
    // check_pattern helper that writes `<output>=true|false` to
    // $GITHUB_OUTPUT depending on whether the diff matches a pattern.
    script.push_str(indoc::indoc! {r#"
        if [ -z "$GITHUB_BASE_REF" ]; then
          echo "Not in a PR context (i.e., push to main/stable/preview)"
          COMPARE_REV="$(git rev-parse HEAD~1)"
        else
          echo "In a PR context comparing to pull_request.base.ref"
          git fetch origin "$GITHUB_BASE_REF" --depth=350
          COMPARE_REV="$(git merge-base "origin/${GITHUB_BASE_REF}" HEAD)"
        fi
        CHANGED_FILES="$(git diff --name-only "$COMPARE_REV" ${{ github.sha }})"

        check_pattern() {
          local output_name="$1"
          local pattern="$2"
          local grep_arg="$3"

          echo "$CHANGED_FILES" | grep "$grep_arg" "$pattern" && \
            echo "${output_name}=true" >> "$GITHUB_OUTPUT" || \
            echo "${output_name}=false" >> "$GITHUB_OUTPUT"
        }

    "#});

    let mut outputs = IndexMap::new();

    if include_package_filter {
        // Optional package-level narrowing: map changed crates/ and tooling/
        // directories to cargo package names (via `cargo metadata` + jq) and
        // emit an `rdeps(pkg)|...` nextest filterset. An empty
        // `changed_packages` output means "run everything" — used for direct
        // pushes, toolchain/workspace-level changes, or when no package
        // mapping is found.
        script.push_str(indoc::indoc! {r#"
        # Check for changes that require full rebuild (no filter)
        # Direct pushes to main/stable/preview always run full suite
        if [ -z "$GITHUB_BASE_REF" ]; then
          echo "Not a PR, running full test suite"
          echo "changed_packages=" >> "$GITHUB_OUTPUT"
        elif echo "$CHANGED_FILES" | grep -qP '^(rust-toolchain\.toml|\.cargo/|\.github/|Cargo\.(toml|lock)$)'; then
          echo "Toolchain, cargo config, or root Cargo files changed, will run all tests"
          echo "changed_packages=" >> "$GITHUB_OUTPUT"
        else
          # Extract changed directories from file paths
          CHANGED_DIRS=$(echo "$CHANGED_FILES" | \
            grep -oP '^(crates|tooling)/\K[^/]+' | \
            sort -u || true)

          # Build directory-to-package mapping using cargo metadata
          DIR_TO_PKG=$(cargo metadata --format-version=1 --no-deps 2>/dev/null | \
            jq -r '.packages[] | select(.manifest_path | test("crates/|tooling/")) | "\(.manifest_path | capture("(crates|tooling)/(?<dir>[^/]+)") | .dir)=\(.name)"')

          # Map directory names to package names
          FILE_CHANGED_PKGS=""
          for dir in $CHANGED_DIRS; do
            pkg=$(echo "$DIR_TO_PKG" | grep "^${dir}=" | cut -d= -f2 | head -1)
            if [ -n "$pkg" ]; then
              FILE_CHANGED_PKGS=$(printf '%s\n%s' "$FILE_CHANGED_PKGS" "$pkg")
            else
              # Fall back to directory name if no mapping found
              FILE_CHANGED_PKGS=$(printf '%s\n%s' "$FILE_CHANGED_PKGS" "$dir")
            fi
          done
          FILE_CHANGED_PKGS=$(echo "$FILE_CHANGED_PKGS" | grep -v '^$' | sort -u || true)

          # If assets/ changed, add crates that depend on those assets
          if echo "$CHANGED_FILES" | grep -qP '^assets/'; then
            FILE_CHANGED_PKGS=$(printf '%s\n%s\n%s\n%s' "$FILE_CHANGED_PKGS" "settings" "storybook" "assets" | sort -u)
          fi

          # Combine all changed packages
          ALL_CHANGED_PKGS=$(echo "$FILE_CHANGED_PKGS" | grep -v '^$' || true)

          if [ -z "$ALL_CHANGED_PKGS" ]; then
            echo "No package changes detected, will run all tests"
            echo "changed_packages=" >> "$GITHUB_OUTPUT"
          else
            # Build nextest filterset with rdeps for each package
            FILTERSET=$(echo "$ALL_CHANGED_PKGS" | \
              sed 's/.*/rdeps(&)/' | \
              tr '\n' '|' | \
              sed 's/|$//')
            echo "Changed packages filterset: $FILTERSET"
            echo "changed_packages=$FILTERSET" >> "$GITHUB_OUTPUT"
          fi
        fi

    "#});

        outputs.insert(
            "changed_packages".to_owned(),
            format!("${{{{ steps.{}.outputs.changed_packages }}}}", step_name),
        );
    }

    for rule in rules {
        // Claim this rule for the current job; `replace` returning `Some`
        // means another orchestrate job already wired it up — a generator
        // bug, so fail fast.
        assert!(
            rule.set_by_step
                .borrow_mut()
                .replace(name.clone())
                .is_none()
        );
        // A duplicate rule name would silently overwrite an output mapping.
        assert!(
            outputs
                .insert(
                    rule.name.to_owned(),
                    format!("${{{{ steps.{}.outputs.{} }}}}", step_name, rule.name)
                )
                .is_none()
        );

        // Inverted rules fire when some changed file does NOT match the
        // pattern (grep -v); normal rules fire when some file does.
        let grep_arg = if rule.invert { "-qvP" } else { "-qP" };
        script.push_str(&format!(
            "check_pattern \"{}\" '{}' {}\n",
            rule.name, rule.pattern, grep_arg
        ));
    }

    let job = Job::default()
        .runs_on(runners::LINUX_SMALL)
        .with_repository_owner_guard()
        .outputs(outputs)
        // Shallow fetch on main (only HEAD~1 is needed); deeper on PRs so the
        // merge-base with the target branch can be found.
        .add_step(steps::checkout_repo().add_with((
            "fetch-depth",
            "${{ github.ref == 'refs/heads/main' && 2 || 350 }}",
        )))
        .add_step(Step::new(step_name.clone()).run(script).id(step_name));

    NamedJob { name, job }
}
253
254pub fn tests_pass(jobs: &[NamedJob]) -> NamedJob {
255    let mut script = String::from(indoc::indoc! {r#"
256        set +x
257        EXIT_CODE=0
258
259        check_result() {
260          echo "* $1: $2"
261          if [[ "$2" != "skipped" && "$2" != "success" ]]; then EXIT_CODE=1; fi
262        }
263
264    "#});
265
266    script.push_str(
267        &jobs
268            .iter()
269            .map(|job| {
270                format!(
271                    "check_result \"{}\" \"${{{{ needs.{}.result }}}}\"",
272                    job.name, job.name
273                )
274            })
275            .collect::<Vec<_>>()
276            .join("\n"),
277    );
278
279    script.push_str("\n\nexit $EXIT_CODE\n");
280
281    let job = Job::default()
282        .runs_on(runners::LINUX_SMALL)
283        .needs(
284            jobs.iter()
285                .map(|j| j.name.to_string())
286                .collect::<Vec<String>>(),
287        )
288        .cond(repository_owner_guard_expression(true))
289        .add_step(named::bash(&script));
290
291    named::job(job)
292}
293
294fn check_style() -> NamedJob {
295    fn check_for_typos() -> Step<Use> {
296        named::uses(
297            "crate-ci",
298            "typos",
299            "2d0ce569feab1f8752f1dde43cc2f2aa53236e06",
300        ) // v1.40.0
301        .with(("config", "./typos.toml"))
302    }
303    named::job(
304        release_job(&[])
305            .runs_on(runners::LINUX_MEDIUM)
306            .add_step(steps::checkout_repo())
307            .add_step(steps::cache_rust_dependencies_namespace())
308            .add_step(steps::setup_pnpm())
309            .add_step(steps::prettier())
310            .add_step(steps::cargo_fmt())
311            .add_step(steps::script("./script/check-todos"))
312            .add_step(steps::script("./script/check-keymaps"))
313            .add_step(check_for_typos()),
314    )
315}
316
317fn check_dependencies() -> NamedJob {
318    fn install_cargo_machete() -> Step<Use> {
319        named::uses(
320            "clechasseur",
321            "rs-cargo",
322            "8435b10f6e71c2e3d4d3b7573003a8ce4bfc6386", // v2
323        )
324        .add_with(("command", "install"))
325        .add_with(("args", "cargo-machete@0.7.0"))
326    }
327
328    fn run_cargo_machete() -> Step<Use> {
329        named::uses(
330            "clechasseur",
331            "rs-cargo",
332            "8435b10f6e71c2e3d4d3b7573003a8ce4bfc6386", // v2
333        )
334        .add_with(("command", "machete"))
335    }
336
337    fn check_cargo_lock() -> Step<Run> {
338        named::bash("cargo update --locked --workspace")
339    }
340
341    fn check_vulnerable_dependencies() -> Step<Use> {
342        named::uses(
343            "actions",
344            "dependency-review-action",
345            "67d4f4bd7a9b17a0db54d2a7519187c65e339de8", // v4
346        )
347        .if_condition(Expression::new("github.event_name == 'pull_request'"))
348        .with(("license-check", false))
349    }
350
351    named::job(
352        release_job(&[])
353            .runs_on(runners::LINUX_SMALL)
354            .add_step(steps::checkout_repo())
355            .add_step(steps::cache_rust_dependencies_namespace())
356            .add_step(install_cargo_machete())
357            .add_step(run_cargo_machete())
358            .add_step(check_cargo_lock())
359            .add_step(check_vulnerable_dependencies()),
360    )
361}
362
363fn check_workspace_binaries() -> NamedJob {
364    named::job(
365        release_job(&[])
366            .runs_on(runners::LINUX_LARGE)
367            .add_step(steps::checkout_repo())
368            .add_step(steps::setup_cargo_config(Platform::Linux))
369            .add_step(steps::cache_rust_dependencies_namespace())
370            .map(steps::install_linux_dependencies)
371            .add_step(steps::script("cargo build -p collab"))
372            .add_step(steps::script("cargo build --workspace --bins --examples"))
373            .add_step(steps::cleanup_cargo_config(Platform::Linux)),
374    )
375}
376
377pub(crate) fn clippy(platform: Platform) -> NamedJob {
378    let runner = match platform {
379        Platform::Windows => runners::WINDOWS_DEFAULT,
380        Platform::Linux => runners::LINUX_DEFAULT,
381        Platform::Mac => runners::MAC_DEFAULT,
382    };
383    NamedJob {
384        name: format!("clippy_{platform}"),
385        job: release_job(&[])
386            .runs_on(runner)
387            .add_step(steps::checkout_repo().add_with(("fetch-depth", 0)))
388            .add_step(steps::restore_mtime())
389            .add_step(steps::setup_cargo_config(platform))
390            .when(
391                platform == Platform::Linux || platform == Platform::Mac,
392                |this| this.add_step(steps::cache_rust_dependencies_namespace()),
393            )
394            .when(
395                platform == Platform::Linux,
396                steps::install_linux_dependencies,
397            )
398            .add_step(steps::clippy(platform)),
399    }
400}
401
/// Platform test job *with* the changed-packages nextest filter produced by
/// the `orchestrate` job.
pub(crate) fn run_platform_tests(platform: Platform) -> NamedJob {
    run_platform_tests_impl(platform, true)
}
405
/// Platform test job that always runs the full test suite (no package
/// filter), for workflows without an `orchestrate` job.
pub(crate) fn run_platform_tests_no_filter(platform: Platform) -> NamedJob {
    run_platform_tests_impl(platform, false)
}
409
/// Builds the `run_tests_<platform>` job.
///
/// Linux additionally gets a postgres:15 service container (trust auth,
/// health-checked) — presumably for collab's database tests; confirm with the
/// test suite. Nextest is installed on Linux/Mac only (Windows runs
/// `steps::cargo_nextest` however that helper handles it). When
/// `filter_packages` is set, the nextest step is narrowed by the
/// `orchestrate` job's changed-packages filterset.
fn run_platform_tests_impl(platform: Platform, filter_packages: bool) -> NamedJob {
    let runner = match platform {
        Platform::Windows => runners::WINDOWS_DEFAULT,
        Platform::Linux => runners::LINUX_DEFAULT,
        Platform::Mac => runners::MAC_DEFAULT,
    };
    NamedJob {
        name: format!("run_tests_{platform}"),
        job: release_job(&[])
            .runs_on(runner)
            // Postgres service container, Linux only.
            .when(platform == Platform::Linux, |job| {
                job.add_service(
                    "postgres",
                    Container::new("postgres:15")
                        .add_env(("POSTGRES_HOST_AUTH_METHOD", "trust"))
                        .ports(vec![Port::Name("5432:5432".into())])
                        .options(
                            "--health-cmd pg_isready \
                             --health-interval 500ms \
                             --health-timeout 5s \
                             --health-retries 10",
                        ),
                )
            })
            // Full history + restored mtimes keep incremental builds valid.
            .add_step(steps::checkout_repo().add_with(("fetch-depth", 0)))
            .add_step(steps::restore_mtime())
            .add_step(steps::setup_cargo_config(platform))
            .when(
                platform == Platform::Linux || platform == Platform::Mac,
                |this| this.add_step(steps::cache_rust_dependencies_namespace()),
            )
            .when(
                platform == Platform::Linux,
                steps::install_linux_dependencies,
            )
            .add_step(steps::setup_node())
            .when(
                platform == Platform::Linux || platform == Platform::Mac,
                |job| job.add_step(steps::cargo_install_nextest()),
            )
            .add_step(steps::clear_target_dir_if_large(platform))
            // Exactly one of the two nextest steps is added, depending on
            // whether package filtering is enabled.
            .when(filter_packages, |job| {
                job.add_step(
                    steps::cargo_nextest(platform).with_changed_packages_filter("orchestrate"),
                )
            })
            .when(!filter_packages, |job| {
                job.add_step(steps::cargo_nextest(platform))
            })
            .add_step(steps::cleanup_cargo_config(platform)),
    }
}
462
463pub(crate) fn check_postgres_and_protobuf_migrations() -> NamedJob {
464    fn remove_untracked_files() -> Step<Run> {
465        named::bash("git clean -df")
466    }
467
468    fn ensure_fresh_merge() -> Step<Run> {
469        named::bash(indoc::indoc! {r#"
470            if [ -z "$GITHUB_BASE_REF" ];
471            then
472              echo "BUF_BASE_BRANCH=$(git merge-base origin/main HEAD)" >> "$GITHUB_ENV"
473            else
474              git checkout -B temp
475              git merge -q "origin/$GITHUB_BASE_REF" -m "merge main into temp"
476              echo "BUF_BASE_BRANCH=$GITHUB_BASE_REF" >> "$GITHUB_ENV"
477            fi
478        "#})
479    }
480
481    fn bufbuild_setup_action() -> Step<Use> {
482        named::uses("bufbuild", "buf-setup-action", "v1")
483            .add_with(("version", "v1.29.0"))
484            .add_with(("github_token", vars::GITHUB_TOKEN))
485    }
486
487    fn bufbuild_breaking_action() -> Step<Use> {
488        named::uses("bufbuild", "buf-breaking-action", "v1").add_with(("input", "crates/proto/proto/"))
489            .add_with(("against", "https://github.com/${GITHUB_REPOSITORY}.git#branch=${BUF_BASE_BRANCH},subdir=crates/proto/proto/"))
490    }
491
492    named::job(
493        release_job(&[])
494            .runs_on(runners::LINUX_DEFAULT)
495            .add_env(("GIT_AUTHOR_NAME", "Protobuf Action"))
496            .add_env(("GIT_AUTHOR_EMAIL", "ci@zed.dev"))
497            .add_env(("GIT_COMMITTER_NAME", "Protobuf Action"))
498            .add_env(("GIT_COMMITTER_EMAIL", "ci@zed.dev"))
499            .add_step(steps::checkout_repo().with(("fetch-depth", 0))) // fetch full history
500            .add_step(remove_untracked_files())
501            .add_step(ensure_fresh_merge())
502            .add_step(bufbuild_setup_action())
503            .add_step(bufbuild_breaking_action()),
504    )
505}
506
507fn doctests() -> NamedJob {
508    fn run_doctests() -> Step<Run> {
509        named::bash(indoc::indoc! {r#"
510            cargo test --workspace --doc --no-fail-fast
511        "#})
512        .id("run_doctests")
513    }
514
515    named::job(
516        release_job(&[])
517            .runs_on(runners::LINUX_DEFAULT)
518            .add_step(steps::checkout_repo().add_with(("fetch-depth", 0)))
519            .add_step(steps::restore_mtime())
520            .add_step(steps::cache_rust_dependencies_namespace())
521            .map(steps::install_linux_dependencies)
522            .add_step(steps::setup_cargo_config(Platform::Linux))
523            .add_step(run_doctests())
524            .add_step(steps::cleanup_cargo_config(Platform::Linux)),
525    )
526}
527
528fn check_licenses() -> NamedJob {
529    named::job(
530        Job::default()
531            .runs_on(runners::LINUX_SMALL)
532            .add_step(steps::checkout_repo())
533            .add_step(steps::cache_rust_dependencies_namespace())
534            .add_step(steps::script("./script/check-licenses"))
535            .add_step(steps::script("./script/generate-licenses")),
536    )
537}
538
539fn check_docs() -> NamedJob {
540    fn lychee_link_check(dir: &str) -> Step<Use> {
541        named::uses(
542            "lycheeverse",
543            "lychee-action",
544            "82202e5e9c2f4ef1a55a3d02563e1cb6041e5332",
545        ) // v2.4.1
546        .add_with(("args", format!("--no-progress --exclude '^http' '{dir}'")))
547        .add_with(("fail", true))
548        .add_with(("jobSummary", false))
549    }
550
551    fn install_mdbook() -> Step<Use> {
552        named::uses(
553            "peaceiris",
554            "actions-mdbook",
555            "ee69d230fe19748b7abf22df32acaa93833fad08", // v2
556        )
557        .with(("mdbook-version", "0.4.37"))
558    }
559
560    fn build_docs() -> Step<Run> {
561        named::bash(indoc::indoc! {r#"
562            mkdir -p target/deploy
563            mdbook build ./docs --dest-dir=../target/deploy/docs/
564        "#})
565    }
566
567    named::job(
568        release_job(&[])
569            .runs_on(runners::LINUX_LARGE)
570            .add_step(steps::checkout_repo())
571            .add_step(steps::setup_cargo_config(Platform::Linux))
572            // todo(ci): un-inline build_docs/action.yml here
573            .add_step(steps::cache_rust_dependencies_namespace())
574            .add_step(
575                lychee_link_check("./docs/src/**/*"), // check markdown links
576            )
577            .map(steps::install_linux_dependencies)
578            .add_step(steps::script("./script/generate-action-metadata"))
579            .add_step(install_mdbook())
580            .add_step(build_docs())
581            .add_step(
582                lychee_link_check("target/deploy/docs"), // check links in generated html
583            ),
584    )
585}
586
587pub(crate) fn check_scripts() -> NamedJob {
588    fn download_actionlint() -> Step<Run> {
589        named::bash(
590            "bash <(curl https://raw.githubusercontent.com/rhysd/actionlint/main/scripts/download-actionlint.bash)",
591        )
592    }
593
594    fn run_actionlint() -> Step<Run> {
595        named::bash(indoc::indoc! {r#"
596            ${{ steps.get_actionlint.outputs.executable }} -color
597        "#})
598    }
599
600    fn run_shellcheck() -> Step<Run> {
601        named::bash("./script/shellcheck-scripts error")
602    }
603
604    fn check_xtask_workflows() -> Step<Run> {
605        named::bash(indoc::indoc! {r#"
606            cargo xtask workflows
607            if ! git diff --exit-code .github; then
608              echo "Error: .github directory has uncommitted changes after running 'cargo xtask workflows'"
609              echo "Please run 'cargo xtask workflows' locally and commit the changes"
610              exit 1
611            fi
612        "#})
613    }
614
615    named::job(
616        release_job(&[])
617            .runs_on(runners::LINUX_SMALL)
618            .add_step(steps::checkout_repo())
619            .add_step(run_shellcheck())
620            .add_step(download_actionlint().id("get_actionlint"))
621            .add_step(run_actionlint())
622            .add_step(check_xtask_workflows()),
623    )
624}