// run_tests.rs

  1use gh_workflow::{
  2    Concurrency, Container, Event, Expression, Job, Port, PullRequest, Push, Run, Step, Use,
  3    Workflow,
  4};
  5use indexmap::IndexMap;
  6
  7use crate::tasks::workflows::{
  8    steps::{CommonJobConditions, repository_owner_guard_expression},
  9    vars::{self, PathCondition},
 10};
 11
 12use super::{
 13    runners::{self, Platform},
 14    steps::{self, FluentBuilder, NamedJob, named, release_job},
 15};
 16
/// Builds the `run_tests` CI workflow: an `orchestrate` job that classifies
/// the changed paths, a fan-out of lint/test jobs guarded by those path
/// conditions, and a final `tests_pass` job that aggregates their results.
pub(crate) fn run_tests() -> Workflow {
    // Specify anything which should potentially skip full test suite in this regex:
    // - docs/
    // - script/update_top_ranking_issues/
    // - .github/ISSUE_TEMPLATE/
    // - .github/workflows/  (except workflow files starting with "run_tests" —
    //   the (?!run_tests) lookahead keeps changes to this workflow running tests)
    let should_run_tests = PathCondition::inverted(
        "run_tests",
        r"^(docs/|script/update_top_ranking_issues/|\.github/(ISSUE_TEMPLATE|workflows/(?!run_tests)))",
    );
    let should_check_docs = PathCondition::new("run_docs", r"^(docs/|crates/.*\.rs)");
    let should_check_scripts = PathCondition::new(
        "run_action_checks",
        r"^\.github/(workflows/|actions/|actionlint.yml)|tooling/xtask|script/",
    );
    let should_check_licences =
        PathCondition::new("run_licenses", r"^(Cargo.lock|script/.*licenses)");

    // The orchestrate job evaluates every path condition once and publishes the
    // results as job outputs that the guarded jobs below consume.
    let orchestrate = orchestrate(&[
        &should_check_scripts,
        &should_check_docs,
        &should_check_licences,
        &should_run_tests,
    ]);

    let mut jobs = vec![
        orchestrate,
        check_style(),
        should_run_tests.guard(clippy(Platform::Windows)),
        should_run_tests.guard(clippy(Platform::Linux)),
        should_run_tests.guard(clippy(Platform::Mac)),
        should_run_tests.guard(run_platform_tests(Platform::Windows)),
        should_run_tests.guard(run_platform_tests(Platform::Linux)),
        should_run_tests.guard(run_platform_tests(Platform::Mac)),
        should_run_tests.guard(doctests()),
        should_run_tests.guard(check_workspace_binaries()),
        should_run_tests.guard(check_dependencies()), // could be more specific here?
        should_check_docs.guard(check_docs()),
        should_check_licences.guard(check_licenses()),
        should_check_scripts.guard(check_scripts()),
    ];
    // `tests_pass` depends on exactly the jobs collected so far.
    let tests_pass = tests_pass(&jobs);

    // NOTE(review): pushed *after* `tests_pass` was computed, so the aggregate
    // job neither waits on nor gates this one — confirm that is intentional.
    jobs.push(should_run_tests.guard(check_postgres_and_protobuf_migrations())); // could be more specific here?

    named::workflow()
        .add_event(
            Event::default()
                .push(
                    Push::default()
                        .add_branch("main")
                        .add_branch("v[0-9]+.[0-9]+.x"),
                )
                .pull_request(PullRequest::default().add_branch("**")),
        )
        // Superseded runs on the same ref cancel each other, except on `main`
        // where the group is keyed by commit SHA so every push gets its own run.
        .concurrency(
            Concurrency::default()
                .group(concat!(
                    "${{ github.workflow }}-${{ github.ref_name }}-",
                    "${{ github.ref_name == 'main' && github.sha || 'anysha' }}"
                ))
                .cancel_in_progress(true),
        )
        .add_env(("CARGO_TERM_COLOR", "always"))
        .add_env(("RUST_BACKTRACE", 1))
        .add_env(("CARGO_INCREMENTAL", 0))
        .map(|mut workflow| {
            for job in jobs {
                workflow = workflow.add_job(job.name, job.job)
            }
            workflow
        })
        .add_job(tests_pass.name, tests_pass.job)
}
 91
/// Generates a bash script that checks changed files against regex patterns
/// and sets GitHub output variables accordingly, including the
/// `changed_packages` filterset output used to narrow the test runs.
pub fn orchestrate(rules: &[&PathCondition]) -> NamedJob {
    orchestrate_impl(rules, true)
}
 97
/// Same as [`orchestrate`], but without emitting the `changed_packages`
/// output, so downstream test jobs always run the full suite.
pub fn orchestrate_without_package_filter(rules: &[&PathCondition]) -> NamedJob {
    orchestrate_impl(rules, false)
}
101
/// Builds the `orchestrate` job: a single bash step that diffs HEAD against
/// the PR base (or the previous commit on direct pushes) and publishes one
/// boolean job output per [`PathCondition`], plus — when
/// `include_package_filter` is set — a `changed_packages` nextest filterset
/// output derived from the changed crate/tooling directories.
fn orchestrate_impl(rules: &[&PathCondition], include_package_filter: bool) -> NamedJob {
    let name = "orchestrate".to_owned();
    let step_name = "filter".to_owned();
    let mut script = String::new();

    // Preamble: compute COMPARE_REV/CHANGED_FILES and define check_pattern(),
    // which appends "<name>=true|false" to $GITHUB_OUTPUT depending on whether
    // any changed file matches (or, with -v, fails to match) a regex.
    script.push_str(indoc::indoc! {r#"
        if [ -z "$GITHUB_BASE_REF" ]; then
          echo "Not in a PR context (i.e., push to main/stable/preview)"
          COMPARE_REV="$(git rev-parse HEAD~1)"
        else
          echo "In a PR context comparing to pull_request.base.ref"
          git fetch origin "$GITHUB_BASE_REF" --depth=350
          COMPARE_REV="$(git merge-base "origin/${GITHUB_BASE_REF}" HEAD)"
        fi
        CHANGED_FILES="$(git diff --name-only "$COMPARE_REV" ${{ github.sha }})"

        check_pattern() {
          local output_name="$1"
          local pattern="$2"
          local grep_arg="$3"

          echo "$CHANGED_FILES" | grep "$grep_arg" "$pattern" && \
            echo "${output_name}=true" >> "$GITHUB_OUTPUT" || \
            echo "${output_name}=false" >> "$GITHUB_OUTPUT"
        }

    "#});

    let mut outputs = IndexMap::new();

    if include_package_filter {
        // Emits `changed_packages`: empty (= run everything) for non-PR builds
        // or toolchain/root-manifest changes, otherwise a nextest filterset of
        // `rdeps(pkg)` terms, mapping changed directories to package names via
        // `cargo metadata` and adding asset-dependent crates when assets/ changed.
        script.push_str(indoc::indoc! {r#"
        # Check for changes that require full rebuild (no filter)
        # Direct pushes to main/stable/preview always run full suite
        if [ -z "$GITHUB_BASE_REF" ]; then
          echo "Not a PR, running full test suite"
          echo "changed_packages=" >> "$GITHUB_OUTPUT"
        elif echo "$CHANGED_FILES" | grep -qP '^(rust-toolchain\.toml|\.cargo/|\.github/|Cargo\.(toml|lock)$)'; then
          echo "Toolchain, cargo config, or root Cargo files changed, will run all tests"
          echo "changed_packages=" >> "$GITHUB_OUTPUT"
        else
          # Extract changed directories from file paths
          CHANGED_DIRS=$(echo "$CHANGED_FILES" | \
            grep -oP '^(crates|tooling)/\K[^/]+' | \
            sort -u || true)

          # Build directory-to-package mapping using cargo metadata
          DIR_TO_PKG=$(cargo metadata --format-version=1 --no-deps 2>/dev/null | \
            jq -r '.packages[] | select(.manifest_path | test("crates/|tooling/")) | "\(.manifest_path | capture("(crates|tooling)/(?<dir>[^/]+)") | .dir)=\(.name)"')

          # Map directory names to package names
          FILE_CHANGED_PKGS=""
          for dir in $CHANGED_DIRS; do
            pkg=$(echo "$DIR_TO_PKG" | grep "^${dir}=" | cut -d= -f2 | head -1)
            if [ -n "$pkg" ]; then
              FILE_CHANGED_PKGS=$(printf '%s\n%s' "$FILE_CHANGED_PKGS" "$pkg")
            else
              # Fall back to directory name if no mapping found
              FILE_CHANGED_PKGS=$(printf '%s\n%s' "$FILE_CHANGED_PKGS" "$dir")
            fi
          done
          FILE_CHANGED_PKGS=$(echo "$FILE_CHANGED_PKGS" | grep -v '^$' | sort -u || true)

          # If assets/ changed, add crates that depend on those assets
          if echo "$CHANGED_FILES" | grep -qP '^assets/'; then
            FILE_CHANGED_PKGS=$(printf '%s\n%s\n%s\n%s' "$FILE_CHANGED_PKGS" "settings" "storybook" "assets" | sort -u)
          fi

          # Combine all changed packages
          ALL_CHANGED_PKGS=$(echo "$FILE_CHANGED_PKGS" | grep -v '^$' || true)

          if [ -z "$ALL_CHANGED_PKGS" ]; then
            echo "No package changes detected, will run all tests"
            echo "changed_packages=" >> "$GITHUB_OUTPUT"
          else
            # Build nextest filterset with rdeps for each package
            FILTERSET=$(echo "$ALL_CHANGED_PKGS" | \
              sed 's/.*/rdeps(&)/' | \
              tr '\n' '|' | \
              sed 's/|$//')
            echo "Changed packages filterset: $FILTERSET"
            echo "changed_packages=$FILTERSET" >> "$GITHUB_OUTPUT"
          fi
        fi

    "#});

        outputs.insert(
            "changed_packages".to_owned(),
            format!("${{{{ steps.{}.outputs.changed_packages }}}}", step_name),
        );
    }

    for rule in rules {
        // A condition may be wired to at most one orchestrate job; this assert
        // trips if the same rule is registered twice.
        assert!(
            rule.set_by_step
                .borrow_mut()
                .replace(name.clone())
                .is_none()
        );
        // Output names must be unique across rules.
        assert!(
            outputs
                .insert(
                    rule.name.to_owned(),
                    format!("${{{{ steps.{}.outputs.{} }}}}", step_name, rule.name)
                )
                .is_none()
        );

        // Inverted rules use grep -v: true when some changed file does NOT match.
        let grep_arg = if rule.invert { "-qvP" } else { "-qP" };
        script.push_str(&format!(
            "check_pattern \"{}\" '{}' {}\n",
            rule.name, rule.pattern, grep_arg
        ));
    }

    let job = Job::default()
        .runs_on(runners::LINUX_SMALL)
        .with_repository_owner_guard()
        .outputs(outputs)
        // Deeper history is needed so `git merge-base` can find the fork point.
        .add_step(steps::checkout_repo().with_deep_history_on_non_main())
        .add_step(Step::new(step_name.clone()).run(script).id(step_name));

    NamedJob { name, job }
}
227
228pub fn tests_pass(jobs: &[NamedJob]) -> NamedJob {
229    let mut script = String::from(indoc::indoc! {r#"
230        set +x
231        EXIT_CODE=0
232
233        check_result() {
234          echo "* $1: $2"
235          if [[ "$2" != "skipped" && "$2" != "success" ]]; then EXIT_CODE=1; fi
236        }
237
238    "#});
239
240    script.push_str(
241        &jobs
242            .iter()
243            .map(|job| {
244                format!(
245                    "check_result \"{}\" \"${{{{ needs.{}.result }}}}\"",
246                    job.name, job.name
247                )
248            })
249            .collect::<Vec<_>>()
250            .join("\n"),
251    );
252
253    script.push_str("\n\nexit $EXIT_CODE\n");
254
255    let job = Job::default()
256        .runs_on(runners::LINUX_SMALL)
257        .needs(
258            jobs.iter()
259                .map(|j| j.name.to_string())
260                .collect::<Vec<String>>(),
261        )
262        .cond(repository_owner_guard_expression(true))
263        .add_step(named::bash(&script));
264
265    named::job(job)
266}
267
268fn check_style() -> NamedJob {
269    fn check_for_typos() -> Step<Use> {
270        named::uses(
271            "crate-ci",
272            "typos",
273            "2d0ce569feab1f8752f1dde43cc2f2aa53236e06",
274        ) // v1.40.0
275        .with(("config", "./typos.toml"))
276    }
277    named::job(
278        release_job(&[])
279            .runs_on(runners::LINUX_MEDIUM)
280            .add_step(steps::checkout_repo())
281            .add_step(steps::cache_rust_dependencies_namespace())
282            .add_step(steps::setup_pnpm())
283            .add_step(steps::prettier())
284            .add_step(steps::cargo_fmt())
285            .add_step(steps::script("./script/check-todos"))
286            .add_step(steps::script("./script/check-keymaps"))
287            .add_step(check_for_typos()),
288    )
289}
290
291fn check_dependencies() -> NamedJob {
292    fn install_cargo_machete() -> Step<Use> {
293        named::uses(
294            "clechasseur",
295            "rs-cargo",
296            "8435b10f6e71c2e3d4d3b7573003a8ce4bfc6386", // v2
297        )
298        .add_with(("command", "install"))
299        .add_with(("args", "cargo-machete@0.7.0"))
300    }
301
302    fn run_cargo_machete() -> Step<Use> {
303        named::uses(
304            "clechasseur",
305            "rs-cargo",
306            "8435b10f6e71c2e3d4d3b7573003a8ce4bfc6386", // v2
307        )
308        .add_with(("command", "machete"))
309    }
310
311    fn check_cargo_lock() -> Step<Run> {
312        named::bash("cargo update --locked --workspace")
313    }
314
315    fn check_vulnerable_dependencies() -> Step<Use> {
316        named::uses(
317            "actions",
318            "dependency-review-action",
319            "67d4f4bd7a9b17a0db54d2a7519187c65e339de8", // v4
320        )
321        .if_condition(Expression::new("github.event_name == 'pull_request'"))
322        .with(("license-check", false))
323    }
324
325    named::job(
326        release_job(&[])
327            .runs_on(runners::LINUX_SMALL)
328            .add_step(steps::checkout_repo())
329            .add_step(steps::cache_rust_dependencies_namespace())
330            .add_step(install_cargo_machete())
331            .add_step(run_cargo_machete())
332            .add_step(check_cargo_lock())
333            .add_step(check_vulnerable_dependencies()),
334    )
335}
336
337fn check_workspace_binaries() -> NamedJob {
338    named::job(
339        release_job(&[])
340            .runs_on(runners::LINUX_LARGE)
341            .add_step(steps::checkout_repo())
342            .add_step(steps::setup_cargo_config(Platform::Linux))
343            .add_step(steps::cache_rust_dependencies_namespace())
344            .map(steps::install_linux_dependencies)
345            .add_step(steps::setup_sccache(Platform::Linux))
346            .add_step(steps::script("cargo build -p collab"))
347            .add_step(steps::script("cargo build --workspace --bins --examples"))
348            .add_step(steps::show_sccache_stats(Platform::Linux))
349            .add_step(steps::cleanup_cargo_config(Platform::Linux)),
350    )
351}
352
353pub(crate) fn clippy(platform: Platform) -> NamedJob {
354    let runner = match platform {
355        Platform::Windows => runners::WINDOWS_DEFAULT,
356        Platform::Linux => runners::LINUX_DEFAULT,
357        Platform::Mac => runners::MAC_DEFAULT,
358    };
359    NamedJob {
360        name: format!("clippy_{platform}"),
361        job: release_job(&[])
362            .runs_on(runner)
363            .add_step(steps::checkout_repo())
364            .add_step(steps::setup_cargo_config(platform))
365            .when(
366                platform == Platform::Linux || platform == Platform::Mac,
367                |this| this.add_step(steps::cache_rust_dependencies_namespace()),
368            )
369            .when(
370                platform == Platform::Linux,
371                steps::install_linux_dependencies,
372            )
373            .add_step(steps::setup_sccache(platform))
374            .add_step(steps::clippy(platform))
375            .add_step(steps::show_sccache_stats(platform)),
376    }
377}
378
/// Per-platform test job that restricts the nextest run to the
/// `changed_packages` filterset computed by the `orchestrate` job.
pub(crate) fn run_platform_tests(platform: Platform) -> NamedJob {
    run_platform_tests_impl(platform, true)
}
382
/// Per-platform test job that always runs the full test suite, with no
/// changed-package filtering.
pub(crate) fn run_platform_tests_no_filter(platform: Platform) -> NamedJob {
    run_platform_tests_impl(platform, false)
}
386
/// Shared builder for the per-platform test jobs (`run_tests_<platform>`).
/// When `filter_packages` is true, the nextest step uses the
/// `changed_packages` filterset published by the `orchestrate` job; otherwise
/// the full suite runs. Linux additionally gets a postgres service container.
fn run_platform_tests_impl(platform: Platform, filter_packages: bool) -> NamedJob {
    let runner = match platform {
        Platform::Windows => runners::WINDOWS_DEFAULT,
        Platform::Linux => runners::LINUX_DEFAULT,
        Platform::Mac => runners::MAC_DEFAULT,
    };
    NamedJob {
        name: format!("run_tests_{platform}"),
        job: release_job(&[])
            .runs_on(runner)
            // Linux-only postgres sidecar; trust auth keeps test setup simple,
            // and the health check gates the job until the DB accepts connections.
            .when(platform == Platform::Linux, |job| {
                job.add_service(
                    "postgres",
                    Container::new("postgres:15")
                        .add_env(("POSTGRES_HOST_AUTH_METHOD", "trust"))
                        .ports(vec![Port::Name("5432:5432".into())])
                        .options(
                            "--health-cmd pg_isready \
                             --health-interval 500ms \
                             --health-timeout 5s \
                             --health-retries 10",
                        ),
                )
            })
            .add_step(steps::checkout_repo())
            .add_step(steps::setup_cargo_config(platform))
            // Dependency caching is only wired up for the unix runners.
            .when(
                platform == Platform::Linux || platform == Platform::Mac,
                |this| this.add_step(steps::cache_rust_dependencies_namespace()),
            )
            .when(
                platform == Platform::Linux,
                steps::install_linux_dependencies,
            )
            .add_step(steps::setup_node())
            // NOTE(review): nextest is installed only on Linux/Mac here —
            // presumably the Windows image already provides it; confirm.
            .when(
                platform == Platform::Linux || platform == Platform::Mac,
                |job| job.add_step(steps::cargo_install_nextest()),
            )
            .add_step(steps::clear_target_dir_if_large(platform))
            .add_step(steps::setup_sccache(platform))
            // Exactly one of the two `when` branches below applies.
            .when(filter_packages, |job| {
                job.add_step(
                    steps::cargo_nextest(platform).with_changed_packages_filter("orchestrate"),
                )
            })
            .when(!filter_packages, |job| {
                job.add_step(steps::cargo_nextest(platform))
            })
            .add_step(steps::show_sccache_stats(platform))
            .add_step(steps::cleanup_cargo_config(platform)),
    }
}
440
/// Checks protobuf backward compatibility with `buf breaking` against the
/// merge base (direct pushes) or the PR base branch.
///
/// NOTE(review): despite the name, only the protobuf checks are visible in
/// this job; no postgres migration steps appear here — confirm whether the
/// name is stale.
pub(crate) fn check_postgres_and_protobuf_migrations() -> NamedJob {
    // Determines the ref to diff protos against and exports it as
    // BUF_BASE_BRANCH: the merge base with origin/main on direct pushes, or —
    // on PRs — the base branch after merging it into a temporary branch so the
    // comparison sees a fresh merge.
    fn ensure_fresh_merge() -> Step<Run> {
        named::bash(indoc::indoc! {r#"
            if [ -z "$GITHUB_BASE_REF" ];
            then
              echo "BUF_BASE_BRANCH=$(git merge-base origin/main HEAD)" >> "$GITHUB_ENV"
            else
              git checkout -B temp
              git merge -q "origin/$GITHUB_BASE_REF" -m "merge main into temp"
              echo "BUF_BASE_BRANCH=$GITHUB_BASE_REF" >> "$GITHUB_ENV"
            fi
        "#})
    }

    fn bufbuild_setup_action() -> Step<Use> {
        named::uses("bufbuild", "buf-setup-action", "v1")
            .add_with(("version", "v1.29.0"))
            .add_with(("github_token", vars::GITHUB_TOKEN))
    }

    // Compares crates/proto/proto/ against the same subdir at BUF_BASE_BRANCH.
    fn bufbuild_breaking_action() -> Step<Use> {
        named::uses("bufbuild", "buf-breaking-action", "v1").add_with(("input", "crates/proto/proto/"))
            .add_with(("against", "https://github.com/${GITHUB_REPOSITORY}.git#branch=${BUF_BASE_BRANCH},subdir=crates/proto/proto/"))
    }

    named::job(
        release_job(&[])
            .runs_on(runners::LINUX_DEFAULT)
            // Git identity for the merge commit created by ensure_fresh_merge.
            .add_env(("GIT_AUTHOR_NAME", "Protobuf Action"))
            .add_env(("GIT_AUTHOR_EMAIL", "ci@zed.dev"))
            .add_env(("GIT_COMMITTER_NAME", "Protobuf Action"))
            .add_env(("GIT_COMMITTER_EMAIL", "ci@zed.dev"))
            // Full history so merge-base/merge operations have every commit.
            .add_step(steps::checkout_repo().with_full_history())
            .add_step(ensure_fresh_merge())
            .add_step(bufbuild_setup_action())
            .add_step(bufbuild_breaking_action()),
    )
}
479
480fn doctests() -> NamedJob {
481    fn run_doctests() -> Step<Run> {
482        named::bash(indoc::indoc! {r#"
483            cargo test --workspace --doc --no-fail-fast
484        "#})
485        .id("run_doctests")
486    }
487
488    named::job(
489        release_job(&[])
490            .runs_on(runners::LINUX_DEFAULT)
491            .add_step(steps::checkout_repo())
492            .add_step(steps::cache_rust_dependencies_namespace())
493            .map(steps::install_linux_dependencies)
494            .add_step(steps::setup_cargo_config(Platform::Linux))
495            .add_step(steps::setup_sccache(Platform::Linux))
496            .add_step(run_doctests())
497            .add_step(steps::show_sccache_stats(Platform::Linux))
498            .add_step(steps::cleanup_cargo_config(Platform::Linux)),
499    )
500}
501
502fn check_licenses() -> NamedJob {
503    named::job(
504        Job::default()
505            .runs_on(runners::LINUX_SMALL)
506            .add_step(steps::checkout_repo())
507            .add_step(steps::cache_rust_dependencies_namespace())
508            .add_step(steps::script("./script/check-licenses"))
509            .add_step(steps::script("./script/generate-licenses")),
510    )
511}
512
513fn check_docs() -> NamedJob {
514    fn lychee_link_check(dir: &str) -> Step<Use> {
515        named::uses(
516            "lycheeverse",
517            "lychee-action",
518            "82202e5e9c2f4ef1a55a3d02563e1cb6041e5332",
519        ) // v2.4.1
520        .add_with(("args", format!("--no-progress --exclude '^http' '{dir}'")))
521        .add_with(("fail", true))
522        .add_with(("jobSummary", false))
523    }
524
525    fn install_mdbook() -> Step<Use> {
526        named::uses(
527            "peaceiris",
528            "actions-mdbook",
529            "ee69d230fe19748b7abf22df32acaa93833fad08", // v2
530        )
531        .with(("mdbook-version", "0.4.37"))
532    }
533
534    fn build_docs() -> Step<Run> {
535        named::bash(indoc::indoc! {r#"
536            mkdir -p target/deploy
537            mdbook build ./docs --dest-dir=../target/deploy/docs/
538        "#})
539    }
540
541    named::job(
542        release_job(&[])
543            .runs_on(runners::LINUX_LARGE)
544            .add_step(steps::checkout_repo())
545            .add_step(steps::setup_cargo_config(Platform::Linux))
546            // todo(ci): un-inline build_docs/action.yml here
547            .add_step(steps::cache_rust_dependencies_namespace())
548            .add_step(
549                lychee_link_check("./docs/src/**/*"), // check markdown links
550            )
551            .map(steps::install_linux_dependencies)
552            .add_step(steps::script("./script/generate-action-metadata"))
553            .add_step(install_mdbook())
554            .add_step(build_docs())
555            .add_step(
556                lychee_link_check("target/deploy/docs"), // check links in generated html
557            ),
558    )
559}
560
561pub(crate) fn check_scripts() -> NamedJob {
562    fn download_actionlint() -> Step<Run> {
563        named::bash(
564            "bash <(curl https://raw.githubusercontent.com/rhysd/actionlint/main/scripts/download-actionlint.bash)",
565        )
566    }
567
568    fn run_actionlint() -> Step<Run> {
569        named::bash(indoc::indoc! {r#"
570            ${{ steps.get_actionlint.outputs.executable }} -color
571        "#})
572    }
573
574    fn run_shellcheck() -> Step<Run> {
575        named::bash("./script/shellcheck-scripts error")
576    }
577
578    fn check_xtask_workflows() -> Step<Run> {
579        named::bash(indoc::indoc! {r#"
580            cargo xtask workflows
581            if ! git diff --exit-code .github; then
582              echo "Error: .github directory has uncommitted changes after running 'cargo xtask workflows'"
583              echo "Please run 'cargo xtask workflows' locally and commit the changes"
584              exit 1
585            fi
586        "#})
587    }
588
589    named::job(
590        release_job(&[])
591            .runs_on(runners::LINUX_SMALL)
592            .add_step(steps::checkout_repo())
593            .add_step(run_shellcheck())
594            .add_step(download_actionlint().id("get_actionlint"))
595            .add_step(run_actionlint())
596            .add_step(check_xtask_workflows()),
597    )
598}