run_tests.rs

  1use gh_workflow::{
  2    Concurrency, Container, Event, Expression, Job, Port, PullRequest, Push, Run, Step, Use,
  3    Workflow,
  4};
  5use indexmap::IndexMap;
  6
  7use crate::tasks::workflows::{
  8    steps::{CommonJobConditions, repository_owner_guard_expression},
  9    vars::{self, PathCondition},
 10};
 11
 12use super::{
 13    runners::{self, Platform},
 14    steps::{self, FluentBuilder, NamedJob, named, release_job},
 15};
 16
/// Builds the `run_tests` workflow: path-gated clippy/test/docs/license/
/// script-check jobs, all fanned into a final `tests_pass` aggregation job.
pub(crate) fn run_tests() -> Workflow {
    // Specify anything which should potentially skip full test suite in this regex:
    // - docs/
    // - script/update_top_ranking_issues/
    // - .github/ISSUE_TEMPLATE/
    // - .github/workflows/  (except workflows matching `run_tests`, since a
    //   change to this workflow itself must still run the suite)
    let should_run_tests = PathCondition::inverted(
        "run_tests",
        r"^(docs/|script/update_top_ranking_issues/|\.github/(ISSUE_TEMPLATE|workflows/(?!run_tests)))",
    );
    // Docs checks run when docs/ or any Rust source changes (rustdoc links).
    let should_check_docs = PathCondition::new("run_docs", r"^(docs/|crates/.*\.rs)");
    let should_check_scripts = PathCondition::new(
        "run_action_checks",
        r"^\.github/(workflows/|actions/|actionlint.yml)|tooling/xtask|script/",
    );
    let should_check_licences =
        PathCondition::new("run_licenses", r"^(Cargo.lock|script/.*licenses)");

    // Single job that diffs the changed files once and publishes one boolean
    // output per condition; the `guard` calls below consume those outputs.
    let orchestrate = orchestrate(&[
        &should_check_scripts,
        &should_check_docs,
        &should_check_licences,
        &should_run_tests,
    ]);

    let mut jobs = vec![
        orchestrate,
        check_style(),
        should_run_tests.guard(clippy(Platform::Windows)),
        should_run_tests.guard(clippy(Platform::Linux)),
        should_run_tests.guard(clippy(Platform::Mac)),
        should_run_tests.guard(run_platform_tests(Platform::Windows)),
        should_run_tests.guard(run_platform_tests(Platform::Linux)),
        should_run_tests.guard(run_platform_tests(Platform::Mac)),
        should_run_tests.guard(doctests()),
        should_run_tests.guard(check_workspace_binaries()),
        should_run_tests.guard(check_dependencies()), // could be more specific here?
        should_check_docs.guard(check_docs()),
        should_check_licences.guard(check_licenses()),
        should_check_scripts.guard(check_scripts()),
    ];
    // tests_pass gates on every job collected so far.
    let tests_pass = tests_pass(&jobs);

    // NOTE(review): pushed after `tests_pass` was built, so this job's result
    // does NOT gate `tests_pass` — confirm this is intentional.
    jobs.push(should_run_tests.guard(check_postgres_and_protobuf_migrations())); // could be more specific here?

    named::workflow()
        .add_event(
            Event::default()
                .push(
                    Push::default()
                        .add_branch("main")
                        .add_branch("v[0-9]+.[0-9]+.x"),
                )
                .pull_request(PullRequest::default().add_branch("**")),
        )
        // One concurrency group per ref: on non-main refs the constant
        // 'anysha' key collapses runs so new pushes cancel in-flight ones,
        // while each main commit (unique sha) keeps its own group.
        .concurrency(
            Concurrency::default()
                .group(concat!(
                    "${{ github.workflow }}-${{ github.ref_name }}-",
                    "${{ github.ref_name == 'main' && github.sha || 'anysha' }}"
                ))
                .cancel_in_progress(true),
        )
        .add_env(("CARGO_TERM_COLOR", "always"))
        .add_env(("RUST_BACKTRACE", 1))
        .add_env(("CARGO_INCREMENTAL", 0))
        .map(|mut workflow| {
            // Register all jobs in insertion order.
            for job in jobs {
                workflow = workflow.add_job(job.name, job.job)
            }
            workflow
        })
        .add_job(tests_pass.name, tests_pass.job)
}
 91
// Generates a bash script that checks changed files against regex patterns
// and sets GitHub output variables accordingly
//
// This variant also computes the `changed_packages` nextest-filterset output
// in addition to the per-rule boolean outputs.
pub fn orchestrate(rules: &[&PathCondition]) -> NamedJob {
    orchestrate_impl(rules, true)
}
 97
/// Same as [`orchestrate`], but skips the `changed_packages` nextest
/// filterset computation — only the per-rule boolean outputs are emitted.
pub fn orchestrate_without_package_filter(rules: &[&PathCondition]) -> NamedJob {
    orchestrate_impl(rules, false)
}
101
/// Shared implementation behind [`orchestrate`] and
/// [`orchestrate_without_package_filter`].
///
/// Produces an "orchestrate" job containing a single "filter" bash step that:
/// 1. computes the changed-file list against the merge base (PRs) or the
///    previous commit (pushes),
/// 2. when `include_package_filter` is set, derives a nextest filterset of
///    changed packages (empty output = run everything), and
/// 3. evaluates each `PathCondition` regex against the changed files,
///    publishing a `true`/`false` job output per rule.
fn orchestrate_impl(rules: &[&PathCondition], include_package_filter: bool) -> NamedJob {
    let name = "orchestrate".to_owned();
    let step_name = "filter".to_owned();
    let mut script = String::new();

    // Prologue: pick COMPARE_REV, collect CHANGED_FILES, and define the
    // check_pattern helper that appends "<name>=true|false" to $GITHUB_OUTPUT.
    script.push_str(indoc::indoc! {r#"
        if [ -z "$GITHUB_BASE_REF" ]; then
          echo "Not in a PR context (i.e., push to main/stable/preview)"
          COMPARE_REV="$(git rev-parse HEAD~1)"
        else
          echo "In a PR context comparing to pull_request.base.ref"
          git fetch origin "$GITHUB_BASE_REF" --depth=350
          COMPARE_REV="$(git merge-base "origin/${GITHUB_BASE_REF}" HEAD)"
        fi
        CHANGED_FILES="$(git diff --name-only "$COMPARE_REV" ${{ github.sha }})"

        check_pattern() {
          local output_name="$1"
          local pattern="$2"
          local grep_arg="$3"

          echo "$CHANGED_FILES" | grep "$grep_arg" "$pattern" && \
            echo "${output_name}=true" >> "$GITHUB_OUTPUT" || \
            echo "${output_name}=false" >> "$GITHUB_OUTPUT"
        }

    "#});

    let mut outputs = IndexMap::new();

    if include_package_filter {
        // Maps changed crates/tooling directories to cargo package names (via
        // `cargo metadata` + jq) and emits an `rdeps(pkg)|...` nextest
        // filterset in the `changed_packages` output. An empty value means
        // "run all tests": non-PR pushes, toolchain/root-manifest changes, or
        // no recognizable package change.
        script.push_str(indoc::indoc! {r#"
        # Check for changes that require full rebuild (no filter)
        # Direct pushes to main/stable/preview always run full suite
        if [ -z "$GITHUB_BASE_REF" ]; then
          echo "Not a PR, running full test suite"
          echo "changed_packages=" >> "$GITHUB_OUTPUT"
        elif echo "$CHANGED_FILES" | grep -qP '^(rust-toolchain\.toml|\.cargo/|\.github/|Cargo\.(toml|lock)$)'; then
          echo "Toolchain, cargo config, or root Cargo files changed, will run all tests"
          echo "changed_packages=" >> "$GITHUB_OUTPUT"
        else
          # Extract changed directories from file paths
          CHANGED_DIRS=$(echo "$CHANGED_FILES" | \
            grep -oP '^(crates|tooling)/\K[^/]+' | \
            sort -u || true)

          # Build directory-to-package mapping using cargo metadata
          DIR_TO_PKG=$(cargo metadata --format-version=1 --no-deps 2>/dev/null | \
            jq -r '.packages[] | select(.manifest_path | test("crates/|tooling/")) | "\(.manifest_path | capture("(crates|tooling)/(?<dir>[^/]+)") | .dir)=\(.name)"')

          # Map directory names to package names
          FILE_CHANGED_PKGS=""
          for dir in $CHANGED_DIRS; do
            pkg=$(echo "$DIR_TO_PKG" | grep "^${dir}=" | cut -d= -f2 | head -1)
            if [ -n "$pkg" ]; then
              FILE_CHANGED_PKGS=$(printf '%s\n%s' "$FILE_CHANGED_PKGS" "$pkg")
            else
              # Fall back to directory name if no mapping found
              FILE_CHANGED_PKGS=$(printf '%s\n%s' "$FILE_CHANGED_PKGS" "$dir")
            fi
          done
          FILE_CHANGED_PKGS=$(echo "$FILE_CHANGED_PKGS" | grep -v '^$' | sort -u || true)

          # If assets/ changed, add crates that depend on those assets
          if echo "$CHANGED_FILES" | grep -qP '^assets/'; then
            FILE_CHANGED_PKGS=$(printf '%s\n%s\n%s\n%s' "$FILE_CHANGED_PKGS" "settings" "storybook" "assets" | sort -u)
          fi

          # Combine all changed packages
          ALL_CHANGED_PKGS=$(echo "$FILE_CHANGED_PKGS" | grep -v '^$' || true)

          if [ -z "$ALL_CHANGED_PKGS" ]; then
            echo "No package changes detected, will run all tests"
            echo "changed_packages=" >> "$GITHUB_OUTPUT"
          else
            # Build nextest filterset with rdeps for each package
            FILTERSET=$(echo "$ALL_CHANGED_PKGS" | \
              sed 's/.*/rdeps(&)/' | \
              tr '\n' '|' | \
              sed 's/|$//')
            echo "Changed packages filterset: $FILTERSET"
            echo "changed_packages=$FILTERSET" >> "$GITHUB_OUTPUT"
          fi
        fi

    "#});

        outputs.insert(
            "changed_packages".to_owned(),
            format!("${{{{ steps.{}.outputs.changed_packages }}}}", step_name),
        );
    }

    for rule in rules {
        // Record which job publishes this condition (consumed later by
        // `PathCondition::guard`); wiring one rule into two orchestrate jobs
        // is a programming error.
        assert!(
            rule.set_by_step
                .borrow_mut()
                .replace(name.clone())
                .is_none()
        );
        // One job output per rule; a duplicate rule name is a bug.
        assert!(
            outputs
                .insert(
                    rule.name.to_owned(),
                    format!("${{{{ steps.{}.outputs.{} }}}}", step_name, rule.name)
                )
                .is_none()
        );

        // Inverted rules fire when some changed file does NOT match the
        // pattern (grep -v); regular rules fire when some file matches.
        let grep_arg = if rule.invert { "-qvP" } else { "-qP" };
        script.push_str(&format!(
            "check_pattern \"{}\" '{}' {}\n",
            rule.name, rule.pattern, grep_arg
        ));
    }

    let job = Job::default()
        .runs_on(runners::LINUX_SMALL)
        .with_repository_owner_guard()
        .outputs(outputs)
        // Pushes to main only need the previous commit (depth 2); PRs may
        // need deeper history to reach the merge base.
        .add_step(steps::checkout_repo().add_with((
            "fetch-depth",
            "${{ github.ref == 'refs/heads/main' && 2 || 350 }}",
        )))
        .add_step(Step::new(step_name.clone()).run(script).id(step_name));

    NamedJob { name, job }
}
230
231pub fn tests_pass(jobs: &[NamedJob]) -> NamedJob {
232    let mut script = String::from(indoc::indoc! {r#"
233        set +x
234        EXIT_CODE=0
235
236        check_result() {
237          echo "* $1: $2"
238          if [[ "$2" != "skipped" && "$2" != "success" ]]; then EXIT_CODE=1; fi
239        }
240
241    "#});
242
243    script.push_str(
244        &jobs
245            .iter()
246            .map(|job| {
247                format!(
248                    "check_result \"{}\" \"${{{{ needs.{}.result }}}}\"",
249                    job.name, job.name
250                )
251            })
252            .collect::<Vec<_>>()
253            .join("\n"),
254    );
255
256    script.push_str("\n\nexit $EXIT_CODE\n");
257
258    let job = Job::default()
259        .runs_on(runners::LINUX_SMALL)
260        .needs(
261            jobs.iter()
262                .map(|j| j.name.to_string())
263                .collect::<Vec<String>>(),
264        )
265        .cond(repository_owner_guard_expression(true))
266        .add_step(named::bash(&script));
267
268    named::job(job)
269}
270
/// Style job: prettier, cargo fmt, TODO/keymap check scripts, and typo
/// detection.
fn check_style() -> NamedJob {
    // crate-ci/typos pinned by commit SHA; configured via ./typos.toml.
    fn check_for_typos() -> Step<Use> {
        named::uses(
            "crate-ci",
            "typos",
            "2d0ce569feab1f8752f1dde43cc2f2aa53236e06",
        ) // v1.40.0
        .with(("config", "./typos.toml"))
    }
    named::job(
        release_job(&[])
            .runs_on(runners::LINUX_MEDIUM)
            .add_step(steps::checkout_repo())
            .add_step(steps::cache_rust_dependencies_namespace())
            .add_step(steps::setup_pnpm())
            .add_step(steps::prettier())
            .add_step(steps::cargo_fmt())
            .add_step(steps::script("./script/check-todos"))
            .add_step(steps::script("./script/check-keymaps"))
            .add_step(check_for_typos()),
    )
}
293
/// Dependency hygiene job: unused-dependency detection (cargo-machete),
/// Cargo.lock freshness, and GitHub's dependency vulnerability review.
fn check_dependencies() -> NamedJob {
    fn install_cargo_machete() -> Step<Use> {
        named::uses(
            "clechasseur",
            "rs-cargo",
            "8435b10f6e71c2e3d4d3b7573003a8ce4bfc6386", // v2
        )
        .add_with(("command", "install"))
        .add_with(("args", "cargo-machete@0.7.0"))
    }

    fn run_cargo_machete() -> Step<Use> {
        named::uses(
            "clechasseur",
            "rs-cargo",
            "8435b10f6e71c2e3d4d3b7573003a8ce4bfc6386", // v2
        )
        .add_with(("command", "machete"))
    }

    // --locked makes cargo fail instead of rewriting an out-of-date lockfile.
    fn check_cargo_lock() -> Step<Run> {
        named::bash("cargo update --locked --workspace")
    }

    // dependency-review-action needs a PR diff, so it is gated to PR events;
    // license checking is handled by the separate check_licenses job.
    fn check_vulnerable_dependencies() -> Step<Use> {
        named::uses(
            "actions",
            "dependency-review-action",
            "67d4f4bd7a9b17a0db54d2a7519187c65e339de8", // v4
        )
        .if_condition(Expression::new("github.event_name == 'pull_request'"))
        .with(("license-check", false))
    }

    named::job(
        release_job(&[])
            .runs_on(runners::LINUX_SMALL)
            .add_step(steps::checkout_repo())
            .add_step(steps::cache_rust_dependencies_namespace())
            .add_step(install_cargo_machete())
            .add_step(run_cargo_machete())
            .add_step(check_cargo_lock())
            .add_step(check_vulnerable_dependencies()),
    )
}
339
/// Ensures every workspace binary and example still compiles on Linux.
fn check_workspace_binaries() -> NamedJob {
    named::job(
        release_job(&[])
            .runs_on(runners::LINUX_LARGE)
            .add_step(steps::checkout_repo())
            .add_step(steps::setup_cargo_config(Platform::Linux))
            .add_step(steps::cache_rust_dependencies_namespace())
            .map(steps::install_linux_dependencies)
            // collab is built on its own before the workspace-wide build —
            // presumably to catch feature-unification issues; TODO confirm.
            .add_step(steps::script("cargo build -p collab"))
            .add_step(steps::script("cargo build --workspace --bins --examples"))
            .add_step(steps::cleanup_cargo_config(Platform::Linux)),
    )
}
353
354pub(crate) fn clippy(platform: Platform) -> NamedJob {
355    let runner = match platform {
356        Platform::Windows => runners::WINDOWS_DEFAULT,
357        Platform::Linux => runners::LINUX_DEFAULT,
358        Platform::Mac => runners::MAC_DEFAULT,
359    };
360    NamedJob {
361        name: format!("clippy_{platform}"),
362        job: release_job(&[])
363            .runs_on(runner)
364            .add_step(steps::checkout_repo())
365            .add_step(steps::setup_cargo_config(platform))
366            .when(
367                platform == Platform::Linux || platform == Platform::Mac,
368                |this| this.add_step(steps::cache_rust_dependencies_namespace()),
369            )
370            .when(
371                platform == Platform::Linux,
372                steps::install_linux_dependencies,
373            )
374            .add_step(steps::clippy(platform)),
375    }
376}
377
/// Per-platform test job with the `changed_packages` nextest filterset
/// applied (read from the `orchestrate` job's outputs).
pub(crate) fn run_platform_tests(platform: Platform) -> NamedJob {
    run_platform_tests_impl(platform, true)
}
381
/// Per-platform test job that always runs the full suite (no package
/// filtering).
pub(crate) fn run_platform_tests_no_filter(platform: Platform) -> NamedJob {
    run_platform_tests_impl(platform, false)
}
385
/// Shared implementation for the per-platform test jobs
/// (`run_tests_{platform}`), optionally restricting nextest to the packages
/// the `orchestrate` job reported as changed.
fn run_platform_tests_impl(platform: Platform, filter_packages: bool) -> NamedJob {
    let runner = match platform {
        Platform::Windows => runners::WINDOWS_DEFAULT,
        Platform::Linux => runners::LINUX_DEFAULT,
        Platform::Mac => runners::MAC_DEFAULT,
    };
    NamedJob {
        name: format!("run_tests_{platform}"),
        job: release_job(&[])
            .runs_on(runner)
            // Linux runs get a Postgres 15 service container — presumably for
            // database-backed tests; trust auth, health-checked before steps
            // start. TODO confirm which tests rely on it.
            .when(platform == Platform::Linux, |job| {
                job.add_service(
                    "postgres",
                    Container::new("postgres:15")
                        .add_env(("POSTGRES_HOST_AUTH_METHOD", "trust"))
                        .ports(vec![Port::Name("5432:5432".into())])
                        .options(
                            "--health-cmd pg_isready \
                             --health-interval 500ms \
                             --health-timeout 5s \
                             --health-retries 10",
                        ),
                )
            })
            .add_step(steps::checkout_repo())
            .add_step(steps::setup_cargo_config(platform))
            // Namespace-backed dependency cache is added on Linux/Mac only.
            .when(
                platform == Platform::Linux || platform == Platform::Mac,
                |this| this.add_step(steps::cache_rust_dependencies_namespace()),
            )
            .when(
                platform == Platform::Linux,
                steps::install_linux_dependencies,
            )
            .add_step(steps::setup_node())
            // nextest is installed on Linux/Mac only; Windows is excluded —
            // presumably available on that runner image. TODO confirm.
            .when(
                platform == Platform::Linux || platform == Platform::Mac,
                |job| job.add_step(steps::cargo_install_nextest()),
            )
            .add_step(steps::clear_target_dir_if_large(platform))
            // Exactly one of the two nextest steps is added, depending on
            // whether package filtering is requested.
            .when(filter_packages, |job| {
                job.add_step(
                    steps::cargo_nextest(platform).with_changed_packages_filter("orchestrate"),
                )
            })
            .when(!filter_packages, |job| {
                job.add_step(steps::cargo_nextest(platform))
            })
            .add_step(steps::cleanup_cargo_config(platform)),
    }
}
437
/// Checks protobuf backward compatibility with buf's breaking-change action,
/// comparing crates/proto against the PR base (or the origin/main merge base
/// on pushes).
// NOTE(review): despite the name, only the protobuf breaking check is visible
// in these steps — confirm where the postgres migration check happens.
pub(crate) fn check_postgres_and_protobuf_migrations() -> NamedJob {
    fn remove_untracked_files() -> Step<Run> {
        named::bash("git clean -df")
    }

    // Determines the baseline for buf: on pushes, the merge base with
    // origin/main; on PRs, the base ref after a fresh merge into a temp
    // branch. The result is exported as BUF_BASE_BRANCH.
    fn ensure_fresh_merge() -> Step<Run> {
        named::bash(indoc::indoc! {r#"
            if [ -z "$GITHUB_BASE_REF" ];
            then
              echo "BUF_BASE_BRANCH=$(git merge-base origin/main HEAD)" >> "$GITHUB_ENV"
            else
              git checkout -B temp
              git merge -q "origin/$GITHUB_BASE_REF" -m "merge main into temp"
              echo "BUF_BASE_BRANCH=$GITHUB_BASE_REF" >> "$GITHUB_ENV"
            fi
        "#})
    }

    fn bufbuild_setup_action() -> Step<Use> {
        named::uses("bufbuild", "buf-setup-action", "v1")
            .add_with(("version", "v1.29.0"))
            .add_with(("github_token", vars::GITHUB_TOKEN))
    }

    // Compares the checked-out proto files against the same subdir at
    // BUF_BASE_BRANCH in this repository.
    fn bufbuild_breaking_action() -> Step<Use> {
        named::uses("bufbuild", "buf-breaking-action", "v1").add_with(("input", "crates/proto/proto/"))
            .add_with(("against", "https://github.com/${GITHUB_REPOSITORY}.git#branch=${BUF_BASE_BRANCH},subdir=crates/proto/proto/"))
    }

    named::job(
        release_job(&[])
            .runs_on(runners::LINUX_DEFAULT)
            // Identity for the merge commit created by ensure_fresh_merge.
            .add_env(("GIT_AUTHOR_NAME", "Protobuf Action"))
            .add_env(("GIT_AUTHOR_EMAIL", "ci@zed.dev"))
            .add_env(("GIT_COMMITTER_NAME", "Protobuf Action"))
            .add_env(("GIT_COMMITTER_EMAIL", "ci@zed.dev"))
            .add_step(steps::checkout_repo().with(("fetch-depth", 0))) // fetch full history
            .add_step(remove_untracked_files())
            .add_step(ensure_fresh_merge())
            .add_step(bufbuild_setup_action())
            .add_step(bufbuild_breaking_action()),
    )
}
481
/// Runs rustdoc example tests (`cargo test --doc`) across the workspace on
/// Linux.
fn doctests() -> NamedJob {
    // --no-fail-fast so all failing doctests are reported, not just the first.
    fn run_doctests() -> Step<Run> {
        named::bash(indoc::indoc! {r#"
            cargo test --workspace --doc --no-fail-fast
        "#})
        .id("run_doctests")
    }

    named::job(
        release_job(&[])
            .runs_on(runners::LINUX_DEFAULT)
            .add_step(steps::checkout_repo())
            .add_step(steps::cache_rust_dependencies_namespace())
            .map(steps::install_linux_dependencies)
            .add_step(steps::setup_cargo_config(Platform::Linux))
            .add_step(run_doctests())
            .add_step(steps::cleanup_cargo_config(Platform::Linux)),
    )
}
501
/// Validates license metadata and regenerates the bundled licenses file.
// NOTE(review): uses a bare Job::default() rather than release_job(&[]) like
// the sibling jobs — confirm whether that divergence is intentional.
fn check_licenses() -> NamedJob {
    named::job(
        Job::default()
            .runs_on(runners::LINUX_SMALL)
            .add_step(steps::checkout_repo())
            .add_step(steps::cache_rust_dependencies_namespace())
            .add_step(steps::script("./script/check-licenses"))
            .add_step(steps::script("./script/generate-licenses")),
    )
}
512
/// Builds the mdBook docs and link-checks both the markdown sources and the
/// generated HTML.
fn check_docs() -> NamedJob {
    // lychee link checker pinned by SHA; external http(s) links are excluded
    // so the check only covers intra-docs links.
    fn lychee_link_check(dir: &str) -> Step<Use> {
        named::uses(
            "lycheeverse",
            "lychee-action",
            "82202e5e9c2f4ef1a55a3d02563e1cb6041e5332",
        ) // v2.4.1
        .add_with(("args", format!("--no-progress --exclude '^http' '{dir}'")))
        .add_with(("fail", true))
        .add_with(("jobSummary", false))
    }

    fn install_mdbook() -> Step<Use> {
        named::uses(
            "peaceiris",
            "actions-mdbook",
            "ee69d230fe19748b7abf22df32acaa93833fad08", // v2
        )
        .with(("mdbook-version", "0.4.37"))
    }

    // Renders the book into target/deploy/docs (dest-dir is relative to the
    // ./docs book root, hence the leading ../).
    fn build_docs() -> Step<Run> {
        named::bash(indoc::indoc! {r#"
            mkdir -p target/deploy
            mdbook build ./docs --dest-dir=../target/deploy/docs/
        "#})
    }

    named::job(
        release_job(&[])
            .runs_on(runners::LINUX_LARGE)
            .add_step(steps::checkout_repo())
            .add_step(steps::setup_cargo_config(Platform::Linux))
            // todo(ci): un-inline build_docs/action.yml here
            .add_step(steps::cache_rust_dependencies_namespace())
            .add_step(
                lychee_link_check("./docs/src/**/*"), // check markdown links
            )
            .map(steps::install_linux_dependencies)
            .add_step(steps::script("./script/generate-action-metadata"))
            .add_step(install_mdbook())
            .add_step(build_docs())
            .add_step(
                lychee_link_check("target/deploy/docs"), // check links in generated html
            ),
    )
}
560
/// Lints CI and shell tooling: shellcheck, actionlint, and a check that the
/// generated workflow files under .github are committed.
pub(crate) fn check_scripts() -> NamedJob {
    // Downloads the actionlint binary; the step is registered with id
    // "get_actionlint" below so run_actionlint can read its output.
    fn download_actionlint() -> Step<Run> {
        named::bash(
            "bash <(curl https://raw.githubusercontent.com/rhysd/actionlint/main/scripts/download-actionlint.bash)",
        )
    }

    // Runs the binary path published by the get_actionlint step's output.
    fn run_actionlint() -> Step<Run> {
        named::bash(indoc::indoc! {r#"
            ${{ steps.get_actionlint.outputs.executable }} -color
        "#})
    }

    fn run_shellcheck() -> Step<Run> {
        named::bash("./script/shellcheck-scripts error")
    }

    // Regenerates the workflows and fails if .github differs from the
    // committed state (i.e. someone edited YAML without running xtask).
    fn check_xtask_workflows() -> Step<Run> {
        named::bash(indoc::indoc! {r#"
            cargo xtask workflows
            if ! git diff --exit-code .github; then
              echo "Error: .github directory has uncommitted changes after running 'cargo xtask workflows'"
              echo "Please run 'cargo xtask workflows' locally and commit the changes"
              exit 1
            fi
        "#})
    }

    named::job(
        release_job(&[])
            .runs_on(runners::LINUX_SMALL)
            .add_step(steps::checkout_repo())
            .add_step(run_shellcheck())
            .add_step(download_actionlint().id("get_actionlint"))
            .add_step(run_actionlint())
            .add_step(check_xtask_workflows()),
    )
}
598}