run_tests.rs

use gh_workflow::{
    Concurrency, Event, Expression, Job, PullRequest, Push, Run, Step, Use, Workflow,
};
use indexmap::IndexMap;

use crate::tasks::workflows::{
    nix_build::build_nix,
    runners::Arch,
    steps::{BASH_SHELL, CommonJobConditions, repository_owner_guard_expression},
    vars::{self, PathCondition},
};

use super::{
    runners::{self, Platform},
    steps::{self, FluentBuilder, NamedJob, named, release_job},
};

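/// Assembles the test workflow: an `orchestrate` job computes path-based
/// filters from the changed files, downstream jobs are gated on those
/// filters, and a final `tests_pass` job aggregates their results.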
pub(crate) fn run_tests() -> Workflow {
    // Specify anything which should potentially skip the full test suite in this regex:
    // - docs/
    // - script/update_top_ranking_issues/
    // - .github/ISSUE_TEMPLATE/
    // - .github/workflows/  (except .github/workflows/run_tests*)
    let should_run_tests = PathCondition::inverted(
        "run_tests",
        r"^(docs/|script/update_top_ranking_issues/|\.github/(ISSUE_TEMPLATE|workflows/(?!run_tests)))",
    );
    let should_check_docs = PathCondition::new("run_docs", r"^(docs/|crates/.*\.rs)");
    let should_check_scripts = PathCondition::new(
        "run_action_checks",
        r"^\.github/(workflows/|actions/|actionlint\.yml)|tooling/xtask|script/",
    );
    let should_check_licenses =
        PathCondition::new("run_licenses", r"^(Cargo\.lock|script/.*licenses)");
    let should_build_nix = PathCondition::new(
        "run_nix",
        r"^(nix/|flake\.|Cargo\.|rust-toolchain\.toml|\.cargo/config\.toml)",
    );

    let orchestrate = orchestrate(&[
        &should_check_scripts,
        &should_check_docs,
        &should_check_licenses,
        &should_build_nix,
        &should_run_tests,
    ]);

    let mut jobs = vec![
        orchestrate,
        check_style(),
        should_run_tests.guard(run_platform_tests(Platform::Windows)),
        should_run_tests.guard(run_platform_tests(Platform::Linux)),
        should_run_tests.guard(run_platform_tests(Platform::Mac)),
        should_run_tests.guard(doctests()),
        should_run_tests.guard(check_workspace_binaries()),
        should_run_tests.guard(check_dependencies()), // could be more specific here?
        should_check_docs.guard(check_docs()),
        should_check_licenses.guard(check_licenses()),
        should_check_scripts.guard(check_scripts()),
        should_build_nix.guard(build_nix(
            Platform::Linux,
            Arch::X86_64,
            "debug",
            // *don't* cache the built output
            Some("-zed-editor-[0-9.]*-nightly"),
            &[],
        )),
        should_build_nix.guard(build_nix(
            Platform::Mac,
            Arch::AARCH64,
            "debug",
            // *don't* cache the built output
            Some("-zed-editor-[0-9.]*-nightly"),
            &[],
        )),
    ];
    let tests_pass = tests_pass(&jobs);

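    // Note: pushed after `tests_pass` is computed above, so this job is not
    // part of the aggregate gate's `needs` list.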
    jobs.push(should_run_tests.guard(check_postgres_and_protobuf_migrations())); // could be more specific here?

    named::workflow()
        .add_event(
            Event::default()
                .push(
                    Push::default()
                        .add_branch("main")
                        .add_branch("v[0-9]+.[0-9]+.x"),
                )
                .pull_request(PullRequest::default().add_branch("**")),
        )
        .concurrency(
            Concurrency::default()
                .group(concat!(
                    "${{ github.workflow }}-${{ github.ref_name }}-",
                    "${{ github.ref_name == 'main' && github.sha || 'anysha' }}"
                ))
                .cancel_in_progress(true),
        )
        .add_env(("CARGO_TERM_COLOR", "always"))
        .add_env(("RUST_BACKTRACE", 1))
        .add_env(("CARGO_INCREMENTAL", 0))
        .map(|mut workflow| {
            for job in jobs {
                workflow = workflow.add_job(job.name, job.job);
            }
            workflow
        })
        .add_job(tests_pass.name, tests_pass.job)
}

/// Generates the `orchestrate` job: a bash script that checks the changed
/// files against each rule's regex pattern and sets a GitHub Actions step
/// output per rule, exposed as job outputs for downstream jobs to gate on.
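///
/// For example, the `run_docs` rule defined above results in a generated
/// line roughly like:
///
/// ```text
/// check_pattern "run_docs" '^(docs/|crates/.*\.rs)' -qP
/// ```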
pub fn orchestrate(rules: &[&PathCondition]) -> NamedJob {
    let name = "orchestrate".to_owned();
    let step_name = "filter".to_owned();
    let mut script = String::new();

    script.push_str(indoc::indoc! {r#"
        if [ -z "$GITHUB_BASE_REF" ]; then
          echo "Not in a PR context (i.e., push to main/stable/preview)"
          COMPARE_REV="$(git rev-parse HEAD~1)"
        else
          echo "In a PR context comparing to pull_request.base.ref"
          git fetch origin "$GITHUB_BASE_REF" --depth=350
          COMPARE_REV="$(git merge-base "origin/${GITHUB_BASE_REF}" HEAD)"
        fi
        CHANGED_FILES="$(git diff --name-only "$COMPARE_REV" ${{ github.sha }})"

        check_pattern() {
          local output_name="$1"
          local pattern="$2"
          local grep_arg="$3"

          echo "$CHANGED_FILES" | grep "$grep_arg" "$pattern" && \
            echo "${output_name}=true" >> "$GITHUB_OUTPUT" || \
            echo "${output_name}=false" >> "$GITHUB_OUTPUT"
        }

    "#});

    let mut outputs = IndexMap::new();

    for rule in rules {
        assert!(
            rule.set_by_step
                .borrow_mut()
                .replace(name.clone())
                .is_none()
        );
        assert!(
            outputs
                .insert(
                    rule.name.to_owned(),
                    format!("${{{{ steps.{}.outputs.{} }}}}", step_name, rule.name)
                )
                .is_none()
        );

        let grep_arg = if rule.invert { "-qvP" } else { "-qP" };
        script.push_str(&format!(
            "check_pattern \"{}\" '{}' {}\n",
            rule.name, rule.pattern, grep_arg
        ));
    }

    let job = Job::default()
        .runs_on(runners::LINUX_SMALL)
        .with_repository_owner_guard()
        .outputs(outputs)
        .add_step(steps::checkout_repo().add_with((
            "fetch-depth",
            "${{ github.ref == 'refs/heads/main' && 2 || 350 }}",
        )))
        .add_step(
            Step::new(step_name.clone())
                .run(script)
                .id(step_name)
                .shell(BASH_SHELL),
        );

    NamedJob { name, job }
}

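/// Aggregate gate job: it `needs` every job collected so far and fails if any
/// of them finished with a result other than `success` or `skipped`. For a
/// job named `check_style`, the generated script contains a line roughly
/// like:
///
/// ```text
/// check_result "check_style" "${{ needs.check_style.result }}"
/// ```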
pub fn tests_pass(jobs: &[NamedJob]) -> NamedJob {
    let mut script = String::from(indoc::indoc! {r#"
        set +x
        EXIT_CODE=0

        check_result() {
          echo "* $1: $2"
          if [[ "$2" != "skipped" && "$2" != "success" ]]; then EXIT_CODE=1; fi
        }

    "#});

    script.push_str(
        &jobs
            .iter()
            .map(|job| {
                format!(
                    "check_result \"{}\" \"${{{{ needs.{}.result }}}}\"",
                    job.name, job.name
                )
            })
            .collect::<Vec<_>>()
            .join("\n"),
    );

    script.push_str("\n\nexit $EXIT_CODE\n");

    let job = Job::default()
        .runs_on(runners::LINUX_SMALL)
        .needs(
            jobs.iter()
                .map(|j| j.name.to_string())
                .collect::<Vec<String>>(),
        )
        .cond(repository_owner_guard_expression(true))
        .add_step(named::bash(&script));

    named::job(job)
}

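/// Style checks: Prettier, `cargo fmt`, TODO and keymap checks, and a typo
/// scan via `crate-ci/typos`.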
fn check_style() -> NamedJob {
    fn check_for_typos() -> Step<Use> {
        named::uses(
            "crate-ci",
            "typos",
            "2d0ce569feab1f8752f1dde43cc2f2aa53236e06",
        ) // v1.40.0
        .with(("config", "./typos.toml"))
    }
    named::job(
        release_job(&[])
            .runs_on(runners::LINUX_MEDIUM)
            .add_step(steps::checkout_repo())
            .add_step(steps::cache_rust_dependencies_namespace())
            .add_step(steps::setup_pnpm())
            .add_step(steps::script("./script/prettier"))
            .add_step(steps::cargo_fmt())
            .add_step(steps::trigger_autofix(false))
            .add_step(steps::script("./script/check-todos"))
            .add_step(steps::script("./script/check-keymaps"))
            .add_step(check_for_typos()),
    )
}

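/// Dependency hygiene: unused dependencies via `cargo-machete`, a
/// `Cargo.lock` freshness check, and GitHub's dependency review on pull
/// requests.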
fn check_dependencies() -> NamedJob {
    fn install_cargo_machete() -> Step<Use> {
        named::uses(
            "clechasseur",
            "rs-cargo",
            "8435b10f6e71c2e3d4d3b7573003a8ce4bfc6386", // v2
        )
        .add_with(("command", "install"))
        .add_with(("args", "cargo-machete@0.7.0"))
    }

    fn run_cargo_machete() -> Step<Use> {
        named::uses(
            "clechasseur",
            "rs-cargo",
            "8435b10f6e71c2e3d4d3b7573003a8ce4bfc6386", // v2
        )
        .add_with(("command", "machete"))
    }

    fn check_cargo_lock() -> Step<Run> {
        named::bash("cargo update --locked --workspace")
    }

    fn check_vulnerable_dependencies() -> Step<Use> {
        named::uses(
            "actions",
            "dependency-review-action",
            "67d4f4bd7a9b17a0db54d2a7519187c65e339de8", // v4
        )
        .if_condition(Expression::new("github.event_name == 'pull_request'"))
        .with(("license-check", false))
    }

    named::job(
        release_job(&[])
            .runs_on(runners::LINUX_SMALL)
            .add_step(steps::checkout_repo())
            .add_step(steps::cache_rust_dependencies_namespace())
            .add_step(install_cargo_machete())
            .add_step(run_cargo_machete())
            .add_step(check_cargo_lock())
            .add_step(check_vulnerable_dependencies()),
    )
}

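/// Ensures `collab` and every workspace binary and example still build on
/// Linux.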
fn check_workspace_binaries() -> NamedJob {
    named::job(
        release_job(&[])
            .runs_on(runners::LINUX_LARGE)
            .add_step(steps::checkout_repo())
            .add_step(steps::setup_cargo_config(Platform::Linux))
            .add_step(steps::cache_rust_dependencies_namespace())
            .map(steps::install_linux_dependencies)
            .add_step(steps::script("cargo build -p collab"))
            .add_step(steps::script("cargo build --workspace --bins --examples"))
            .add_step(steps::cleanup_cargo_config(Platform::Linux)),
    )
}

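/// Runs clippy and the `cargo nextest` suite for the given platform on that
/// platform's default runner.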
pub(crate) fn run_platform_tests(platform: Platform) -> NamedJob {
    let runner = match platform {
        Platform::Windows => runners::WINDOWS_DEFAULT,
        Platform::Linux => runners::LINUX_DEFAULT,
        Platform::Mac => runners::MAC_DEFAULT,
    };
    NamedJob {
        name: format!("run_tests_{platform}"),
        job: release_job(&[])
            .runs_on(runner)
            .add_step(steps::checkout_repo())
            .add_step(steps::setup_cargo_config(platform))
            .when(platform == Platform::Linux, |this| {
                this.add_step(steps::cache_rust_dependencies_namespace())
            })
            .when(
                platform == Platform::Linux,
                steps::install_linux_dependencies,
            )
            .add_step(steps::setup_node())
            .add_step(steps::clippy(platform))
            .when(platform == Platform::Linux, |job| {
                job.add_step(steps::trigger_autofix(true))
                    .add_step(steps::cargo_install_nextest())
            })
            .add_step(steps::clear_target_dir_if_large(platform))
            .add_step(steps::cargo_nextest(platform))
            .add_step(steps::cleanup_cargo_config(platform)),
    }
}

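/// Checks that changes to `crates/proto/proto/` are backwards-compatible by
/// running `bufbuild/buf-breaking-action` against the PR base branch (or the
/// merge base with `main` on pushes).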
pub(crate) fn check_postgres_and_protobuf_migrations() -> NamedJob {
    fn remove_untracked_files() -> Step<Run> {
        named::bash("git clean -df")
    }

    fn ensure_fresh_merge() -> Step<Run> {
        named::bash(indoc::indoc! {r#"
            if [ -z "$GITHUB_BASE_REF" ];
            then
              echo "BUF_BASE_BRANCH=$(git merge-base origin/main HEAD)" >> "$GITHUB_ENV"
            else
              git checkout -B temp
              git merge -q "origin/$GITHUB_BASE_REF" -m "merge main into temp"
              echo "BUF_BASE_BRANCH=$GITHUB_BASE_REF" >> "$GITHUB_ENV"
            fi
        "#})
    }

    fn bufbuild_setup_action() -> Step<Use> {
        named::uses("bufbuild", "buf-setup-action", "v1")
            .add_with(("version", "v1.29.0"))
            .add_with(("github_token", vars::GITHUB_TOKEN))
    }

    fn bufbuild_breaking_action() -> Step<Use> {
        named::uses("bufbuild", "buf-breaking-action", "v1")
            .add_with(("input", "crates/proto/proto/"))
            .add_with(("against", "https://github.com/${GITHUB_REPOSITORY}.git#branch=${BUF_BASE_BRANCH},subdir=crates/proto/proto/"))
    }

    named::job(
        release_job(&[])
            .runs_on(runners::LINUX_DEFAULT)
            .add_env(("GIT_AUTHOR_NAME", "Protobuf Action"))
            .add_env(("GIT_AUTHOR_EMAIL", "ci@zed.dev"))
            .add_env(("GIT_COMMITTER_NAME", "Protobuf Action"))
            .add_env(("GIT_COMMITTER_EMAIL", "ci@zed.dev"))
            .add_step(steps::checkout_repo().with(("fetch-depth", 0))) // fetch full history
            .add_step(remove_untracked_files())
            .add_step(ensure_fresh_merge())
            .add_step(bufbuild_setup_action())
            .add_step(bufbuild_breaking_action()),
    )
}

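/// Runs all documentation tests in the workspace on Linux.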
fn doctests() -> NamedJob {
    fn run_doctests() -> Step<Run> {
        named::bash(indoc::indoc! {r#"
            cargo test --workspace --doc --no-fail-fast
        "#})
        .id("run_doctests")
    }

    named::job(
        release_job(&[])
            .runs_on(runners::LINUX_DEFAULT)
            .add_step(steps::checkout_repo())
            .add_step(steps::cache_rust_dependencies_namespace())
            .map(steps::install_linux_dependencies)
            .add_step(steps::setup_cargo_config(Platform::Linux))
            .add_step(run_doctests())
            .add_step(steps::cleanup_cargo_config(Platform::Linux)),
    )
}

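/// License checks: runs `./script/check-licenses` and
/// `./script/generate-licenses`.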
fn check_licenses() -> NamedJob {
    named::job(
        Job::default()
            .runs_on(runners::LINUX_SMALL)
            .add_step(steps::checkout_repo())
            .add_step(steps::cache_rust_dependencies_namespace())
            .add_step(steps::script("./script/check-licenses"))
            .add_step(steps::script("./script/generate-licenses")),
    )
}

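/// Builds the mdBook docs and link-checks both the Markdown sources and the
/// generated HTML with lychee.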
fn check_docs() -> NamedJob {
    fn lychee_link_check(dir: &str) -> Step<Use> {
        named::uses(
            "lycheeverse",
            "lychee-action",
            "82202e5e9c2f4ef1a55a3d02563e1cb6041e5332",
        ) // v2.4.1
        .add_with(("args", format!("--no-progress --exclude '^http' '{dir}'")))
        .add_with(("fail", true))
        .add_with(("jobSummary", false))
    }

    fn install_mdbook() -> Step<Use> {
        named::uses(
            "peaceiris",
            "actions-mdbook",
            "ee69d230fe19748b7abf22df32acaa93833fad08", // v2
        )
        .with(("mdbook-version", "0.4.37"))
    }

    fn build_docs() -> Step<Run> {
        named::bash(indoc::indoc! {r#"
            mkdir -p target/deploy
            mdbook build ./docs --dest-dir=../target/deploy/docs/
        "#})
    }

    named::job(
        release_job(&[])
            .runs_on(runners::LINUX_LARGE)
            .add_step(steps::checkout_repo())
            .add_step(steps::setup_cargo_config(Platform::Linux))
            // todo(ci): un-inline build_docs/action.yml here
            .add_step(steps::cache_rust_dependencies_namespace())
            .add_step(
                lychee_link_check("./docs/src/**/*"), // check markdown links
            )
            .map(steps::install_linux_dependencies)
            .add_step(install_mdbook())
            .add_step(build_docs())
            .add_step(
                lychee_link_check("target/deploy/docs"), // check links in generated html
            ),
    )
}

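/// Lints CI and shell scripts: shellcheck, actionlint, and a check that the
/// output of `cargo xtask workflows` is committed.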
pub(crate) fn check_scripts() -> NamedJob {
    fn download_actionlint() -> Step<Run> {
        named::bash(
            "bash <(curl https://raw.githubusercontent.com/rhysd/actionlint/main/scripts/download-actionlint.bash)",
        )
    }

    fn run_actionlint() -> Step<Run> {
        named::bash(indoc::indoc! {r#"
            ${{ steps.get_actionlint.outputs.executable }} -color
        "#})
    }

    fn run_shellcheck() -> Step<Run> {
        named::bash("./script/shellcheck-scripts error")
    }

    fn check_xtask_workflows() -> Step<Run> {
        named::bash(indoc::indoc! {r#"
            cargo xtask workflows
            if ! git diff --exit-code .github; then
              echo "Error: .github directory has uncommitted changes after running 'cargo xtask workflows'"
              echo "Please run 'cargo xtask workflows' locally and commit the changes"
              exit 1
            fi
        "#})
    }

    named::job(
        release_job(&[])
            .runs_on(runners::LINUX_SMALL)
            .add_step(steps::checkout_repo())
            .add_step(run_shellcheck())
            .add_step(download_actionlint().id("get_actionlint"))
            .add_step(run_actionlint())
            .add_step(check_xtask_workflows()),
    )
}