run_tests.rs
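//! Generates the `run_tests` CI workflow: an orchestration job that inspects
//! changed paths, the style/clippy/test/docs/license/nix jobs it gates, and a
//! final `tests_pass` summary job.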

use gh_workflow::{
    Concurrency, Container, Event, Expression, Job, Port, PullRequest, Push, Run, Step, Use,
    Workflow,
};
use indexmap::IndexMap;

use crate::tasks::workflows::{
    nix_build::build_nix,
    runners::Arch,
    steps::{BASH_SHELL, CommonJobConditions, repository_owner_guard_expression},
    vars::{self, PathCondition},
};

use super::{
    runners::{self, Platform},
    steps::{self, FluentBuilder, NamedJob, named, release_job},
};

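/// Assembles the workflow: defines the path-filter conditions, the jobs they
/// guard, the push/pull_request triggers, and the `tests_pass` summary job.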
pub(crate) fn run_tests() -> Workflow {
    // Specify anything which should potentially skip the full test suite in this regex:
    // - docs/
    // - script/update_top_ranking_issues/
    // - .github/ISSUE_TEMPLATE/
    // - .github/workflows/  (except .github/workflows/run_tests*)
    let should_run_tests = PathCondition::inverted(
        "run_tests",
        r"^(docs/|script/update_top_ranking_issues/|\.github/(ISSUE_TEMPLATE|workflows/(?!run_tests)))",
    );
    let should_check_docs = PathCondition::new("run_docs", r"^(docs/|crates/.*\.rs)");
    let should_check_scripts = PathCondition::new(
        "run_action_checks",
        r"^\.github/(workflows/|actions/|actionlint.yml)|tooling/xtask|script/",
    );
    let should_check_licences =
        PathCondition::new("run_licenses", r"^(Cargo.lock|script/.*licenses)");
    let should_build_nix = PathCondition::new(
        "run_nix",
        r"^(nix/|flake\.|Cargo\.|rust-toolchain.toml|\.cargo/config.toml)",
    );

    let orchestrate = orchestrate(&[
        &should_check_scripts,
        &should_check_docs,
        &should_check_licences,
        &should_build_nix,
        &should_run_tests,
    ]);

    let mut jobs = vec![
        orchestrate,
        check_style(),
        should_run_tests.guard(clippy(Platform::Windows)),
        should_run_tests.guard(clippy(Platform::Linux)),
        should_run_tests.guard(clippy(Platform::Mac)),
        should_run_tests.guard(run_platform_tests(Platform::Windows)),
        should_run_tests.guard(run_platform_tests(Platform::Linux)),
        should_run_tests.guard(run_platform_tests(Platform::Mac)),
        should_run_tests.guard(doctests()),
        should_run_tests.guard(check_workspace_binaries()),
        should_run_tests.guard(check_dependencies()), // could be more specific here?
        should_check_docs.guard(check_docs()),
        should_check_licences.guard(check_licenses()),
        should_check_scripts.guard(check_scripts()),
        should_build_nix.guard(build_nix(
            Platform::Linux,
            Arch::X86_64,
            "debug",
            // *don't* cache the built output
            Some("-zed-editor-[0-9.]*-nightly"),
            &[],
        )),
        should_build_nix.guard(build_nix(
            Platform::Mac,
            Arch::AARCH64,
            "debug",
            // *don't* cache the built output
            Some("-zed-editor-[0-9.]*-nightly"),
            &[],
        )),
    ];
    let tests_pass = tests_pass(&jobs);

    jobs.push(should_run_tests.guard(check_postgres_and_protobuf_migrations())); // could be more specific here?

    named::workflow()
        .add_event(
            Event::default()
                .push(
                    Push::default()
                        .add_branch("main")
                        .add_branch("v[0-9]+.[0-9]+.x"),
                )
                .pull_request(PullRequest::default().add_branch("**")),
        )
        .concurrency(
            Concurrency::default()
                .group(concat!(
                    "${{ github.workflow }}-${{ github.ref_name }}-",
                    "${{ github.ref_name == 'main' && github.sha || 'anysha' }}"
                ))
                .cancel_in_progress(true),
        )
        .add_env(("CARGO_TERM_COLOR", "always"))
        .add_env(("RUST_BACKTRACE", 1))
        .add_env(("CARGO_INCREMENTAL", 0))
        .map(|mut workflow| {
            for job in jobs {
                workflow = workflow.add_job(job.name, job.job)
            }
            workflow
        })
        .add_job(tests_pass.name, tests_pass.job)
}

/// Generates the `orchestrate` job: a bash script that checks changed files
/// against each rule's regex pattern and sets the corresponding GitHub output
/// variables used by the guarded jobs.
pub fn orchestrate(rules: &[&PathCondition]) -> NamedJob {
    let name = "orchestrate".to_owned();
    let step_name = "filter".to_owned();
    let mut script = String::new();

    script.push_str(indoc::indoc! {r#"
        if [ -z "$GITHUB_BASE_REF" ]; then
          echo "Not in a PR context (i.e., push to main/stable/preview)"
          COMPARE_REV="$(git rev-parse HEAD~1)"
        else
          echo "In a PR context comparing to pull_request.base.ref"
          git fetch origin "$GITHUB_BASE_REF" --depth=350
          COMPARE_REV="$(git merge-base "origin/${GITHUB_BASE_REF}" HEAD)"
        fi
        CHANGED_FILES="$(git diff --name-only "$COMPARE_REV" ${{ github.sha }})"

        check_pattern() {
          local output_name="$1"
          local pattern="$2"
          local grep_arg="$3"

          echo "$CHANGED_FILES" | grep "$grep_arg" "$pattern" && \
            echo "${output_name}=true" >> "$GITHUB_OUTPUT" || \
            echo "${output_name}=false" >> "$GITHUB_OUTPUT"
        }

    "#});

    let mut outputs = IndexMap::new();

    for rule in rules {
        assert!(
            rule.set_by_step
                .borrow_mut()
                .replace(name.clone())
                .is_none()
        );
        assert!(
            outputs
                .insert(
                    rule.name.to_owned(),
                    format!("${{{{ steps.{}.outputs.{} }}}}", step_name, rule.name)
                )
                .is_none()
        );

        let grep_arg = if rule.invert { "-qvP" } else { "-qP" };
        script.push_str(&format!(
            "check_pattern \"{}\" '{}' {}\n",
            rule.name, rule.pattern, grep_arg
        ));
    }

    let job = Job::default()
        .runs_on(runners::LINUX_SMALL)
        .with_repository_owner_guard()
        .outputs(outputs)
        .add_step(steps::checkout_repo().add_with((
            "fetch-depth",
            "${{ github.ref == 'refs/heads/main' && 2 || 350 }}",
        )))
        .add_step(
            Step::new(step_name.clone())
                .run(script)
                .id(step_name)
                .shell(BASH_SHELL),
        );

    NamedJob { name, job }
}

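/// Summary job that depends on every other job and fails if any of them
/// finished in a state other than `success` or `skipped`.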
pub fn tests_pass(jobs: &[NamedJob]) -> NamedJob {
    let mut script = String::from(indoc::indoc! {r#"
        set +x
        EXIT_CODE=0

        check_result() {
          echo "* $1: $2"
          if [[ "$2" != "skipped" && "$2" != "success" ]]; then EXIT_CODE=1; fi
        }

    "#});

    script.push_str(
        &jobs
            .iter()
            .map(|job| {
                format!(
                    "check_result \"{}\" \"${{{{ needs.{}.result }}}}\"",
                    job.name, job.name
                )
            })
            .collect::<Vec<_>>()
            .join("\n"),
    );

    script.push_str("\n\nexit $EXIT_CODE\n");

    let job = Job::default()
        .runs_on(runners::LINUX_SMALL)
        .needs(
            jobs.iter()
                .map(|j| j.name.to_string())
                .collect::<Vec<String>>(),
        )
        .cond(repository_owner_guard_expression(true))
        .add_step(named::bash(&script));

    named::job(job)
}

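/// Style checks: prettier, `cargo fmt`, the todo/keymap scripts, and typo
/// detection via crate-ci/typos.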
fn check_style() -> NamedJob {
    fn check_for_typos() -> Step<Use> {
        named::uses(
            "crate-ci",
            "typos",
            "2d0ce569feab1f8752f1dde43cc2f2aa53236e06", // v1.40.0
        )
        .with(("config", "./typos.toml"))
    }
    named::job(
        release_job(&[])
            .runs_on(runners::LINUX_MEDIUM)
            .add_step(steps::checkout_repo())
            .add_step(steps::cache_rust_dependencies_namespace())
            .add_step(steps::setup_pnpm())
            .add_step(steps::prettier())
            .add_step(steps::cargo_fmt())
            .add_step(steps::script("./script/check-todos"))
            .add_step(steps::script("./script/check-keymaps"))
            .add_step(check_for_typos()),
    )
}

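/// Dependency checks: unused dependencies via cargo-machete, `Cargo.lock`
/// consistency, and GitHub's dependency review on pull requests.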
fn check_dependencies() -> NamedJob {
    fn install_cargo_machete() -> Step<Use> {
        named::uses(
            "clechasseur",
            "rs-cargo",
            "8435b10f6e71c2e3d4d3b7573003a8ce4bfc6386", // v2
        )
        .add_with(("command", "install"))
        .add_with(("args", "cargo-machete@0.7.0"))
    }

    fn run_cargo_machete() -> Step<Use> {
        named::uses(
            "clechasseur",
            "rs-cargo",
            "8435b10f6e71c2e3d4d3b7573003a8ce4bfc6386", // v2
        )
        .add_with(("command", "machete"))
    }

    fn check_cargo_lock() -> Step<Run> {
        named::bash("cargo update --locked --workspace")
    }

    fn check_vulnerable_dependencies() -> Step<Use> {
        named::uses(
            "actions",
            "dependency-review-action",
            "67d4f4bd7a9b17a0db54d2a7519187c65e339de8", // v4
        )
        .if_condition(Expression::new("github.event_name == 'pull_request'"))
        .with(("license-check", false))
    }

    named::job(
        release_job(&[])
            .runs_on(runners::LINUX_SMALL)
            .add_step(steps::checkout_repo())
            .add_step(steps::cache_rust_dependencies_namespace())
            .add_step(install_cargo_machete())
            .add_step(run_cargo_machete())
            .add_step(check_cargo_lock())
            .add_step(check_vulnerable_dependencies()),
    )
}

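/// Verifies that collab and all workspace binaries and examples still compile.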
fn check_workspace_binaries() -> NamedJob {
    named::job(
        release_job(&[])
            .runs_on(runners::LINUX_LARGE)
            .add_step(steps::checkout_repo())
            .add_step(steps::setup_cargo_config(Platform::Linux))
            .add_step(steps::cache_rust_dependencies_namespace())
            .map(steps::install_linux_dependencies)
            .add_step(steps::script("cargo build -p collab"))
            .add_step(steps::script("cargo build --workspace --bins --examples"))
            .add_step(steps::cleanup_cargo_config(Platform::Linux)),
    )
}

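/// Clippy on the given platform; Linux additionally restores the dependency
/// cache and installs system dependencies.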
pub(crate) fn clippy(platform: Platform) -> NamedJob {
    let runner = match platform {
        Platform::Windows => runners::WINDOWS_DEFAULT,
        Platform::Linux => runners::LINUX_DEFAULT,
        Platform::Mac => runners::MAC_DEFAULT,
    };
    NamedJob {
        name: format!("clippy_{platform}"),
        job: release_job(&[])
            .runs_on(runner)
            .add_step(steps::checkout_repo())
            .add_step(steps::setup_cargo_config(platform))
            .when(platform == Platform::Linux, |this| {
                this.add_step(steps::cache_rust_dependencies_namespace())
            })
            .when(
                platform == Platform::Linux,
                steps::install_linux_dependencies,
            )
            .add_step(steps::clippy(platform)),
    }
}

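/// Runs the test suite on the given platform via cargo-nextest; Linux also
/// starts a postgres service container.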
pub(crate) fn run_platform_tests(platform: Platform) -> NamedJob {
    let runner = match platform {
        Platform::Windows => runners::WINDOWS_DEFAULT,
        Platform::Linux => runners::LINUX_DEFAULT,
        Platform::Mac => runners::MAC_DEFAULT,
    };
    NamedJob {
        name: format!("run_tests_{platform}"),
        job: release_job(&[])
            .runs_on(runner)
            .when(platform == Platform::Linux, |job| {
                job.add_service(
                    "postgres",
                    Container::new("postgres:15")
                        .add_env(("POSTGRES_HOST_AUTH_METHOD", "trust"))
                        .ports(vec![Port::Name("5432:5432".into())])
                        .options(
                            "--health-cmd pg_isready \
                             --health-interval 500ms \
                             --health-timeout 5s \
                             --health-retries 10",
                        ),
                )
            })
            .add_step(steps::checkout_repo())
            .add_step(steps::setup_cargo_config(platform))
            .when(
                platform == Platform::Linux || platform == Platform::Mac,
                |this| this.add_step(steps::cache_rust_dependencies_namespace()),
            )
            .when(
                platform == Platform::Linux,
                steps::install_linux_dependencies,
            )
            .add_step(steps::setup_node())
            .when(
                platform == Platform::Linux || platform == Platform::Mac,
                |job| job.add_step(steps::cargo_install_nextest()),
            )
            .add_step(steps::clear_target_dir_if_large(platform))
            .add_step(steps::cargo_nextest(platform))
            .add_step(steps::cleanup_cargo_config(platform)),
    }
}

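/// Checks protobuf definitions for breaking changes with buf, comparing
/// against the PR base branch (or the merge base with origin/main on pushes).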
pub(crate) fn check_postgres_and_protobuf_migrations() -> NamedJob {
    fn remove_untracked_files() -> Step<Run> {
        named::bash("git clean -df")
    }

    fn ensure_fresh_merge() -> Step<Run> {
        named::bash(indoc::indoc! {r#"
            if [ -z "$GITHUB_BASE_REF" ];
            then
              echo "BUF_BASE_BRANCH=$(git merge-base origin/main HEAD)" >> "$GITHUB_ENV"
            else
              git checkout -B temp
              git merge -q "origin/$GITHUB_BASE_REF" -m "merge main into temp"
              echo "BUF_BASE_BRANCH=$GITHUB_BASE_REF" >> "$GITHUB_ENV"
            fi
        "#})
    }

    fn bufbuild_setup_action() -> Step<Use> {
        named::uses("bufbuild", "buf-setup-action", "v1")
            .add_with(("version", "v1.29.0"))
            .add_with(("github_token", vars::GITHUB_TOKEN))
    }

    fn bufbuild_breaking_action() -> Step<Use> {
        named::uses("bufbuild", "buf-breaking-action", "v1")
            .add_with(("input", "crates/proto/proto/"))
            .add_with(("against", "https://github.com/${GITHUB_REPOSITORY}.git#branch=${BUF_BASE_BRANCH},subdir=crates/proto/proto/"))
    }

    named::job(
        release_job(&[])
            .runs_on(runners::LINUX_DEFAULT)
            .add_env(("GIT_AUTHOR_NAME", "Protobuf Action"))
            .add_env(("GIT_AUTHOR_EMAIL", "ci@zed.dev"))
            .add_env(("GIT_COMMITTER_NAME", "Protobuf Action"))
            .add_env(("GIT_COMMITTER_EMAIL", "ci@zed.dev"))
            .add_step(steps::checkout_repo().with(("fetch-depth", 0))) // fetch full history
            .add_step(remove_untracked_files())
            .add_step(ensure_fresh_merge())
            .add_step(bufbuild_setup_action())
            .add_step(bufbuild_breaking_action()),
    )
}

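/// Runs `cargo test --doc` across the workspace.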
fn doctests() -> NamedJob {
    fn run_doctests() -> Step<Run> {
        named::bash(indoc::indoc! {r#"
            cargo test --workspace --doc --no-fail-fast
        "#})
        .id("run_doctests")
    }

    named::job(
        release_job(&[])
            .runs_on(runners::LINUX_DEFAULT)
            .add_step(steps::checkout_repo())
            .add_step(steps::cache_rust_dependencies_namespace())
            .map(steps::install_linux_dependencies)
            .add_step(steps::setup_cargo_config(Platform::Linux))
            .add_step(run_doctests())
            .add_step(steps::cleanup_cargo_config(Platform::Linux)),
    )
}

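/// Runs the license check and generation scripts.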
fn check_licenses() -> NamedJob {
    named::job(
        Job::default()
            .runs_on(runners::LINUX_SMALL)
            .add_step(steps::checkout_repo())
            .add_step(steps::cache_rust_dependencies_namespace())
            .add_step(steps::script("./script/check-licenses"))
            .add_step(steps::script("./script/generate-licenses")),
    )
}

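/// Builds the docs with mdbook and link-checks both the markdown sources and
/// the generated HTML with lychee.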
fn check_docs() -> NamedJob {
    fn lychee_link_check(dir: &str) -> Step<Use> {
        named::uses(
            "lycheeverse",
            "lychee-action",
            "82202e5e9c2f4ef1a55a3d02563e1cb6041e5332", // v2.4.1
        )
        .add_with(("args", format!("--no-progress --exclude '^http' '{dir}'")))
        .add_with(("fail", true))
        .add_with(("jobSummary", false))
    }

    fn install_mdbook() -> Step<Use> {
        named::uses(
            "peaceiris",
            "actions-mdbook",
            "ee69d230fe19748b7abf22df32acaa93833fad08", // v2
        )
        .with(("mdbook-version", "0.4.37"))
    }

    fn build_docs() -> Step<Run> {
        named::bash(indoc::indoc! {r#"
            mkdir -p target/deploy
            mdbook build ./docs --dest-dir=../target/deploy/docs/
        "#})
    }

    named::job(
        release_job(&[])
            .runs_on(runners::LINUX_LARGE)
            .add_step(steps::checkout_repo())
            .add_step(steps::setup_cargo_config(Platform::Linux))
            // todo(ci): un-inline build_docs/action.yml here
            .add_step(steps::cache_rust_dependencies_namespace())
            .add_step(
                lychee_link_check("./docs/src/**/*"), // check markdown links
            )
            .map(steps::install_linux_dependencies)
            .add_step(steps::script("./script/generate-action-metadata"))
            .add_step(install_mdbook())
            .add_step(build_docs())
            .add_step(
                lychee_link_check("target/deploy/docs"), // check links in generated html
            ),
    )
}

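/// Lints shell scripts with shellcheck and workflows with actionlint, and
/// verifies that `cargo xtask workflows` output is committed.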
pub(crate) fn check_scripts() -> NamedJob {
    fn download_actionlint() -> Step<Run> {
        named::bash(
            "bash <(curl https://raw.githubusercontent.com/rhysd/actionlint/main/scripts/download-actionlint.bash)",
        )
    }

    fn run_actionlint() -> Step<Run> {
        named::bash(indoc::indoc! {r#"
            ${{ steps.get_actionlint.outputs.executable }} -color
        "#})
    }

    fn run_shellcheck() -> Step<Run> {
        named::bash("./script/shellcheck-scripts error")
    }

    fn check_xtask_workflows() -> Step<Run> {
        named::bash(indoc::indoc! {r#"
            cargo xtask workflows
            if ! git diff --exit-code .github; then
              echo "Error: .github directory has uncommitted changes after running 'cargo xtask workflows'"
              echo "Please run 'cargo xtask workflows' locally and commit the changes"
              exit 1
            fi
        "#})
    }

    named::job(
        release_job(&[])
            .runs_on(runners::LINUX_SMALL)
            .add_step(steps::checkout_repo())
            .add_step(run_shellcheck())
            .add_step(download_actionlint().id("get_actionlint"))
            .add_step(run_actionlint())
            .add_step(check_xtask_workflows()),
    )
}