// run_tests.rs

  1use gh_workflow::{
  2    Concurrency, Container, Event, Expression, Job, Port, PullRequest, Push, Run, Step, Use,
  3    Workflow,
  4};
  5use indexmap::IndexMap;
  6use indoc::formatdoc;
  7
  8use crate::tasks::workflows::{
  9    steps::{
 10        CommonJobConditions, cache_rust_dependencies_namespace, repository_owner_guard_expression,
 11        use_clang,
 12    },
 13    vars::{self, PathCondition},
 14};
 15
 16use super::{
 17    runners::{self, Platform},
 18    steps::{self, FluentBuilder, NamedJob, named, release_job},
 19};
 20
 21pub(crate) fn run_tests() -> Workflow {
 22    // Specify anything which should potentially skip full test suite in this regex:
 23    // - docs/
 24    // - script/update_top_ranking_issues/
 25    // - .github/ISSUE_TEMPLATE/
 26    // - .github/workflows/  (except .github/workflows/ci.yml)
 27    let should_run_tests = PathCondition::inverted(
 28        "run_tests",
 29        r"^(docs/|script/update_top_ranking_issues/|\.github/(ISSUE_TEMPLATE|workflows/(?!run_tests)))",
 30    );
 31    let should_check_docs = PathCondition::new("run_docs", r"^(docs/|crates/.*\.rs)");
 32    let should_check_scripts = PathCondition::new(
 33        "run_action_checks",
 34        r"^\.github/(workflows/|actions/|actionlint.yml)|tooling/xtask|script/",
 35    );
 36    let should_check_licences =
 37        PathCondition::new("run_licenses", r"^(Cargo.lock|script/.*licenses)");
 38
 39    let orchestrate = orchestrate(&[
 40        &should_check_scripts,
 41        &should_check_docs,
 42        &should_check_licences,
 43        &should_run_tests,
 44    ]);
 45
 46    let mut jobs = vec![
 47        orchestrate,
 48        check_style(),
 49        should_run_tests.guard(clippy(Platform::Windows)),
 50        should_run_tests.guard(clippy(Platform::Linux)),
 51        should_run_tests.guard(clippy(Platform::Mac)),
 52        should_run_tests.guard(run_platform_tests(Platform::Windows)),
 53        should_run_tests.guard(run_platform_tests(Platform::Linux)),
 54        should_run_tests.guard(run_platform_tests(Platform::Mac)),
 55        should_run_tests.guard(doctests()),
 56        should_run_tests.guard(check_workspace_binaries()),
 57        should_run_tests.guard(check_wasm()),
 58        should_run_tests.guard(check_dependencies()), // could be more specific here?
 59        should_check_docs.guard(check_docs()),
 60        should_check_licences.guard(check_licenses()),
 61        should_check_scripts.guard(check_scripts()),
 62    ];
 63    let tests_pass = tests_pass(&jobs);
 64
 65    jobs.push(should_run_tests.guard(check_postgres_and_protobuf_migrations())); // could be more specific here?
 66
 67    named::workflow()
 68        .add_event(
 69            Event::default()
 70                .push(
 71                    Push::default()
 72                        .add_branch("main")
 73                        .add_branch("v[0-9]+.[0-9]+.x"),
 74                )
 75                .pull_request(PullRequest::default().add_branch("**")),
 76        )
 77        .concurrency(
 78            Concurrency::default()
 79                .group(concat!(
 80                    "${{ github.workflow }}-${{ github.ref_name }}-",
 81                    "${{ github.ref_name == 'main' && github.sha || 'anysha' }}"
 82                ))
 83                .cancel_in_progress(true),
 84        )
 85        .add_env(("CARGO_TERM_COLOR", "always"))
 86        .add_env(("RUST_BACKTRACE", 1))
 87        .add_env(("CARGO_INCREMENTAL", 0))
 88        .map(|mut workflow| {
 89            for job in jobs {
 90                workflow = workflow.add_job(job.name, job.job)
 91            }
 92            workflow
 93        })
 94        .add_job(tests_pass.name, tests_pass.job)
 95}
 96
 97// Generates a bash script that checks changed files against regex patterns
 98// and sets GitHub output variables accordingly
/// Builds the orchestration job that publishes one boolean output per rule
/// plus the `changed_packages` filterset output used to narrow test runs.
pub fn orchestrate(rules: &[&PathCondition]) -> NamedJob {
    orchestrate_impl(rules, true)
}
102
/// Same as [`orchestrate`] but without the `changed_packages` output, for
/// workflows that always run their full suite.
pub fn orchestrate_without_package_filter(rules: &[&PathCondition]) -> NamedJob {
    orchestrate_impl(rules, false)
}
106
/// Builds the `orchestrate` job: a single bash step that diffs the changed
/// files against the merge base (or HEAD~1 on direct pushes) and publishes
/// one `true`/`false` output per [`PathCondition`], plus — when
/// `include_package_filter` is set — a `changed_packages` nextest filterset
/// output; an empty filterset value means "run everything".
fn orchestrate_impl(rules: &[&PathCondition], include_package_filter: bool) -> NamedJob {
    let name = "orchestrate".to_owned();
    let step_name = "filter".to_owned();
    let mut script = String::new();

    // Compute CHANGED_FILES and define the check_pattern helper that each
    // rule invokes at the end of the script.
    script.push_str(indoc::indoc! {r#"
        set -euo pipefail
        if [ -z "$GITHUB_BASE_REF" ]; then
          echo "Not in a PR context (i.e., push to main/stable/preview)"
          COMPARE_REV="$(git rev-parse HEAD~1)"
        else
          echo "In a PR context comparing to pull_request.base.ref"
          git fetch origin "$GITHUB_BASE_REF" --depth=350
          COMPARE_REV="$(git merge-base "origin/${GITHUB_BASE_REF}" HEAD)"
        fi
        CHANGED_FILES="$(git diff --name-only "$COMPARE_REV" "$GITHUB_SHA")"

        check_pattern() {
          local output_name="$1"
          local pattern="$2"
          local grep_arg="$3"

          echo "$CHANGED_FILES" | grep "$grep_arg" "$pattern" && \
            echo "${output_name}=true" >> "$GITHUB_OUTPUT" || \
            echo "${output_name}=false" >> "$GITHUB_OUTPUT"
        }

    "#});

    let mut outputs = IndexMap::new();

    if include_package_filter {
        // Derive a nextest filterset (`rdeps(pkg)|...`) from the changed
        // crates/tooling directories, mapping each directory to its package
        // name via `cargo metadata`.
        script.push_str(indoc::indoc! {r#"
        # Check for changes that require full rebuild (no filter)
        # Direct pushes to main/stable/preview always run full suite
        if [ -z "$GITHUB_BASE_REF" ]; then
          echo "Not a PR, running full test suite"
          echo "changed_packages=" >> "$GITHUB_OUTPUT"
        elif echo "$CHANGED_FILES" | grep -qP '^(rust-toolchain\.toml|\.cargo/|\.github/|Cargo\.(toml|lock)$)'; then
          echo "Toolchain, cargo config, or root Cargo files changed, will run all tests"
          echo "changed_packages=" >> "$GITHUB_OUTPUT"
        else
          # Extract changed directories from file paths
          CHANGED_DIRS=$(echo "$CHANGED_FILES" | \
            grep -oP '^(crates|tooling)/\K[^/]+' | \
            sort -u || true)

          # Build directory-to-package mapping using cargo metadata
          DIR_TO_PKG=$(cargo metadata --format-version=1 --no-deps 2>/dev/null | \
            jq -r '.packages[] | select(.manifest_path | test("crates/|tooling/")) | "\(.manifest_path | capture("(crates|tooling)/(?<dir>[^/]+)") | .dir)=\(.name)"')

          # Map directory names to package names
          FILE_CHANGED_PKGS=""
          for dir in $CHANGED_DIRS; do
            pkg=$(echo "$DIR_TO_PKG" | grep "^${dir}=" | cut -d= -f2 | head -1)
            if [ -n "$pkg" ]; then
              FILE_CHANGED_PKGS=$(printf '%s\n%s' "$FILE_CHANGED_PKGS" "$pkg")
            else
              # Fall back to directory name if no mapping found
              FILE_CHANGED_PKGS=$(printf '%s\n%s' "$FILE_CHANGED_PKGS" "$dir")
            fi
          done
          FILE_CHANGED_PKGS=$(echo "$FILE_CHANGED_PKGS" | grep -v '^$' | sort -u || true)

          # If assets/ changed, add crates that depend on those assets
          if echo "$CHANGED_FILES" | grep -qP '^assets/'; then
            FILE_CHANGED_PKGS=$(printf '%s\n%s\n%s\n%s' "$FILE_CHANGED_PKGS" "settings" "storybook" "assets" | sort -u)
          fi

          # Combine all changed packages
          ALL_CHANGED_PKGS=$(echo "$FILE_CHANGED_PKGS" | grep -v '^$' || true)

          if [ -z "$ALL_CHANGED_PKGS" ]; then
            echo "No package changes detected, will run all tests"
            echo "changed_packages=" >> "$GITHUB_OUTPUT"
          else
            # Build nextest filterset with rdeps for each package
            FILTERSET=$(echo "$ALL_CHANGED_PKGS" | \
              sed 's/.*/rdeps(&)/' | \
              tr '\n' '|' | \
              sed 's/|$//')
            echo "Changed packages filterset: $FILTERSET"
            echo "changed_packages=$FILTERSET" >> "$GITHUB_OUTPUT"
          fi
        fi

    "#});

        outputs.insert(
            "changed_packages".to_owned(),
            format!("${{{{ steps.{}.outputs.changed_packages }}}}", step_name),
        );
    }

    for rule in rules {
        // Each condition may be claimed by exactly one orchestrate job;
        // `replace` returning Some means it was already claimed elsewhere.
        assert!(
            rule.set_by_step
                .borrow_mut()
                .replace(name.clone())
                .is_none()
        );
        // Output names must be unique across rules.
        assert!(
            outputs
                .insert(
                    rule.name.to_owned(),
                    format!("${{{{ steps.{}.outputs.{} }}}}", step_name, rule.name)
                )
                .is_none()
        );

        // Inverted rules report true when some changed file does NOT match
        // the pattern (grep -v), i.e. "anything outside the skip-list changed".
        let grep_arg = if rule.invert { "-qvP" } else { "-qP" };
        script.push_str(&format!(
            "check_pattern \"{}\" '{}' {}\n",
            rule.name, rule.pattern, grep_arg
        ));
    }

    let job = Job::default()
        .runs_on(runners::LINUX_SMALL)
        .with_repository_owner_guard()
        .outputs(outputs)
        // NOTE(review): presumably fetches extra history off-main so the
        // merge-base diff in the script works — confirm in the steps module.
        .add_step(steps::checkout_repo().with_deep_history_on_non_main())
        .add_step(Step::new(step_name.clone()).run(script).id(step_name));

    NamedJob { name, job }
}
233
234pub fn tests_pass(jobs: &[NamedJob]) -> NamedJob {
235    let mut script = String::from(indoc::indoc! {r#"
236        set +x
237        EXIT_CODE=0
238
239        check_result() {
240          echo "* $1: $2"
241          if [[ "$2" != "skipped" && "$2" != "success" ]]; then EXIT_CODE=1; fi
242        }
243
244    "#});
245
246    let env_entries: Vec<_> = jobs
247        .iter()
248        .map(|job| {
249            let env_name = format!("RESULT_{}", job.name.to_uppercase());
250            let env_value = format!("${{{{ needs.{}.result }}}}", job.name);
251            (env_name, env_value)
252        })
253        .collect();
254
255    script.push_str(
256        &jobs
257            .iter()
258            .zip(env_entries.iter())
259            .map(|(job, (env_name, _))| format!("check_result \"{}\" \"${}\"", job.name, env_name))
260            .collect::<Vec<_>>()
261            .join("\n"),
262    );
263
264    script.push_str("\n\nexit $EXIT_CODE\n");
265
266    let job = Job::default()
267        .runs_on(runners::LINUX_SMALL)
268        .needs(
269            jobs.iter()
270                .map(|j| j.name.to_string())
271                .collect::<Vec<String>>(),
272        )
273        .cond(repository_owner_guard_expression(true))
274        .add_step(
275            env_entries
276                .into_iter()
277                .fold(named::bash(&script), |step, env_item| {
278                    step.add_env(env_item)
279                }),
280        );
281
282    named::job(job)
283}
284
// Release asset filename for the prebuilt x86_64 Linux ts_query_ls binary.
const TS_QUERY_LS_FILE: &str = "ts_query_ls-x86_64-unknown-linux-gnu.tar.gz";
// Pinned ts_query_ls release tag fetched in CI.
const CI_TS_QUERY_RELEASE: &str = "tags/v3.15.1";
287
/// Step that downloads the pinned `ts_query_ls` release asset from
/// ribru17/ts_query_ls via dsaltares/fetch-gh-release-asset (pinned commit).
pub(crate) fn fetch_ts_query_ls() -> Step<Use> {
    named::uses(
        "dsaltares",
        "fetch-gh-release-asset",
        "aa37ae5c44d3c9820bc12fe675e8670ecd93bd1c",
    ) // v1.1.1
    .add_with(("repo", "ribru17/ts_query_ls"))
    .add_with(("version", CI_TS_QUERY_RELEASE))
    .add_with(("file", TS_QUERY_LS_FILE))
}
298
/// Step that unpacks the downloaded `ts_query_ls` tarball and runs its
/// formatter in check mode, printing remediation hints on failure.
pub(crate) fn run_ts_query_ls() -> Step<Run> {
    named::bash(formatdoc!(
        r#"tar -xf {TS_QUERY_LS_FILE}
        ./ts_query_ls format --check . || {{
            echo "Found unformatted queries, please format them with ts_query_ls."
            echo "For easy use, install the Tree-sitter query extension:"
            echo "zed://extension/tree-sitter-query"
            false
        }}"#
    ))
}
310
/// Style job: prettier, cargo fmt, todo/keymap scripts, typo scan, and
/// tree-sitter query formatting via `ts_query_ls`.
fn check_style() -> NamedJob {
    // crate-ci/typos pinned by commit, configured from the repo's typos.toml.
    fn check_for_typos() -> Step<Use> {
        named::uses(
            "crate-ci",
            "typos",
            "2d0ce569feab1f8752f1dde43cc2f2aa53236e06",
        ) // v1.40.0
        .with(("config", "./typos.toml"))
    }

    named::job(
        release_job(&[])
            .runs_on(runners::LINUX_MEDIUM)
            .add_step(steps::checkout_repo())
            .add_step(steps::cache_rust_dependencies_namespace())
            .add_step(steps::setup_pnpm())
            .add_step(steps::prettier())
            .add_step(steps::cargo_fmt())
            .add_step(steps::script("./script/check-todos"))
            .add_step(steps::script("./script/check-keymaps"))
            .add_step(check_for_typos())
            .add_step(fetch_ts_query_ls())
            .add_step(run_ts_query_ls()),
    )
}
336
/// Dependency-hygiene job: cargo-machete for unused deps, a locked
/// `cargo update` to verify Cargo.lock freshness, and GitHub dependency
/// review on pull requests.
fn check_dependencies() -> NamedJob {
    // Installs cargo-machete at a pinned version via rs-cargo (pinned commit).
    fn install_cargo_machete() -> Step<Use> {
        named::uses(
            "clechasseur",
            "rs-cargo",
            "8435b10f6e71c2e3d4d3b7573003a8ce4bfc6386", // v2
        )
        .add_with(("command", "install"))
        .add_with(("args", "cargo-machete@0.7.0"))
    }

    // Runs `cargo machete` to report unused dependencies.
    fn run_cargo_machete() -> Step<Use> {
        named::uses(
            "clechasseur",
            "rs-cargo",
            "8435b10f6e71c2e3d4d3b7573003a8ce4bfc6386", // v2
        )
        .add_with(("command", "machete"))
    }

    // `--locked` makes this fail if Cargo.lock is out of date with the manifests.
    fn check_cargo_lock() -> Step<Run> {
        named::bash("cargo update --locked --workspace")
    }

    // Dependency review only works against a PR diff, hence the event guard.
    fn check_vulnerable_dependencies() -> Step<Use> {
        named::uses(
            "actions",
            "dependency-review-action",
            "67d4f4bd7a9b17a0db54d2a7519187c65e339de8", // v4
        )
        .if_condition(Expression::new("github.event_name == 'pull_request'"))
        .with(("license-check", false))
    }

    named::job(use_clang(
        release_job(&[])
            .runs_on(runners::LINUX_SMALL)
            .add_step(steps::checkout_repo())
            .add_step(steps::cache_rust_dependencies_namespace())
            .add_step(install_cargo_machete())
            .add_step(run_cargo_machete())
            .add_step(check_cargo_lock())
            .add_step(check_vulnerable_dependencies()),
    ))
}
382
/// Job checking that `gpui_platform` compiles for `wasm32-unknown-unknown`
/// on nightly with a rebuilt std (atomics/bulk-memory/mutable-globals).
fn check_wasm() -> NamedJob {
    // -Zbuild-std requires nightly plus the rust-src component.
    fn install_nightly_wasm_toolchain() -> Step<Run> {
        named::bash(
            "rustup toolchain install nightly --component rust-src --target wasm32-unknown-unknown",
        )
    }

    // Rebuilds std/panic_abort with the target features enabled via RUSTFLAGS.
    fn cargo_check_wasm() -> Step<Run> {
        named::bash(concat!(
            "cargo +nightly -Zbuild-std=std,panic_abort ",
            "check --target wasm32-unknown-unknown -p gpui_platform",
        ))
        .add_env((
            "CARGO_TARGET_WASM32_UNKNOWN_UNKNOWN_RUSTFLAGS",
            "-C target-feature=+atomics,+bulk-memory,+mutable-globals",
        ))
    }

    named::job(
        release_job(&[])
            .runs_on(runners::LINUX_LARGE)
            .add_step(steps::checkout_repo())
            .add_step(steps::setup_cargo_config(Platform::Linux))
            .add_step(steps::cache_rust_dependencies_namespace())
            .add_step(install_nightly_wasm_toolchain())
            .add_step(steps::setup_sccache(Platform::Linux))
            .add_step(cargo_check_wasm())
            .add_step(steps::show_sccache_stats(Platform::Linux))
            .add_step(steps::cleanup_cargo_config(Platform::Linux)),
    )
}
414
/// Job verifying that `collab` and every workspace binary and example still
/// build on Linux.
fn check_workspace_binaries() -> NamedJob {
    named::job(use_clang(
        release_job(&[])
            .runs_on(runners::LINUX_LARGE)
            .add_step(steps::checkout_repo())
            .add_step(steps::setup_cargo_config(Platform::Linux))
            .add_step(steps::cache_rust_dependencies_namespace())
            .map(steps::install_linux_dependencies)
            .add_step(steps::setup_sccache(Platform::Linux))
            .add_step(steps::script("cargo build -p collab"))
            .add_step(steps::script("cargo build --workspace --bins --examples"))
            .add_step(steps::show_sccache_stats(Platform::Linux))
            .add_step(steps::cleanup_cargo_config(Platform::Linux)),
    ))
}
430
431pub(crate) fn clippy(platform: Platform) -> NamedJob {
432    let runner = match platform {
433        Platform::Windows => runners::WINDOWS_DEFAULT,
434        Platform::Linux => runners::LINUX_DEFAULT,
435        Platform::Mac => runners::MAC_DEFAULT,
436    };
437    let mut job = release_job(&[])
438        .runs_on(runner)
439        .add_step(steps::checkout_repo())
440        .add_step(steps::setup_cargo_config(platform))
441        .when(
442            platform == Platform::Linux || platform == Platform::Mac,
443            |this| this.add_step(steps::cache_rust_dependencies_namespace()),
444        )
445        .when(
446            platform == Platform::Linux,
447            steps::install_linux_dependencies,
448        )
449        .add_step(steps::setup_sccache(platform))
450        .add_step(steps::clippy(platform))
451        .add_step(steps::show_sccache_stats(platform));
452    if platform == Platform::Linux {
453        job = use_clang(job);
454    }
455    NamedJob {
456        name: format!("clippy_{platform}"),
457        job,
458    }
459}
460
/// Platform test job that narrows the nextest run to the `changed_packages`
/// filterset published by the `orchestrate` job.
pub(crate) fn run_platform_tests(platform: Platform) -> NamedJob {
    run_platform_tests_impl(platform, true)
}
464
/// Platform test job that always runs the full nextest suite (no package filter).
pub(crate) fn run_platform_tests_no_filter(platform: Platform) -> NamedJob {
    run_platform_tests_impl(platform, false)
}
468
/// Builds the per-platform `run_tests_{platform}` job. Linux additionally
/// gets a Postgres 15 service container, the clang setup, and the Linux
/// system dependencies; `filter_packages` decides whether the nextest step
/// uses the orchestrate job's `changed_packages` filterset.
fn run_platform_tests_impl(platform: Platform, filter_packages: bool) -> NamedJob {
    let runner = match platform {
        Platform::Windows => runners::WINDOWS_DEFAULT,
        Platform::Linux => runners::LINUX_DEFAULT,
        Platform::Mac => runners::MAC_DEFAULT,
    };
    NamedJob {
        name: format!("run_tests_{platform}"),
        job: release_job(&[])
            .runs_on(runner)
            // Only Linux gets the Postgres service container (trust auth,
            // health-checked with pg_isready).
            .when(platform == Platform::Linux, |job| {
                job.add_service(
                    "postgres",
                    Container::new("postgres:15")
                        .add_env(("POSTGRES_HOST_AUTH_METHOD", "trust"))
                        .ports(vec![Port::Name("5432:5432".into())])
                        .options(
                            "--health-cmd pg_isready \
                             --health-interval 500ms \
                             --health-timeout 5s \
                             --health-retries 10",
                        ),
                )
            })
            .add_step(steps::checkout_repo())
            .add_step(steps::setup_cargo_config(platform))
            .when(platform == Platform::Mac, |this| {
                this.add_step(steps::cache_rust_dependencies_namespace())
            })
            // Linux caches dependencies and also applies the clang setup.
            .when(platform == Platform::Linux, |this| {
                use_clang(this.add_step(steps::cache_rust_dependencies_namespace()))
            })
            .when(
                platform == Platform::Linux,
                steps::install_linux_dependencies,
            )
            .add_step(steps::setup_node())
            // NOTE(review): nextest is only installed on Linux/Mac here —
            // presumably the Windows image ships it; confirm.
            .when(
                platform == Platform::Linux || platform == Platform::Mac,
                |job| job.add_step(steps::cargo_install_nextest()),
            )
            .add_step(steps::clear_target_dir_if_large(platform))
            .add_step(steps::setup_sccache(platform))
            // Exactly one of the next two nextest steps is added, depending
            // on whether the package filter is in effect.
            .when(filter_packages, |job| {
                job.add_step(
                    steps::cargo_nextest(platform).with_changed_packages_filter("orchestrate"),
                )
            })
            .when(!filter_packages, |job| {
                job.add_step(steps::cargo_nextest(platform))
            })
            .add_step(steps::show_sccache_stats(platform))
            .add_step(steps::cleanup_cargo_config(platform)),
    }
}
524
/// Protobuf compatibility job: computes a buf comparison base, then runs buf
/// breaking/lint/format checks against `crates/proto/proto/`.
/// NOTE(review): despite the name, no Postgres migration step is visible in
/// this job — confirm whether one was removed or lives elsewhere.
pub(crate) fn check_postgres_and_protobuf_migrations() -> NamedJob {
    // Exports BUF_BASE_BRANCH: the merge-base with origin/main on direct
    // pushes, otherwise the PR base ref after merging it into a temp branch.
    fn ensure_fresh_merge() -> Step<Run> {
        named::bash(indoc::indoc! {r#"
            if [ -z "$GITHUB_BASE_REF" ];
            then
              echo "BUF_BASE_BRANCH=$(git merge-base origin/main HEAD)" >> "$GITHUB_ENV"
            else
              git checkout -B temp
              git merge -q "origin/$GITHUB_BASE_REF" -m "merge main into temp"
              echo "BUF_BASE_BRANCH=$GITHUB_BASE_REF" >> "$GITHUB_ENV"
            fi
        "#})
    }

    // Installs a pinned buf CLI.
    fn bufbuild_setup_action() -> Step<Use> {
        named::uses("bufbuild", "buf-setup-action", "v1")
            .add_with(("version", "v1.29.0"))
            .add_with(("github_token", vars::GITHUB_TOKEN))
    }

    // Diffs the proto directory against the BUF_BASE_BRANCH ref for breaking changes.
    fn bufbuild_breaking_action() -> Step<Use> {
        named::uses("bufbuild", "buf-breaking-action", "v1").add_with(("input", "crates/proto/proto/"))
            .add_with(("against", "https://github.com/${GITHUB_REPOSITORY}.git#branch=${BUF_BASE_BRANCH},subdir=crates/proto/proto/"))
    }

    fn buf_lint() -> Step<Run> {
        named::bash("buf lint crates/proto/proto")
    }

    fn check_protobuf_formatting() -> Step<Run> {
        named::bash("buf format --diff --exit-code crates/proto/proto")
    }

    named::job(
        release_job(&[])
            .runs_on(runners::LINUX_DEFAULT)
            // Git identity for the temporary merge commit created above.
            .add_env(("GIT_AUTHOR_NAME", "Protobuf Action"))
            .add_env(("GIT_AUTHOR_EMAIL", "ci@zed.dev"))
            .add_env(("GIT_COMMITTER_NAME", "Protobuf Action"))
            .add_env(("GIT_COMMITTER_EMAIL", "ci@zed.dev"))
            // Full history is required for the merge-base/merge operations.
            .add_step(steps::checkout_repo().with_full_history())
            .add_step(ensure_fresh_merge())
            .add_step(bufbuild_setup_action())
            .add_step(bufbuild_breaking_action())
            .add_step(buf_lint())
            .add_step(check_protobuf_formatting()),
    )
}
573
/// Job running the workspace doctests (`cargo test --doc`) on Linux.
fn doctests() -> NamedJob {
    fn run_doctests() -> Step<Run> {
        named::bash(indoc::indoc! {r#"
            cargo test --workspace --doc --no-fail-fast
        "#})
        .id("run_doctests")
    }

    named::job(use_clang(
        release_job(&[])
            .runs_on(runners::LINUX_DEFAULT)
            .add_step(steps::checkout_repo())
            .add_step(steps::cache_rust_dependencies_namespace())
            .map(steps::install_linux_dependencies)
            .add_step(steps::setup_cargo_config(Platform::Linux))
            .add_step(steps::setup_sccache(Platform::Linux))
            .add_step(run_doctests())
            .add_step(steps::show_sccache_stats(Platform::Linux))
            .add_step(steps::cleanup_cargo_config(Platform::Linux)),
    ))
}
595
596fn check_licenses() -> NamedJob {
597    named::job(
598        Job::default()
599            .runs_on(runners::LINUX_SMALL)
600            .add_step(steps::checkout_repo())
601            .add_step(steps::cache_rust_dependencies_namespace())
602            .add_step(steps::script("./script/check-licenses"))
603            .add_step(steps::script("./script/generate-licenses")),
604    )
605}
606
/// Docs job: link-checks the markdown sources, builds the mdBook docs, then
/// link-checks the generated HTML.
fn check_docs() -> NamedJob {
    // lychee link checker pinned by commit; external http(s) links excluded.
    fn lychee_link_check(dir: &str) -> Step<Use> {
        named::uses(
            "lycheeverse",
            "lychee-action",
            "82202e5e9c2f4ef1a55a3d02563e1cb6041e5332",
        ) // v2.4.1
        .add_with(("args", format!("--no-progress --exclude '^http' '{dir}'")))
        .add_with(("fail", true))
        .add_with(("jobSummary", false))
    }

    // Installs a pinned mdBook via peaceiris/actions-mdbook.
    fn install_mdbook() -> Step<Use> {
        named::uses(
            "peaceiris",
            "actions-mdbook",
            "ee69d230fe19748b7abf22df32acaa93833fad08", // v2
        )
        .with(("mdbook-version", "0.4.37"))
    }

    // Builds the book into target/deploy/docs/.
    fn build_docs() -> Step<Run> {
        named::bash(indoc::indoc! {r#"
            mkdir -p target/deploy
            mdbook build ./docs --dest-dir=../target/deploy/docs/
        "#})
    }

    named::job(use_clang(
        release_job(&[])
            .runs_on(runners::LINUX_LARGE)
            .add_step(steps::checkout_repo())
            .add_step(steps::setup_cargo_config(Platform::Linux))
            // todo(ci): un-inline build_docs/action.yml here
            .add_step(steps::cache_rust_dependencies_namespace())
            .add_step(
                lychee_link_check("./docs/src/**/*"), // check markdown links
            )
            .map(steps::install_linux_dependencies)
            .add_step(steps::script("./script/generate-action-metadata"))
            .add_step(install_mdbook())
            .add_step(build_docs())
            .add_step(
                lychee_link_check("target/deploy/docs"), // check links in generated html
            ),
    ))
}
654
/// CI-infrastructure checks: shellcheck on repo scripts, actionlint on the
/// workflows, and a guard that the generated workflow files are committed.
pub(crate) fn check_scripts() -> NamedJob {
    // Downloads the actionlint binary via the upstream installer script.
    fn download_actionlint() -> Step<Run> {
        named::bash(
            "bash <(curl https://raw.githubusercontent.com/rhysd/actionlint/main/scripts/download-actionlint.bash)",
        )
    }

    // Runs the binary produced by the step with id `get_actionlint` below.
    fn run_actionlint() -> Step<Run> {
        named::bash(r#""$ACTIONLINT_BIN" -color"#).add_env((
            "ACTIONLINT_BIN",
            "${{ steps.get_actionlint.outputs.executable }}",
        ))
    }

    fn run_shellcheck() -> Step<Run> {
        named::bash("./script/shellcheck-scripts error")
    }

    // Regenerates the workflows and fails on uncommitted drift in .github.
    fn check_xtask_workflows() -> Step<Run> {
        named::bash(indoc::indoc! {r#"
            cargo xtask workflows
            if ! git diff --exit-code .github; then
              echo "Error: .github directory has uncommitted changes after running 'cargo xtask workflows'"
              echo "Please run 'cargo xtask workflows' locally and commit the changes"
              exit 1
            fi
        "#})
    }

    named::job(
        release_job(&[])
            .runs_on(runners::LINUX_SMALL)
            .add_step(steps::checkout_repo())
            .add_step(run_shellcheck())
            .add_step(download_actionlint().id("get_actionlint"))
            .add_step(run_actionlint())
            .add_step(cache_rust_dependencies_namespace())
            .add_step(check_xtask_workflows()),
    )
}
694}