run_tests.rs

  1use gh_workflow::{
  2    Concurrency, Container, Env, Event, Expression, Job, Port, PullRequest, Push, Run, Step, Use,
  3    Workflow,
  4};
  5use indexmap::IndexMap;
  6use indoc::formatdoc;
  7
  8use crate::tasks::workflows::{
  9    steps::{CommonJobConditions, repository_owner_guard_expression},
 10    vars::{self, PathCondition},
 11};
 12
 13use super::{
 14    runners::{self, Platform},
 15    steps::{self, FluentBuilder, NamedJob, named, release_job},
 16};
 17
 18fn use_clang(job: Job) -> Job {
 19    job.add_env(Env::new("CC", "clang"))
 20        .add_env(Env::new("CXX", "clang++"))
 21}
 22
/// Assembles the `run_tests` CI workflow: an `orchestrate` job that inspects
/// the change set, a collection of path-gated check/test jobs, and a final
/// `tests_pass` aggregation job intended as the required status check.
pub(crate) fn run_tests() -> Workflow {
    // Specify anything which should potentially skip full test suite in this regex:
    // - docs/
    // - script/update_top_ranking_issues/
    // - .github/ISSUE_TEMPLATE/
    // - .github/workflows/  (except the run_tests workflow files themselves,
    //   via the `(?!run_tests)` negative lookahead below)
    let should_run_tests = PathCondition::inverted(
        "run_tests",
        r"^(docs/|script/update_top_ranking_issues/|\.github/(ISSUE_TEMPLATE|workflows/(?!run_tests)))",
    );
    let should_check_docs = PathCondition::new("run_docs", r"^(docs/|crates/.*\.rs)");
    let should_check_scripts = PathCondition::new(
        "run_action_checks",
        r"^\.github/(workflows/|actions/|actionlint.yml)|tooling/xtask|script/",
    );
    let should_check_licences =
        PathCondition::new("run_licenses", r"^(Cargo.lock|script/.*licenses)");

    // The orchestrate job evaluates each path rule once against the diff and
    // publishes one boolean job output per rule for the guarded jobs below.
    let orchestrate = orchestrate(&[
        &should_check_scripts,
        &should_check_docs,
        &should_check_licences,
        &should_run_tests,
    ]);

    let mut jobs = vec![
        orchestrate,
        check_style(),
        should_run_tests.guard(clippy(Platform::Windows)),
        should_run_tests.guard(clippy(Platform::Linux)),
        should_run_tests.guard(clippy(Platform::Mac)),
        should_run_tests.guard(run_platform_tests(Platform::Windows)),
        should_run_tests.guard(run_platform_tests(Platform::Linux)),
        should_run_tests.guard(run_platform_tests(Platform::Mac)),
        should_run_tests.guard(doctests()),
        should_run_tests.guard(check_workspace_binaries()),
        should_run_tests.guard(check_wasm()),
        should_run_tests.guard(check_dependencies()), // could be more specific here?
        should_check_docs.guard(check_docs()),
        should_check_licences.guard(check_licenses()),
        should_check_scripts.guard(check_scripts()),
    ];
    // NOTE(review): `tests_pass` is computed before the migrations job is
    // pushed below, so that job is NOT in the required check's `needs` —
    // presumably intentional; confirm.
    let tests_pass = tests_pass(&jobs);

    jobs.push(should_run_tests.guard(check_postgres_and_protobuf_migrations())); // could be more specific here?

    named::workflow()
        // Trigger on pushes to main / release branches and on all PRs.
        .add_event(
            Event::default()
                .push(
                    Push::default()
                        .add_branch("main")
                        .add_branch("v[0-9]+.[0-9]+.x"),
                )
                .pull_request(PullRequest::default().add_branch("**")),
        )
        // Cancel superseded in-flight runs; on main each sha gets its own
        // concurrency group so pushes never cancel each other.
        .concurrency(
            Concurrency::default()
                .group(concat!(
                    "${{ github.workflow }}-${{ github.ref_name }}-",
                    "${{ github.ref_name == 'main' && github.sha || 'anysha' }}"
                ))
                .cancel_in_progress(true),
        )
        .add_env(("CARGO_TERM_COLOR", "always"))
        .add_env(("RUST_BACKTRACE", 1))
        .add_env(("CARGO_INCREMENTAL", 0))
        .map(|mut workflow| {
            for job in jobs {
                workflow = workflow.add_job(job.name, job.job)
            }
            workflow
        })
        .add_job(tests_pass.name, tests_pass.job)
}
 98
/// Generates a bash script that checks changed files against regex patterns
/// and sets GitHub output variables accordingly.
///
/// Also computes the `changed_packages` nextest filterset output; use
/// [`orchestrate_without_package_filter`] to skip that.
pub fn orchestrate(rules: &[&PathCondition]) -> NamedJob {
    orchestrate_impl(rules, true)
}
104
/// Same as [`orchestrate`], but without computing the `changed_packages`
/// nextest filterset output.
pub fn orchestrate_without_package_filter(rules: &[&PathCondition]) -> NamedJob {
    orchestrate_impl(rules, false)
}
108
/// Shared implementation of the `orchestrate` job.
///
/// Emits a single Linux job with one `filter` step whose bash script:
/// 1. computes the change set vs. the PR base (or `HEAD~1` on direct pushes),
/// 2. optionally derives a nextest `changed_packages` filterset, and
/// 3. writes one `true`/`false` output per [`PathCondition`] in `rules`.
fn orchestrate_impl(rules: &[&PathCondition], include_package_filter: bool) -> NamedJob {
    let name = "orchestrate".to_owned();
    let step_name = "filter".to_owned();
    let mut script = String::new();

    // Preamble: compute CHANGED_FILES and define check_pattern(), which
    // appends "<name>=true|false" to $GITHUB_OUTPUT based on a grep match.
    script.push_str(indoc::indoc! {r#"
        set -euo pipefail
        if [ -z "$GITHUB_BASE_REF" ]; then
          echo "Not in a PR context (i.e., push to main/stable/preview)"
          COMPARE_REV="$(git rev-parse HEAD~1)"
        else
          echo "In a PR context comparing to pull_request.base.ref"
          git fetch origin "$GITHUB_BASE_REF" --depth=350
          COMPARE_REV="$(git merge-base "origin/${GITHUB_BASE_REF}" HEAD)"
        fi
        CHANGED_FILES="$(git diff --name-only "$COMPARE_REV" ${{ github.sha }})"

        check_pattern() {
          local output_name="$1"
          local pattern="$2"
          local grep_arg="$3"

          echo "$CHANGED_FILES" | grep "$grep_arg" "$pattern" && \
            echo "${output_name}=true" >> "$GITHUB_OUTPUT" || \
            echo "${output_name}=false" >> "$GITHUB_OUTPUT"
        }

    "#});

    let mut outputs = IndexMap::new();

    if include_package_filter {
        // Derive a nextest filterset ("rdeps(pkg)|...") from the changed
        // crates/tooling directories; an empty output means "run everything".
        script.push_str(indoc::indoc! {r#"
        # Check for changes that require full rebuild (no filter)
        # Direct pushes to main/stable/preview always run full suite
        if [ -z "$GITHUB_BASE_REF" ]; then
          echo "Not a PR, running full test suite"
          echo "changed_packages=" >> "$GITHUB_OUTPUT"
        elif echo "$CHANGED_FILES" | grep -qP '^(rust-toolchain\.toml|\.cargo/|\.github/|Cargo\.(toml|lock)$)'; then
          echo "Toolchain, cargo config, or root Cargo files changed, will run all tests"
          echo "changed_packages=" >> "$GITHUB_OUTPUT"
        else
          # Extract changed directories from file paths
          CHANGED_DIRS=$(echo "$CHANGED_FILES" | \
            grep -oP '^(crates|tooling)/\K[^/]+' | \
            sort -u || true)

          # Build directory-to-package mapping using cargo metadata
          DIR_TO_PKG=$(cargo metadata --format-version=1 --no-deps 2>/dev/null | \
            jq -r '.packages[] | select(.manifest_path | test("crates/|tooling/")) | "\(.manifest_path | capture("(crates|tooling)/(?<dir>[^/]+)") | .dir)=\(.name)"')

          # Map directory names to package names
          FILE_CHANGED_PKGS=""
          for dir in $CHANGED_DIRS; do
            pkg=$(echo "$DIR_TO_PKG" | grep "^${dir}=" | cut -d= -f2 | head -1)
            if [ -n "$pkg" ]; then
              FILE_CHANGED_PKGS=$(printf '%s\n%s' "$FILE_CHANGED_PKGS" "$pkg")
            else
              # Fall back to directory name if no mapping found
              FILE_CHANGED_PKGS=$(printf '%s\n%s' "$FILE_CHANGED_PKGS" "$dir")
            fi
          done
          FILE_CHANGED_PKGS=$(echo "$FILE_CHANGED_PKGS" | grep -v '^$' | sort -u || true)

          # If assets/ changed, add crates that depend on those assets
          if echo "$CHANGED_FILES" | grep -qP '^assets/'; then
            FILE_CHANGED_PKGS=$(printf '%s\n%s\n%s\n%s' "$FILE_CHANGED_PKGS" "settings" "storybook" "assets" | sort -u)
          fi

          # Combine all changed packages
          ALL_CHANGED_PKGS=$(echo "$FILE_CHANGED_PKGS" | grep -v '^$' || true)

          if [ -z "$ALL_CHANGED_PKGS" ]; then
            echo "No package changes detected, will run all tests"
            echo "changed_packages=" >> "$GITHUB_OUTPUT"
          else
            # Build nextest filterset with rdeps for each package
            FILTERSET=$(echo "$ALL_CHANGED_PKGS" | \
              sed 's/.*/rdeps(&)/' | \
              tr '\n' '|' | \
              sed 's/|$//')
            echo "Changed packages filterset: $FILTERSET"
            echo "changed_packages=$FILTERSET" >> "$GITHUB_OUTPUT"
          fi
        fi

    "#});

        outputs.insert(
            "changed_packages".to_owned(),
            format!("${{{{ steps.{}.outputs.changed_packages }}}}", step_name),
        );
    }

    for rule in rules {
        // Each rule may be claimed by at most one orchestrate job; this also
        // catches accidentally registering the same rule twice.
        assert!(
            rule.set_by_step
                .borrow_mut()
                .replace(name.clone())
                .is_none()
        );
        // Output names must be unique within the job.
        assert!(
            outputs
                .insert(
                    rule.name.to_owned(),
                    format!("${{{{ steps.{}.outputs.{} }}}}", step_name, rule.name)
                )
                .is_none()
        );

        // Inverted rules report true when some changed file does NOT match
        // the pattern (grep -v), i.e. "anything outside the skip-list changed".
        let grep_arg = if rule.invert { "-qvP" } else { "-qP" };
        script.push_str(&format!(
            "check_pattern \"{}\" '{}' {}\n",
            rule.name, rule.pattern, grep_arg
        ));
    }

    let job = Job::default()
        .runs_on(runners::LINUX_SMALL)
        .with_repository_owner_guard()
        .outputs(outputs)
        // Deep history is needed for the merge-base / HEAD~1 comparison above.
        .add_step(steps::checkout_repo().with_deep_history_on_non_main())
        .add_step(Step::new(step_name.clone()).run(script).id(step_name));

    NamedJob { name, job }
}
235
236pub fn tests_pass(jobs: &[NamedJob]) -> NamedJob {
237    let mut script = String::from(indoc::indoc! {r#"
238        set +x
239        EXIT_CODE=0
240
241        check_result() {
242          echo "* $1: $2"
243          if [[ "$2" != "skipped" && "$2" != "success" ]]; then EXIT_CODE=1; fi
244        }
245
246    "#});
247
248    script.push_str(
249        &jobs
250            .iter()
251            .map(|job| {
252                format!(
253                    "check_result \"{}\" \"${{{{ needs.{}.result }}}}\"",
254                    job.name, job.name
255                )
256            })
257            .collect::<Vec<_>>()
258            .join("\n"),
259    );
260
261    script.push_str("\n\nexit $EXIT_CODE\n");
262
263    let job = Job::default()
264        .runs_on(runners::LINUX_SMALL)
265        .needs(
266            jobs.iter()
267                .map(|j| j.name.to_string())
268                .collect::<Vec<String>>(),
269        )
270        .cond(repository_owner_guard_expression(true))
271        .add_step(named::bash(&script));
272
273    named::job(job)
274}
275
/// Release asset name of the prebuilt `ts_query_ls` Linux binary fetched by
/// `check_style`.
const TS_QUERY_LS_FILE: &str = "ts_query_ls-x86_64-unknown-linux-gnu.tar.gz";
/// Pinned `ts_query_ls` release tag to download.
const CI_TS_QUERY_RELEASE: &str = "tags/v3.15.1";
278
/// Style job: prettier, `cargo fmt`, to-do/keymap scripts, typo checking, and
/// Tree-sitter query formatting via a pinned `ts_query_ls` release.
fn check_style() -> NamedJob {
    // Spell-check the tree with crate-ci/typos, pinned by sha (== v1.40.0).
    fn check_for_typos() -> Step<Use> {
        named::uses(
            "crate-ci",
            "typos",
            "2d0ce569feab1f8752f1dde43cc2f2aa53236e06",
        ) // v1.40.0
        .with(("config", "./typos.toml"))
    }

    // Download the prebuilt ts_query_ls release asset (see the consts above
    // this function) via a pinned fetch-gh-release-asset action.
    fn fetch_ts_query_ls() -> Step<Use> {
        named::uses(
            "dsaltares",
            "fetch-gh-release-asset",
            "aa37ae5c44d3c9820bc12fe675e8670ecd93bd1c",
        ) // v1.1.1
        .add_with(("repo", "ribru17/ts_query_ls"))
        .add_with(("version", CI_TS_QUERY_RELEASE))
        .add_with(("file", TS_QUERY_LS_FILE))
    }

    // Unpack the binary and fail with a help message if any query file is
    // unformatted.
    fn run_ts_query_ls() -> Step<Run> {
        named::bash(formatdoc!(
            r#"tar -xf {TS_QUERY_LS_FILE}
            ./ts_query_ls format --check . || {{
                echo "Found unformatted queries, please format them with ts_query_ls."
                echo "For easy use, install the Tree-sitter query extension:"
                echo "zed://extension/tree-sitter-query"
                false
            }}"#
        ))
    }

    named::job(
        release_job(&[])
            .runs_on(runners::LINUX_MEDIUM)
            .add_step(steps::checkout_repo())
            .add_step(steps::cache_rust_dependencies_namespace())
            .add_step(steps::setup_pnpm())
            .add_step(steps::prettier())
            .add_step(steps::cargo_fmt())
            .add_step(steps::script("./script/check-todos"))
            .add_step(steps::script("./script/check-keymaps"))
            .add_step(check_for_typos())
            .add_step(fetch_ts_query_ls())
            .add_step(run_ts_query_ls()),
    )
}
327
/// Dependency hygiene: unused dependencies (cargo-machete), lockfile
/// freshness (`cargo update --locked`), and GitHub dependency review on PRs.
fn check_dependencies() -> NamedJob {
    // Install a pinned cargo-machete via the rs-cargo action (sha == v2).
    fn install_cargo_machete() -> Step<Use> {
        named::uses(
            "clechasseur",
            "rs-cargo",
            "8435b10f6e71c2e3d4d3b7573003a8ce4bfc6386", // v2
        )
        .add_with(("command", "install"))
        .add_with(("args", "cargo-machete@0.7.0"))
    }

    // Run `cargo machete` to flag unused dependencies.
    fn run_cargo_machete() -> Step<Use> {
        named::uses(
            "clechasseur",
            "rs-cargo",
            "8435b10f6e71c2e3d4d3b7573003a8ce4bfc6386", // v2
        )
        .add_with(("command", "machete"))
    }

    // `--locked` makes this fail if Cargo.lock is out of date with the
    // workspace manifests.
    fn check_cargo_lock() -> Step<Run> {
        named::bash("cargo update --locked --workspace")
    }

    // GitHub's dependency-review action; gated to pull_request events only
    // (see the condition), with license checking disabled.
    fn check_vulnerable_dependencies() -> Step<Use> {
        named::uses(
            "actions",
            "dependency-review-action",
            "67d4f4bd7a9b17a0db54d2a7519187c65e339de8", // v4
        )
        .if_condition(Expression::new("github.event_name == 'pull_request'"))
        .with(("license-check", false))
    }

    named::job(use_clang(
        release_job(&[])
            .runs_on(runners::LINUX_SMALL)
            .add_step(steps::checkout_repo())
            .add_step(steps::cache_rust_dependencies_namespace())
            .add_step(install_cargo_machete())
            .add_step(run_cargo_machete())
            .add_step(check_cargo_lock())
            .add_step(check_vulnerable_dependencies()),
    ))
}
373
/// Verifies that `gpui_platform` type-checks for `wasm32-unknown-unknown`
/// using a nightly toolchain with `-Zbuild-std` and atomics-enabled target
/// features.
fn check_wasm() -> NamedJob {
    // Nightly is required for `-Zbuild-std`; also pulls in rust-src and the
    // wasm target.
    fn install_nightly_wasm_toolchain() -> Step<Run> {
        named::bash(
            "rustup toolchain install nightly --component rust-src --target wasm32-unknown-unknown",
        )
    }

    // `cargo check` only (no codegen artifacts needed), with the target
    // features the wasm build expects set via RUSTFLAGS.
    fn cargo_check_wasm() -> Step<Run> {
        named::bash(concat!(
            "cargo +nightly -Zbuild-std=std,panic_abort ",
            "check --target wasm32-unknown-unknown -p gpui_platform",
        ))
        .add_env((
            "CARGO_TARGET_WASM32_UNKNOWN_UNKNOWN_RUSTFLAGS",
            "-C target-feature=+atomics,+bulk-memory,+mutable-globals",
        ))
    }

    named::job(
        release_job(&[])
            .runs_on(runners::LINUX_LARGE)
            .add_step(steps::checkout_repo())
            .add_step(steps::setup_cargo_config(Platform::Linux))
            .add_step(steps::cache_rust_dependencies_namespace())
            .add_step(install_nightly_wasm_toolchain())
            .add_step(steps::setup_sccache(Platform::Linux))
            .add_step(cargo_check_wasm())
            .add_step(steps::show_sccache_stats(Platform::Linux))
            .add_step(steps::cleanup_cargo_config(Platform::Linux)),
    )
}
405
/// Ensures `collab` plus every workspace binary and example still builds on
/// Linux (clang toolchain, sccache-accelerated).
fn check_workspace_binaries() -> NamedJob {
    named::job(use_clang(
        release_job(&[])
            .runs_on(runners::LINUX_LARGE)
            .add_step(steps::checkout_repo())
            .add_step(steps::setup_cargo_config(Platform::Linux))
            .add_step(steps::cache_rust_dependencies_namespace())
            .map(steps::install_linux_dependencies)
            .add_step(steps::setup_sccache(Platform::Linux))
            .add_step(steps::script("cargo build -p collab"))
            .add_step(steps::script("cargo build --workspace --bins --examples"))
            .add_step(steps::show_sccache_stats(Platform::Linux))
            .add_step(steps::cleanup_cargo_config(Platform::Linux)),
    ))
}
421
422pub(crate) fn clippy(platform: Platform) -> NamedJob {
423    let runner = match platform {
424        Platform::Windows => runners::WINDOWS_DEFAULT,
425        Platform::Linux => runners::LINUX_DEFAULT,
426        Platform::Mac => runners::MAC_DEFAULT,
427    };
428    let mut job = release_job(&[])
429        .runs_on(runner)
430        .add_step(steps::checkout_repo())
431        .add_step(steps::setup_cargo_config(platform))
432        .when(
433            platform == Platform::Linux || platform == Platform::Mac,
434            |this| this.add_step(steps::cache_rust_dependencies_namespace()),
435        )
436        .when(
437            platform == Platform::Linux,
438            steps::install_linux_dependencies,
439        )
440        .add_step(steps::setup_sccache(platform))
441        .add_step(steps::clippy(platform))
442        .add_step(steps::show_sccache_stats(platform));
443    if platform == Platform::Linux {
444        job = use_clang(job);
445    }
446    NamedJob {
447        name: format!("clippy_{platform}"),
448        job,
449    }
450}
451
/// Platform test job with the orchestrate-provided `changed_packages`
/// nextest filter applied.
pub(crate) fn run_platform_tests(platform: Platform) -> NamedJob {
    run_platform_tests_impl(platform, true)
}
455
/// Platform test job that always runs the full test suite (no
/// `changed_packages` filter).
pub(crate) fn run_platform_tests_no_filter(platform: Platform) -> NamedJob {
    run_platform_tests_impl(platform, false)
}
459
/// Builds the `run_tests_<platform>` job: checkout, cargo config, caches,
/// node, nextest, sccache, then the test run itself.
///
/// When `filter_packages` is true, the nextest step is filtered by the
/// `changed_packages` output of the `"orchestrate"` job; otherwise the full
/// suite runs.
fn run_platform_tests_impl(platform: Platform, filter_packages: bool) -> NamedJob {
    let runner = match platform {
        Platform::Windows => runners::WINDOWS_DEFAULT,
        Platform::Linux => runners::LINUX_DEFAULT,
        Platform::Mac => runners::MAC_DEFAULT,
    };
    NamedJob {
        name: format!("run_tests_{platform}"),
        job: release_job(&[])
            .runs_on(runner)
            // Linux only: a postgres service container for the tests that
            // need a database.
            .when(platform == Platform::Linux, |job| {
                job.add_service(
                    "postgres",
                    Container::new("postgres:15")
                        .add_env(("POSTGRES_HOST_AUTH_METHOD", "trust"))
                        .ports(vec![Port::Name("5432:5432".into())])
                        .options(
                            "--health-cmd pg_isready \
                             --health-interval 500ms \
                             --health-timeout 5s \
                             --health-retries 10",
                        ),
                )
            })
            .add_step(steps::checkout_repo())
            .add_step(steps::setup_cargo_config(platform))
            .when(platform == Platform::Mac, |this| {
                this.add_step(steps::cache_rust_dependencies_namespace())
            })
            // Linux additionally builds with clang.
            .when(platform == Platform::Linux, |this| {
                use_clang(this.add_step(steps::cache_rust_dependencies_namespace()))
            })
            .when(
                platform == Platform::Linux,
                steps::install_linux_dependencies,
            )
            .add_step(steps::setup_node())
            // Windows runners presumably have nextest preinstalled — only
            // Linux/Mac install it here; confirm if this changes.
            .when(
                platform == Platform::Linux || platform == Platform::Mac,
                |job| job.add_step(steps::cargo_install_nextest()),
            )
            .add_step(steps::clear_target_dir_if_large(platform))
            .add_step(steps::setup_sccache(platform))
            // Exactly one of the following two nextest steps is added.
            .when(filter_packages, |job| {
                job.add_step(
                    steps::cargo_nextest(platform).with_changed_packages_filter("orchestrate"),
                )
            })
            .when(!filter_packages, |job| {
                job.add_step(steps::cargo_nextest(platform))
            })
            .add_step(steps::show_sccache_stats(platform))
            .add_step(steps::cleanup_cargo_config(platform)),
    }
}
515
/// Runs `buf breaking` to detect backwards-incompatible protobuf changes
/// against the base branch.
///
/// NOTE(review): despite the name, no postgres-migration-specific steps are
/// visible in this job — the name may be historical; confirm.
pub(crate) fn check_postgres_and_protobuf_migrations() -> NamedJob {
    // Determine the ref buf should compare against: the merge-base with main
    // on direct pushes, or the PR base branch (after merging it into a temp
    // branch so the comparison reflects the merged state). Note the merge
    // commit message says "merge main into temp" even when merging
    // $GITHUB_BASE_REF — presumably harmless; confirm.
    fn ensure_fresh_merge() -> Step<Run> {
        named::bash(indoc::indoc! {r#"
            if [ -z "$GITHUB_BASE_REF" ];
            then
              echo "BUF_BASE_BRANCH=$(git merge-base origin/main HEAD)" >> "$GITHUB_ENV"
            else
              git checkout -B temp
              git merge -q "origin/$GITHUB_BASE_REF" -m "merge main into temp"
              echo "BUF_BASE_BRANCH=$GITHUB_BASE_REF" >> "$GITHUB_ENV"
            fi
        "#})
    }

    // NOTE(review): these buf actions are pinned to the mutable "v1" tag,
    // unlike the sha-pinned actions elsewhere in this file — confirm intent.
    fn bufbuild_setup_action() -> Step<Use> {
        named::uses("bufbuild", "buf-setup-action", "v1")
            .add_with(("version", "v1.29.0"))
            .add_with(("github_token", vars::GITHUB_TOKEN))
    }

    // Compare crates/proto/proto/ against the same subdir on BUF_BASE_BRANCH.
    fn bufbuild_breaking_action() -> Step<Use> {
        named::uses("bufbuild", "buf-breaking-action", "v1").add_with(("input", "crates/proto/proto/"))
            .add_with(("against", "https://github.com/${GITHUB_REPOSITORY}.git#branch=${BUF_BASE_BRANCH},subdir=crates/proto/proto/"))
    }

    named::job(
        release_job(&[])
            .runs_on(runners::LINUX_DEFAULT)
            // Identity for the temp merge commit created above.
            .add_env(("GIT_AUTHOR_NAME", "Protobuf Action"))
            .add_env(("GIT_AUTHOR_EMAIL", "ci@zed.dev"))
            .add_env(("GIT_COMMITTER_NAME", "Protobuf Action"))
            .add_env(("GIT_COMMITTER_EMAIL", "ci@zed.dev"))
            // Full history is required for merge-base and the branch merge.
            .add_step(steps::checkout_repo().with_full_history())
            .add_step(ensure_fresh_merge())
            .add_step(bufbuild_setup_action())
            .add_step(bufbuild_breaking_action()),
    )
}
554
/// Runs all workspace documentation tests (`cargo test --doc`) on Linux.
fn doctests() -> NamedJob {
    // --no-fail-fast so every doctest failure is reported in one run.
    fn run_doctests() -> Step<Run> {
        named::bash(indoc::indoc! {r#"
            cargo test --workspace --doc --no-fail-fast
        "#})
        .id("run_doctests")
    }

    named::job(use_clang(
        release_job(&[])
            .runs_on(runners::LINUX_DEFAULT)
            .add_step(steps::checkout_repo())
            .add_step(steps::cache_rust_dependencies_namespace())
            .map(steps::install_linux_dependencies)
            .add_step(steps::setup_cargo_config(Platform::Linux))
            .add_step(steps::setup_sccache(Platform::Linux))
            .add_step(run_doctests())
            .add_step(steps::show_sccache_stats(Platform::Linux))
            .add_step(steps::cleanup_cargo_config(Platform::Linux)),
    ))
}
576
/// Runs the license check and license generation scripts.
///
/// NOTE(review): unlike sibling jobs this uses a bare `Job::default()`
/// rather than `release_job(&[])`, so it carries no repository-owner guard —
/// confirm whether that is intentional.
fn check_licenses() -> NamedJob {
    named::job(
        Job::default()
            .runs_on(runners::LINUX_SMALL)
            .add_step(steps::checkout_repo())
            .add_step(steps::cache_rust_dependencies_namespace())
            .add_step(steps::script("./script/check-licenses"))
            .add_step(steps::script("./script/generate-licenses")),
    )
}
587
/// Docs job: link-checks the markdown sources, builds the mdBook docs, then
/// link-checks the generated HTML.
fn check_docs() -> NamedJob {
    // Lychee link checker, pinned by sha (== v2.4.1); external http(s) links
    // are excluded so the job stays offline-deterministic.
    fn lychee_link_check(dir: &str) -> Step<Use> {
        named::uses(
            "lycheeverse",
            "lychee-action",
            "82202e5e9c2f4ef1a55a3d02563e1cb6041e5332",
        ) // v2.4.1
        .add_with(("args", format!("--no-progress --exclude '^http' '{dir}'")))
        .add_with(("fail", true))
        .add_with(("jobSummary", false))
    }

    // Install a pinned mdBook (action sha == v2, mdbook 0.4.37).
    fn install_mdbook() -> Step<Use> {
        named::uses(
            "peaceiris",
            "actions-mdbook",
            "ee69d230fe19748b7abf22df32acaa93833fad08", // v2
        )
        .with(("mdbook-version", "0.4.37"))
    }

    // Build docs into target/deploy/docs/ for the post-build link check.
    fn build_docs() -> Step<Run> {
        named::bash(indoc::indoc! {r#"
            mkdir -p target/deploy
            mdbook build ./docs --dest-dir=../target/deploy/docs/
        "#})
    }

    named::job(use_clang(
        release_job(&[])
            .runs_on(runners::LINUX_LARGE)
            .add_step(steps::checkout_repo())
            .add_step(steps::setup_cargo_config(Platform::Linux))
            // todo(ci): un-inline build_docs/action.yml here
            .add_step(steps::cache_rust_dependencies_namespace())
            .add_step(
                lychee_link_check("./docs/src/**/*"), // check markdown links
            )
            .map(steps::install_linux_dependencies)
            .add_step(steps::script("./script/generate-action-metadata"))
            .add_step(install_mdbook())
            .add_step(build_docs())
            .add_step(
                lychee_link_check("target/deploy/docs"), // check links in generated html
            ),
    ))
}
635
/// Lints automation: shellcheck on repo scripts, actionlint on workflows, and
/// a check that `cargo xtask workflows` output is committed.
pub(crate) fn check_scripts() -> NamedJob {
    // Fetch actionlint via its official download script; the step's id
    // ("get_actionlint") exposes the executable path as a step output.
    fn download_actionlint() -> Step<Run> {
        named::bash(
            "bash <(curl https://raw.githubusercontent.com/rhysd/actionlint/main/scripts/download-actionlint.bash)",
        )
    }

    // Run the binary downloaded by the "get_actionlint" step above.
    fn run_actionlint() -> Step<Run> {
        named::bash(indoc::indoc! {r#"
            ${{ steps.get_actionlint.outputs.executable }} -color
        "#})
    }

    fn run_shellcheck() -> Step<Run> {
        named::bash("./script/shellcheck-scripts error")
    }

    // Regenerate the workflows and fail if the committed .github/ files are
    // stale relative to the generator (i.e. this very file's output).
    fn check_xtask_workflows() -> Step<Run> {
        named::bash(indoc::indoc! {r#"
            cargo xtask workflows
            if ! git diff --exit-code .github; then
              echo "Error: .github directory has uncommitted changes after running 'cargo xtask workflows'"
              echo "Please run 'cargo xtask workflows' locally and commit the changes"
              exit 1
            fi
        "#})
    }

    named::job(
        release_job(&[])
            .runs_on(runners::LINUX_SMALL)
            .add_step(steps::checkout_repo())
            .add_step(run_shellcheck())
            .add_step(download_actionlint().id("get_actionlint"))
            .add_step(run_actionlint())
            .add_step(check_xtask_workflows()),
    )
}