run_tests.rs

  1use gh_workflow::{
  2    Concurrency, Container, Event, Expression, Job, Port, PullRequest, Push, Run, Step, Use,
  3    Workflow,
  4};
  5use indexmap::IndexMap;
  6use indoc::formatdoc;
  7
  8use crate::tasks::workflows::{
  9    steps::{CommonJobConditions, repository_owner_guard_expression, use_clang},
 10    vars::{self, PathCondition},
 11};
 12
 13use super::{
 14    runners::{self, Platform},
 15    steps::{self, FluentBuilder, NamedJob, named, release_job},
 16};
 17
/// Builds the `run_tests` workflow: an `orchestrate` job inspects the diff and
/// publishes path-based conditions as job outputs, downstream jobs are guarded
/// on those conditions, and a final `tests_pass` job aggregates the results
/// (suitable as a single required check for branch protection).
pub(crate) fn run_tests() -> Workflow {
    // Specify anything which should potentially skip full test suite in this regex:
    // - docs/
    // - script/update_top_ranking_issues/
    // - .github/ISSUE_TEMPLATE/
    // - .github/workflows/  (except workflows matching "run_tests", which the
    //   `(?!run_tests)` negative lookahead below keeps in scope)
    let should_run_tests = PathCondition::inverted(
        "run_tests",
        r"^(docs/|script/update_top_ranking_issues/|\.github/(ISSUE_TEMPLATE|workflows/(?!run_tests)))",
    );
    // Docs checks run when docs or any Rust source file changes.
    let should_check_docs = PathCondition::new("run_docs", r"^(docs/|crates/.*\.rs)");
    // Action/script checks run when CI config, xtask, or scripts change.
    let should_check_scripts = PathCondition::new(
        "run_action_checks",
        r"^\.github/(workflows/|actions/|actionlint.yml)|tooling/xtask|script/",
    );
    let should_check_licences =
        PathCondition::new("run_licenses", r"^(Cargo.lock|script/.*licenses)");

    // The orchestrate job evaluates each registered condition against the
    // changed files and exposes one boolean output per condition.
    let orchestrate = orchestrate(&[
        &should_check_scripts,
        &should_check_docs,
        &should_check_licences,
        &should_run_tests,
    ]);

    let mut jobs = vec![
        orchestrate,
        check_style(),
        should_run_tests.guard(clippy(Platform::Windows)),
        should_run_tests.guard(clippy(Platform::Linux)),
        should_run_tests.guard(clippy(Platform::Mac)),
        should_run_tests.guard(run_platform_tests(Platform::Windows)),
        should_run_tests.guard(run_platform_tests(Platform::Linux)),
        should_run_tests.guard(run_platform_tests(Platform::Mac)),
        should_run_tests.guard(doctests()),
        should_run_tests.guard(check_workspace_binaries()),
        should_run_tests.guard(check_wasm()),
        should_run_tests.guard(check_dependencies()), // could be more specific here?
        should_check_docs.guard(check_docs()),
        should_check_licences.guard(check_licenses()),
        should_check_scripts.guard(check_scripts()),
    ];
    // NOTE(review): tests_pass gates only on the jobs collected so far; the
    // migrations job pushed below is excluded from the gate — confirm that
    // exclusion is intentional.
    let tests_pass = tests_pass(&jobs);

    jobs.push(should_run_tests.guard(check_postgres_and_protobuf_migrations())); // could be more specific here?

    named::workflow()
        .add_event(
            Event::default()
                .push(
                    Push::default()
                        .add_branch("main")
                        .add_branch("v[0-9]+.[0-9]+.x"),
                )
                .pull_request(PullRequest::default().add_branch("**")),
        )
        // On `main` the group key embeds the sha, so every push runs to
        // completion; on other refs the constant 'anysha' makes pushes to the
        // same ref share a group, and `cancel_in_progress` supersedes old runs.
        .concurrency(
            Concurrency::default()
                .group(concat!(
                    "${{ github.workflow }}-${{ github.ref_name }}-",
                    "${{ github.ref_name == 'main' && github.sha || 'anysha' }}"
                ))
                .cancel_in_progress(true),
        )
        .add_env(("CARGO_TERM_COLOR", "always"))
        .add_env(("RUST_BACKTRACE", 1))
        .add_env(("CARGO_INCREMENTAL", 0))
        .map(|mut workflow| {
            for job in jobs {
                workflow = workflow.add_job(job.name, job.job)
            }
            workflow
        })
        .add_job(tests_pass.name, tests_pass.job)
}
 93
// Generates a bash script that checks changed files against regex patterns
// and sets GitHub output variables accordingly
/// Variant that also emits the `changed_packages` output (a nextest filterset
/// derived from which packages the diff touches).
pub fn orchestrate(rules: &[&PathCondition]) -> NamedJob {
    orchestrate_impl(rules, true)
}
 99
/// Same as [`orchestrate`] but without the `changed_packages` package-filter
/// output, for workflows that always run their full job set.
pub fn orchestrate_without_package_filter(rules: &[&PathCondition]) -> NamedJob {
    orchestrate_impl(rules, false)
}
103
/// Builds the `orchestrate` job: a single bash step that diffs the change set
/// against a base revision, evaluates each [`PathCondition`] regex, and writes
/// one `<name>=true|false` line per rule to `$GITHUB_OUTPUT`. When
/// `include_package_filter` is true it additionally computes a
/// `changed_packages` nextest filterset output.
///
/// Panics (via `assert!`) if a rule is registered twice or two rules share a
/// name — both are programmer errors in workflow construction.
fn orchestrate_impl(rules: &[&PathCondition], include_package_filter: bool) -> NamedJob {
    let name = "orchestrate".to_owned();
    let step_name = "filter".to_owned();
    let mut script = String::new();

    // Preamble: pick the diff base (merge-base with the PR base branch, or
    // HEAD~1 on direct pushes) and define the `check_pattern` helper.
    script.push_str(indoc::indoc! {r#"
        set -euo pipefail
        if [ -z "$GITHUB_BASE_REF" ]; then
          echo "Not in a PR context (i.e., push to main/stable/preview)"
          COMPARE_REV="$(git rev-parse HEAD~1)"
        else
          echo "In a PR context comparing to pull_request.base.ref"
          git fetch origin "$GITHUB_BASE_REF" --depth=350
          COMPARE_REV="$(git merge-base "origin/${GITHUB_BASE_REF}" HEAD)"
        fi
        CHANGED_FILES="$(git diff --name-only "$COMPARE_REV" ${{ github.sha }})"

        check_pattern() {
          local output_name="$1"
          local pattern="$2"
          local grep_arg="$3"

          echo "$CHANGED_FILES" | grep "$grep_arg" "$pattern" && \
            echo "${output_name}=true" >> "$GITHUB_OUTPUT" || \
            echo "${output_name}=false" >> "$GITHUB_OUTPUT"
        }

    "#});

    let mut outputs = IndexMap::new();

    if include_package_filter {
        // Emits `changed_packages`: empty string means "run everything";
        // otherwise a nextest filterset of `rdeps(pkg)` terms joined by `|`.
        script.push_str(indoc::indoc! {r#"
        # Check for changes that require full rebuild (no filter)
        # Direct pushes to main/stable/preview always run full suite
        if [ -z "$GITHUB_BASE_REF" ]; then
          echo "Not a PR, running full test suite"
          echo "changed_packages=" >> "$GITHUB_OUTPUT"
        elif echo "$CHANGED_FILES" | grep -qP '^(rust-toolchain\.toml|\.cargo/|\.github/|Cargo\.(toml|lock)$)'; then
          echo "Toolchain, cargo config, or root Cargo files changed, will run all tests"
          echo "changed_packages=" >> "$GITHUB_OUTPUT"
        else
          # Extract changed directories from file paths
          CHANGED_DIRS=$(echo "$CHANGED_FILES" | \
            grep -oP '^(crates|tooling)/\K[^/]+' | \
            sort -u || true)

          # Build directory-to-package mapping using cargo metadata
          DIR_TO_PKG=$(cargo metadata --format-version=1 --no-deps 2>/dev/null | \
            jq -r '.packages[] | select(.manifest_path | test("crates/|tooling/")) | "\(.manifest_path | capture("(crates|tooling)/(?<dir>[^/]+)") | .dir)=\(.name)"')

          # Map directory names to package names
          FILE_CHANGED_PKGS=""
          for dir in $CHANGED_DIRS; do
            pkg=$(echo "$DIR_TO_PKG" | grep "^${dir}=" | cut -d= -f2 | head -1)
            if [ -n "$pkg" ]; then
              FILE_CHANGED_PKGS=$(printf '%s\n%s' "$FILE_CHANGED_PKGS" "$pkg")
            else
              # Fall back to directory name if no mapping found
              FILE_CHANGED_PKGS=$(printf '%s\n%s' "$FILE_CHANGED_PKGS" "$dir")
            fi
          done
          FILE_CHANGED_PKGS=$(echo "$FILE_CHANGED_PKGS" | grep -v '^$' | sort -u || true)

          # If assets/ changed, add crates that depend on those assets
          if echo "$CHANGED_FILES" | grep -qP '^assets/'; then
            FILE_CHANGED_PKGS=$(printf '%s\n%s\n%s\n%s' "$FILE_CHANGED_PKGS" "settings" "storybook" "assets" | sort -u)
          fi

          # Combine all changed packages
          ALL_CHANGED_PKGS=$(echo "$FILE_CHANGED_PKGS" | grep -v '^$' || true)

          if [ -z "$ALL_CHANGED_PKGS" ]; then
            echo "No package changes detected, will run all tests"
            echo "changed_packages=" >> "$GITHUB_OUTPUT"
          else
            # Build nextest filterset with rdeps for each package
            FILTERSET=$(echo "$ALL_CHANGED_PKGS" | \
              sed 's/.*/rdeps(&)/' | \
              tr '\n' '|' | \
              sed 's/|$//')
            echo "Changed packages filterset: $FILTERSET"
            echo "changed_packages=$FILTERSET" >> "$GITHUB_OUTPUT"
          fi
        fi

    "#});

        outputs.insert(
            "changed_packages".to_owned(),
            format!("${{{{ steps.{}.outputs.changed_packages }}}}", step_name),
        );
    }

    for rule in rules {
        // Record which job sets this rule's output; a rule already claimed by
        // another orchestrate job is a construction bug, hence the assert.
        assert!(
            rule.set_by_step
                .borrow_mut()
                .replace(name.clone())
                .is_none()
        );
        // Expose the rule's result as a job output; duplicate rule names
        // would silently shadow each other, so assert uniqueness.
        assert!(
            outputs
                .insert(
                    rule.name.to_owned(),
                    format!("${{{{ steps.{}.outputs.{} }}}}", step_name, rule.name)
                )
                .is_none()
        );

        // `-v` inverts the match for inverted rules: the output is true when
        // some changed file does NOT match the pattern.
        let grep_arg = if rule.invert { "-qvP" } else { "-qP" };
        script.push_str(&format!(
            "check_pattern \"{}\" '{}' {}\n",
            rule.name, rule.pattern, grep_arg
        ));
    }

    let job = Job::default()
        .runs_on(runners::LINUX_SMALL)
        .with_repository_owner_guard()
        .outputs(outputs)
        // Deeper history is needed so merge-base computations succeed.
        .add_step(steps::checkout_repo().with_deep_history_on_non_main())
        .add_step(Step::new(step_name.clone()).run(script).id(step_name));

    NamedJob { name, job }
}
230
231pub fn tests_pass(jobs: &[NamedJob]) -> NamedJob {
232    let mut script = String::from(indoc::indoc! {r#"
233        set +x
234        EXIT_CODE=0
235
236        check_result() {
237          echo "* $1: $2"
238          if [[ "$2" != "skipped" && "$2" != "success" ]]; then EXIT_CODE=1; fi
239        }
240
241    "#});
242
243    script.push_str(
244        &jobs
245            .iter()
246            .map(|job| {
247                format!(
248                    "check_result \"{}\" \"${{{{ needs.{}.result }}}}\"",
249                    job.name, job.name
250                )
251            })
252            .collect::<Vec<_>>()
253            .join("\n"),
254    );
255
256    script.push_str("\n\nexit $EXIT_CODE\n");
257
258    let job = Job::default()
259        .runs_on(runners::LINUX_SMALL)
260        .needs(
261            jobs.iter()
262                .map(|j| j.name.to_string())
263                .collect::<Vec<String>>(),
264        )
265        .cond(repository_owner_guard_expression(true))
266        .add_step(named::bash(&script));
267
268    named::job(job)
269}
270
// Release-asset filename for the prebuilt ts_query_ls binary (Linux x86_64).
const TS_QUERY_LS_FILE: &str = "ts_query_ls-x86_64-unknown-linux-gnu.tar.gz";
// Pinned ts_query_ls release tag fetched in CI.
const CI_TS_QUERY_RELEASE: &str = "tags/v3.15.1";
273
274pub(crate) fn fetch_ts_query_ls() -> Step<Use> {
275    named::uses(
276        "dsaltares",
277        "fetch-gh-release-asset",
278        "aa37ae5c44d3c9820bc12fe675e8670ecd93bd1c",
279    ) // v1.1.1
280    .add_with(("repo", "ribru17/ts_query_ls"))
281    .add_with(("version", CI_TS_QUERY_RELEASE))
282    .add_with(("file", TS_QUERY_LS_FILE))
283}
284
/// Step that unpacks the downloaded ts_query_ls archive and runs its formatter
/// in `--check` mode, printing install guidance and failing (`false`) when any
/// tree-sitter query is unformatted.
pub(crate) fn run_ts_query_ls() -> Step<Run> {
    // `{{` / `}}` are formatdoc escapes producing literal braces in bash.
    named::bash(formatdoc!(
        r#"tar -xf {TS_QUERY_LS_FILE}
        ./ts_query_ls format --check . || {{
            echo "Found unformatted queries, please format them with ts_query_ls."
            echo "For easy use, install the Tree-sitter query extension:"
            echo "zed://extension/tree-sitter-query"
            false
        }}"#
    ))
}
296
/// Style gate: prettier, rustfmt, repo scripts (todos/keymaps), typo check,
/// and tree-sitter query formatting via ts_query_ls.
fn check_style() -> NamedJob {
    // Typos checker pinned by commit SHA, configured by ./typos.toml.
    fn check_for_typos() -> Step<Use> {
        named::uses(
            "crate-ci",
            "typos",
            "2d0ce569feab1f8752f1dde43cc2f2aa53236e06",
        ) // v1.40.0
        .with(("config", "./typos.toml"))
    }

    named::job(
        release_job(&[])
            .runs_on(runners::LINUX_MEDIUM)
            .add_step(steps::checkout_repo())
            .add_step(steps::cache_rust_dependencies_namespace())
            .add_step(steps::setup_pnpm())
            .add_step(steps::prettier())
            .add_step(steps::cargo_fmt())
            .add_step(steps::script("./script/check-todos"))
            .add_step(steps::script("./script/check-keymaps"))
            .add_step(check_for_typos())
            .add_step(fetch_ts_query_ls())
            .add_step(run_ts_query_ls()),
    )
}
322
/// Dependency hygiene job: unused-dependency detection (cargo-machete),
/// lockfile freshness, and GitHub's dependency review action.
fn check_dependencies() -> NamedJob {
    // Installs cargo-machete at a pinned version via the rs-cargo action.
    fn install_cargo_machete() -> Step<Use> {
        named::uses(
            "clechasseur",
            "rs-cargo",
            "8435b10f6e71c2e3d4d3b7573003a8ce4bfc6386", // v2
        )
        .add_with(("command", "install"))
        .add_with(("args", "cargo-machete@0.7.0"))
    }

    // Runs `cargo machete` to flag dependencies no crate actually uses.
    fn run_cargo_machete() -> Step<Use> {
        named::uses(
            "clechasseur",
            "rs-cargo",
            "8435b10f6e71c2e3d4d3b7573003a8ce4bfc6386", // v2
        )
        .add_with(("command", "machete"))
    }

    // `--locked` makes cargo fail if Cargo.lock is out of date with the
    // workspace manifests.
    fn check_cargo_lock() -> Step<Run> {
        named::bash("cargo update --locked --workspace")
    }

    // Dependency review needs a PR diff, hence the event-name guard.
    fn check_vulnerable_dependencies() -> Step<Use> {
        named::uses(
            "actions",
            "dependency-review-action",
            "67d4f4bd7a9b17a0db54d2a7519187c65e339de8", // v4
        )
        .if_condition(Expression::new("github.event_name == 'pull_request'"))
        .with(("license-check", false))
    }

    named::job(use_clang(
        release_job(&[])
            .runs_on(runners::LINUX_SMALL)
            .add_step(steps::checkout_repo())
            .add_step(steps::cache_rust_dependencies_namespace())
            .add_step(install_cargo_machete())
            .add_step(run_cargo_machete())
            .add_step(check_cargo_lock())
            .add_step(check_vulnerable_dependencies()),
    ))
}
368
/// Verifies that `gpui_platform` type-checks for wasm32-unknown-unknown using
/// a nightly toolchain with `-Zbuild-std`.
fn check_wasm() -> NamedJob {
    // Nightly is required for `-Zbuild-std`; rust-src supplies std's sources.
    fn install_nightly_wasm_toolchain() -> Step<Run> {
        named::bash(
            "rustup toolchain install nightly --component rust-src --target wasm32-unknown-unknown",
        )
    }

    // `cargo check` of the wasm target with atomics/bulk-memory/mutable-globals
    // enabled via target-specific RUSTFLAGS.
    fn cargo_check_wasm() -> Step<Run> {
        named::bash(concat!(
            "cargo +nightly -Zbuild-std=std,panic_abort ",
            "check --target wasm32-unknown-unknown -p gpui_platform",
        ))
        .add_env((
            "CARGO_TARGET_WASM32_UNKNOWN_UNKNOWN_RUSTFLAGS",
            "-C target-feature=+atomics,+bulk-memory,+mutable-globals",
        ))
    }

    named::job(
        release_job(&[])
            .runs_on(runners::LINUX_LARGE)
            .add_step(steps::checkout_repo())
            .add_step(steps::setup_cargo_config(Platform::Linux))
            .add_step(steps::cache_rust_dependencies_namespace())
            .add_step(install_nightly_wasm_toolchain())
            .add_step(steps::setup_sccache(Platform::Linux))
            .add_step(cargo_check_wasm())
            .add_step(steps::show_sccache_stats(Platform::Linux))
            .add_step(steps::cleanup_cargo_config(Platform::Linux)),
    )
}
400
/// Ensures all workspace binaries and examples still build on Linux;
/// `collab` is built first as its own step before the workspace-wide build.
fn check_workspace_binaries() -> NamedJob {
    named::job(use_clang(
        release_job(&[])
            .runs_on(runners::LINUX_LARGE)
            .add_step(steps::checkout_repo())
            .add_step(steps::setup_cargo_config(Platform::Linux))
            .add_step(steps::cache_rust_dependencies_namespace())
            .map(steps::install_linux_dependencies)
            .add_step(steps::setup_sccache(Platform::Linux))
            .add_step(steps::script("cargo build -p collab"))
            .add_step(steps::script("cargo build --workspace --bins --examples"))
            .add_step(steps::show_sccache_stats(Platform::Linux))
            .add_step(steps::cleanup_cargo_config(Platform::Linux)),
    ))
}
416
417pub(crate) fn clippy(platform: Platform) -> NamedJob {
418    let runner = match platform {
419        Platform::Windows => runners::WINDOWS_DEFAULT,
420        Platform::Linux => runners::LINUX_DEFAULT,
421        Platform::Mac => runners::MAC_DEFAULT,
422    };
423    let mut job = release_job(&[])
424        .runs_on(runner)
425        .add_step(steps::checkout_repo())
426        .add_step(steps::setup_cargo_config(platform))
427        .when(
428            platform == Platform::Linux || platform == Platform::Mac,
429            |this| this.add_step(steps::cache_rust_dependencies_namespace()),
430        )
431        .when(
432            platform == Platform::Linux,
433            steps::install_linux_dependencies,
434        )
435        .add_step(steps::setup_sccache(platform))
436        .add_step(steps::clippy(platform))
437        .add_step(steps::show_sccache_stats(platform));
438    if platform == Platform::Linux {
439        job = use_clang(job);
440    }
441    NamedJob {
442        name: format!("clippy_{platform}"),
443        job,
444    }
445}
446
/// Platform test job that filters the test set down to packages affected by
/// the diff, via the orchestrate job's `changed_packages` output.
pub(crate) fn run_platform_tests(platform: Platform) -> NamedJob {
    run_platform_tests_impl(platform, true)
}
450
/// Platform test job that always runs the full suite (no package filter).
pub(crate) fn run_platform_tests_no_filter(platform: Platform) -> NamedJob {
    run_platform_tests_impl(platform, false)
}
454
/// Builds the per-platform test job (`run_tests_<platform>`). When
/// `filter_packages` is true, nextest gets a filterset sourced from the
/// `orchestrate` job's `changed_packages` output; otherwise everything runs.
fn run_platform_tests_impl(platform: Platform, filter_packages: bool) -> NamedJob {
    let runner = match platform {
        Platform::Windows => runners::WINDOWS_DEFAULT,
        Platform::Linux => runners::LINUX_DEFAULT,
        Platform::Mac => runners::MAC_DEFAULT,
    };
    NamedJob {
        name: format!("run_tests_{platform}"),
        job: release_job(&[])
            .runs_on(runner)
            // Linux runs get a Postgres 15 service container (trust auth);
            // health checks hold the job until the DB accepts connections.
            .when(platform == Platform::Linux, |job| {
                job.add_service(
                    "postgres",
                    Container::new("postgres:15")
                        .add_env(("POSTGRES_HOST_AUTH_METHOD", "trust"))
                        .ports(vec![Port::Name("5432:5432".into())])
                        .options(
                            "--health-cmd pg_isready \
                             --health-interval 500ms \
                             --health-timeout 5s \
                             --health-retries 10",
                        ),
                )
            })
            .add_step(steps::checkout_repo())
            .add_step(steps::setup_cargo_config(platform))
            .when(platform == Platform::Mac, |this| {
                this.add_step(steps::cache_rust_dependencies_namespace())
            })
            // Linux also wraps the job with the clang setup.
            .when(platform == Platform::Linux, |this| {
                use_clang(this.add_step(steps::cache_rust_dependencies_namespace()))
            })
            .when(
                platform == Platform::Linux,
                steps::install_linux_dependencies,
            )
            .add_step(steps::setup_node())
            // nextest is only installed on Linux/Mac; presumably the Windows
            // runner image ships it preinstalled — TODO confirm.
            .when(
                platform == Platform::Linux || platform == Platform::Mac,
                |job| job.add_step(steps::cargo_install_nextest()),
            )
            .add_step(steps::clear_target_dir_if_large(platform))
            .add_step(steps::setup_sccache(platform))
            .when(filter_packages, |job| {
                job.add_step(
                    steps::cargo_nextest(platform).with_changed_packages_filter("orchestrate"),
                )
            })
            .when(!filter_packages, |job| {
                job.add_step(steps::cargo_nextest(platform))
            })
            .add_step(steps::show_sccache_stats(platform))
            .add_step(steps::cleanup_cargo_config(platform)),
    }
}
510
/// Protobuf compatibility job: breaking-change detection against the merge
/// base (buf-breaking), `buf lint`, and `buf format --diff`.
///
/// NOTE(review): despite the name, no postgres-migration step is visible in
/// this block — confirm whether that check lives elsewhere or was removed.
pub(crate) fn check_postgres_and_protobuf_migrations() -> NamedJob {
    // Determines the buf comparison base: merge-base with main on direct
    // pushes, or a temp branch freshly merged with the PR base branch.
    fn ensure_fresh_merge() -> Step<Run> {
        named::bash(indoc::indoc! {r#"
            if [ -z "$GITHUB_BASE_REF" ];
            then
              echo "BUF_BASE_BRANCH=$(git merge-base origin/main HEAD)" >> "$GITHUB_ENV"
            else
              git checkout -B temp
              git merge -q "origin/$GITHUB_BASE_REF" -m "merge main into temp"
              echo "BUF_BASE_BRANCH=$GITHUB_BASE_REF" >> "$GITHUB_ENV"
            fi
        "#})
    }

    // Installs a pinned buf CLI version.
    fn bufbuild_setup_action() -> Step<Use> {
        named::uses("bufbuild", "buf-setup-action", "v1")
            .add_with(("version", "v1.29.0"))
            .add_with(("github_token", vars::GITHUB_TOKEN))
    }

    // Compares crates/proto/proto/ against the same directory at
    // BUF_BASE_BRANCH to flag breaking protobuf changes.
    fn bufbuild_breaking_action() -> Step<Use> {
        named::uses("bufbuild", "buf-breaking-action", "v1").add_with(("input", "crates/proto/proto/"))
            .add_with(("against", "https://github.com/${GITHUB_REPOSITORY}.git#branch=${BUF_BASE_BRANCH},subdir=crates/proto/proto/"))
    }

    fn buf_lint() -> Step<Run> {
        named::bash("buf lint crates/proto/proto")
    }

    fn check_protobuf_formatting() -> Step<Run> {
        named::bash("buf format --diff --exit-code crates/proto/proto")
    }

    // The git identity env vars allow the merge commit created in
    // ensure_fresh_merge to succeed on a bare CI runner.
    named::job(
        release_job(&[])
            .runs_on(runners::LINUX_DEFAULT)
            .add_env(("GIT_AUTHOR_NAME", "Protobuf Action"))
            .add_env(("GIT_AUTHOR_EMAIL", "ci@zed.dev"))
            .add_env(("GIT_COMMITTER_NAME", "Protobuf Action"))
            .add_env(("GIT_COMMITTER_EMAIL", "ci@zed.dev"))
            .add_step(steps::checkout_repo().with_full_history())
            .add_step(ensure_fresh_merge())
            .add_step(bufbuild_setup_action())
            .add_step(bufbuild_breaking_action())
            .add_step(buf_lint())
            .add_step(check_protobuf_formatting()),
    )
}
559
/// Runs `cargo test --doc` across the whole workspace on Linux.
fn doctests() -> NamedJob {
    // `--no-fail-fast` ensures every doctest is reported even after failures.
    fn run_doctests() -> Step<Run> {
        named::bash(indoc::indoc! {r#"
            cargo test --workspace --doc --no-fail-fast
        "#})
        .id("run_doctests")
    }

    named::job(use_clang(
        release_job(&[])
            .runs_on(runners::LINUX_DEFAULT)
            .add_step(steps::checkout_repo())
            .add_step(steps::cache_rust_dependencies_namespace())
            .map(steps::install_linux_dependencies)
            .add_step(steps::setup_cargo_config(Platform::Linux))
            .add_step(steps::setup_sccache(Platform::Linux))
            .add_step(run_doctests())
            .add_step(steps::show_sccache_stats(Platform::Linux))
            .add_step(steps::cleanup_cargo_config(Platform::Linux)),
    ))
}
581
582fn check_licenses() -> NamedJob {
583    named::job(
584        Job::default()
585            .runs_on(runners::LINUX_SMALL)
586            .add_step(steps::checkout_repo())
587            .add_step(steps::cache_rust_dependencies_namespace())
588            .add_step(steps::script("./script/check-licenses"))
589            .add_step(steps::script("./script/generate-licenses")),
590    )
591}
592
/// Docs job: checks markdown links, regenerates action metadata, builds the
/// mdBook docs, then re-checks links in the generated HTML.
fn check_docs() -> NamedJob {
    // Lychee link checker pinned by commit SHA; external links are skipped
    // via `--exclude '^http'`, so only local/relative links are verified.
    fn lychee_link_check(dir: &str) -> Step<Use> {
        named::uses(
            "lycheeverse",
            "lychee-action",
            "82202e5e9c2f4ef1a55a3d02563e1cb6041e5332",
        ) // v2.4.1
        .add_with(("args", format!("--no-progress --exclude '^http' '{dir}'")))
        .add_with(("fail", true))
        .add_with(("jobSummary", false))
    }

    // Installs mdBook at a pinned version.
    fn install_mdbook() -> Step<Use> {
        named::uses(
            "peaceiris",
            "actions-mdbook",
            "ee69d230fe19748b7abf22df32acaa93833fad08", // v2
        )
        .with(("mdbook-version", "0.4.37"))
    }

    // Builds the book into target/deploy/docs/ (dest-dir is relative to
    // the ./docs book root).
    fn build_docs() -> Step<Run> {
        named::bash(indoc::indoc! {r#"
            mkdir -p target/deploy
            mdbook build ./docs --dest-dir=../target/deploy/docs/
        "#})
    }

    named::job(use_clang(
        release_job(&[])
            .runs_on(runners::LINUX_LARGE)
            .add_step(steps::checkout_repo())
            .add_step(steps::setup_cargo_config(Platform::Linux))
            // todo(ci): un-inline build_docs/action.yml here
            .add_step(steps::cache_rust_dependencies_namespace())
            .add_step(
                lychee_link_check("./docs/src/**/*"), // check markdown links
            )
            .map(steps::install_linux_dependencies)
            .add_step(steps::script("./script/generate-action-metadata"))
            .add_step(install_mdbook())
            .add_step(build_docs())
            .add_step(
                lychee_link_check("target/deploy/docs"), // check links in generated html
            ),
    ))
}
640
/// CI-infrastructure checks: shellcheck over repo scripts, actionlint over
/// workflow files, and verification that generated workflows are committed.
pub(crate) fn check_scripts() -> NamedJob {
    // Downloads actionlint via its official installer script; the step is
    // given id "get_actionlint" below so run_actionlint can reference it.
    fn download_actionlint() -> Step<Run> {
        named::bash(
            "bash <(curl https://raw.githubusercontent.com/rhysd/actionlint/main/scripts/download-actionlint.bash)",
        )
    }

    // Invokes the binary path published by the download step's output.
    fn run_actionlint() -> Step<Run> {
        named::bash(indoc::indoc! {r#"
            ${{ steps.get_actionlint.outputs.executable }} -color
        "#})
    }

    fn run_shellcheck() -> Step<Run> {
        named::bash("./script/shellcheck-scripts error")
    }

    // Regenerates workflows with `cargo xtask workflows` and fails if the
    // committed .github directory differs from the generated output.
    fn check_xtask_workflows() -> Step<Run> {
        named::bash(indoc::indoc! {r#"
            cargo xtask workflows
            if ! git diff --exit-code .github; then
              echo "Error: .github directory has uncommitted changes after running 'cargo xtask workflows'"
              echo "Please run 'cargo xtask workflows' locally and commit the changes"
              exit 1
            fi
        "#})
    }

    named::job(
        release_job(&[])
            .runs_on(runners::LINUX_SMALL)
            .add_step(steps::checkout_repo())
            .add_step(run_shellcheck())
            .add_step(download_actionlint().id("get_actionlint"))
            .add_step(run_actionlint())
            .add_step(check_xtask_workflows()),
    )
}