1use gh_workflow::{
2 Concurrency, Container, Event, Expression, Job, Port, PullRequest, Push, Run, Step, Use,
3 Workflow,
4};
5use indexmap::IndexMap;
6use indoc::formatdoc;
7
8use crate::tasks::workflows::{
9 steps::{
10 CommonJobConditions, cache_rust_dependencies_namespace, repository_owner_guard_expression,
11 use_clang,
12 },
13 vars::{self, PathCondition},
14};
15
16use super::{
17 runners::{self, Platform},
18 steps::{self, FluentBuilder, NamedJob, named, release_job},
19};
20
/// Builds the primary CI workflow ("run_tests").
///
/// An `orchestrate` job classifies the changed files against per-rule
/// regexes; each expensive job is wrapped in `PathCondition::guard` so it
/// only runs when its rule matched. `tests_pass` fans in the results of the
/// guarded jobs into a single required status check.
pub(crate) fn run_tests() -> Workflow {
    // Specify anything which should potentially skip full test suite in this regex:
    // - docs/
    // - script/update_top_ranking_issues/
    // - .github/ISSUE_TEMPLATE/
    // - .github/workflows/ (except the workflow generated here: the negative
    //   lookahead `(?!run_tests)` exempts `workflows/run_tests*` from the
    //   skip list, so edits to this workflow still run the full suite)
    let should_run_tests = PathCondition::inverted(
        "run_tests",
        r"^(docs/|script/update_top_ranking_issues/|\.github/(ISSUE_TEMPLATE|workflows/(?!run_tests)))",
    );
    let should_check_docs = PathCondition::new("run_docs", r"^(docs/|crates/.*\.rs)");
    let should_check_scripts = PathCondition::new(
        "run_action_checks",
        r"^\.github/(workflows/|actions/|actionlint.yml)|tooling/xtask|script/",
    );
    let should_check_licences =
        PathCondition::new("run_licenses", r"^(Cargo.lock|script/.*licenses)");

    // The orchestrate job emits one boolean output per rule (plus the
    // changed-packages filterset) that the guarded jobs condition on.
    let orchestrate = orchestrate(&[
        &should_check_scripts,
        &should_check_docs,
        &should_check_licences,
        &should_run_tests,
    ]);

    let mut jobs = vec![
        orchestrate,
        check_style(),
        should_run_tests.guard(clippy(Platform::Windows)),
        should_run_tests.guard(clippy(Platform::Linux)),
        should_run_tests.guard(clippy(Platform::Mac)),
        should_run_tests.guard(run_platform_tests(Platform::Windows)),
        should_run_tests.guard(run_platform_tests(Platform::Linux)),
        should_run_tests.guard(run_platform_tests(Platform::Mac)),
        should_run_tests.guard(doctests()),
        should_run_tests.guard(check_workspace_binaries()),
        should_run_tests.guard(check_wasm()),
        should_run_tests.guard(check_dependencies()), // could be more specific here?
        should_check_docs.guard(check_docs()),
        should_check_licences.guard(check_licenses()),
        should_check_scripts.guard(check_scripts()),
    ];
    // NOTE(review): `tests_pass` is computed from `jobs` *before* the
    // migrations job is pushed below, so the migrations job is part of the
    // workflow but NOT of the `tests_pass` gate — confirm this is intentional.
    let tests_pass = tests_pass(&jobs);

    jobs.push(should_run_tests.guard(check_postgres_and_protobuf_migrations())); // could be more specific here?

    named::workflow()
        .add_event(
            Event::default()
                .push(
                    Push::default()
                        .add_branch("main")
                        .add_branch("v[0-9]+.[0-9]+.x"),
                )
                .pull_request(PullRequest::default().add_branch("**")),
        )
        .concurrency(
            // One run per (workflow, ref); on `main` each commit gets its own
            // group (keyed by sha), elsewhere new pushes cancel in-flight runs.
            Concurrency::default()
                .group(concat!(
                    "${{ github.workflow }}-${{ github.ref_name }}-",
                    "${{ github.ref_name == 'main' && github.sha || 'anysha' }}"
                ))
                .cancel_in_progress(true),
        )
        .add_env(("CARGO_TERM_COLOR", "always"))
        .add_env(("RUST_BACKTRACE", 1))
        .add_env(("CARGO_INCREMENTAL", 0))
        .map(|mut workflow| {
            for job in jobs {
                workflow = workflow.add_job(job.name, job.job)
            }
            workflow
        })
        .add_job(tests_pass.name, tests_pass.job)
}
96
/// Builds the `orchestrate` job for the main repository: a bash step that
/// checks changed files against each rule's regex and sets a GitHub output
/// variable per rule, plus the `changed_packages` nextest filterset.
pub fn orchestrate(rules: &[&PathCondition]) -> NamedJob {
    orchestrate_impl(rules, true, false)
}
102
/// `orchestrate` variant for extension repositories: no package filterset,
/// and the diffed paths are filtered/stripped to the current working
/// directory (the extension may live in a subdirectory of the repo).
pub fn orchestrate_for_extension(rules: &[&PathCondition]) -> NamedJob {
    orchestrate_impl(rules, false, true)
}
106
/// Shared implementation of the `orchestrate` job.
///
/// Emits a single bash step that diffs the current revision against a
/// comparison point (PR merge base, or `HEAD~1` for direct pushes), then
/// evaluates each [`PathCondition`] regex against the changed-file list and
/// writes a `true`/`false` output per rule to `$GITHUB_OUTPUT`.
///
/// * `include_package_filter` — additionally compute a `changed_packages`
///   nextest filterset from the changed `crates/`/`tooling/` directories.
/// * `filter_by_working_directory` — restrict the diff to files under the
///   current working directory and strip that prefix (for repos where the
///   workflow runs from a subdirectory).
fn orchestrate_impl(
    rules: &[&PathCondition],
    include_package_filter: bool,
    filter_by_working_directory: bool,
) -> NamedJob {
    let name = "orchestrate".to_owned();
    let step_name = "filter".to_owned();
    let mut script = String::new();

    // Compute CHANGED_FILES: diff against the PR merge base when
    // $GITHUB_BASE_REF is set, otherwise against the previous commit.
    // --depth=350 bounds the fetch; merge-base fails if the base is further
    // away than that.
    script.push_str(indoc::indoc! {r#"
        set -euo pipefail
        if [ -z "$GITHUB_BASE_REF" ]; then
          echo "Not in a PR context (i.e., push to main/stable/preview)"
          COMPARE_REV="$(git rev-parse HEAD~1)"
        else
          echo "In a PR context comparing to pull_request.base.ref"
          git fetch origin "$GITHUB_BASE_REF" --depth=350
          COMPARE_REV="$(git merge-base "origin/${GITHUB_BASE_REF}" HEAD)"
        fi
        CHANGED_FILES="$(git diff --name-only "$COMPARE_REV" "$GITHUB_SHA")"

    "#});

    if filter_by_working_directory {
        script.push_str(indoc::indoc! {r#"
            # When running from a subdirectory, git diff returns repo-root-relative paths.
            # Filter to only files within the current working directory and strip the prefix.
            REPO_SUBDIR="$(git rev-parse --show-prefix)"
            REPO_SUBDIR="${REPO_SUBDIR%/}"
            if [ -n "$REPO_SUBDIR" ]; then
              CHANGED_FILES="$(echo "$CHANGED_FILES" | grep "^${REPO_SUBDIR}/" | sed "s|^${REPO_SUBDIR}/||" || true)"
            fi

        "#});
    }

    // check_pattern writes "<name>=true" if any changed file matches (or,
    // with -v, fails to match) the pattern, else "<name>=false". The
    // `A && B || C` form keeps a non-matching grep from tripping `set -e`.
    script.push_str(indoc::indoc! {r#"
        check_pattern() {
          local output_name="$1"
          local pattern="$2"
          local grep_arg="$3"

          echo "$CHANGED_FILES" | grep "$grep_arg" "$pattern" && \
            echo "${output_name}=true" >> "$GITHUB_OUTPUT" || \
            echo "${output_name}=false" >> "$GITHUB_OUTPUT"
        }

    "#});

    let mut outputs = IndexMap::new();

    if include_package_filter {
        // Compute the `changed_packages` output: empty means "run everything",
        // otherwise a nextest filterset of rdeps() clauses for the changed
        // packages. Falls back to "run everything" for non-PR builds and for
        // toolchain/cargo-config/root-manifest changes.
        script.push_str(indoc::indoc! {r#"
            # Check for changes that require full rebuild (no filter)
            # Direct pushes to main/stable/preview always run full suite
            if [ -z "$GITHUB_BASE_REF" ]; then
              echo "Not a PR, running full test suite"
              echo "changed_packages=" >> "$GITHUB_OUTPUT"
            elif echo "$CHANGED_FILES" | grep -qP '^(rust-toolchain\.toml|\.cargo/|\.github/|Cargo\.(toml|lock)$)'; then
              echo "Toolchain, cargo config, or root Cargo files changed, will run all tests"
              echo "changed_packages=" >> "$GITHUB_OUTPUT"
            else
              # Extract changed directories from file paths
              CHANGED_DIRS=$(echo "$CHANGED_FILES" | \
                grep -oP '^(crates|tooling)/\K[^/]+' | \
                sort -u || true)

              # Build directory-to-package mapping using cargo metadata
              DIR_TO_PKG=$(cargo metadata --format-version=1 --no-deps 2>/dev/null | \
                jq -r '.packages[] | select(.manifest_path | test("crates/|tooling/")) | "\(.manifest_path | capture("(crates|tooling)/(?<dir>[^/]+)") | .dir)=\(.name)"')

              # Map directory names to package names
              FILE_CHANGED_PKGS=""
              for dir in $CHANGED_DIRS; do
                pkg=$(echo "$DIR_TO_PKG" | grep "^${dir}=" | cut -d= -f2 | head -1)
                if [ -n "$pkg" ]; then
                  FILE_CHANGED_PKGS=$(printf '%s\n%s' "$FILE_CHANGED_PKGS" "$pkg")
                else
                  # Fall back to directory name if no mapping found
                  FILE_CHANGED_PKGS=$(printf '%s\n%s' "$FILE_CHANGED_PKGS" "$dir")
                fi
              done
              FILE_CHANGED_PKGS=$(echo "$FILE_CHANGED_PKGS" | grep -v '^$' | sort -u || true)

              # If assets/ changed, add crates that depend on those assets
              if echo "$CHANGED_FILES" | grep -qP '^assets/'; then
                FILE_CHANGED_PKGS=$(printf '%s\n%s\n%s\n%s' "$FILE_CHANGED_PKGS" "settings" "storybook" "assets" | sort -u)
              fi

              # Combine all changed packages
              ALL_CHANGED_PKGS=$(echo "$FILE_CHANGED_PKGS" | grep -v '^$' || true)

              if [ -z "$ALL_CHANGED_PKGS" ]; then
                echo "No package changes detected, will run all tests"
                echo "changed_packages=" >> "$GITHUB_OUTPUT"
              else
                # Build nextest filterset with rdeps for each package
                FILTERSET=$(echo "$ALL_CHANGED_PKGS" | \
                  sed 's/.*/rdeps(&)/' | \
                  tr '\n' '|' | \
                  sed 's/|$//')
                echo "Changed packages filterset: $FILTERSET"
                echo "changed_packages=$FILTERSET" >> "$GITHUB_OUTPUT"
              fi
            fi

        "#});

        outputs.insert(
            "changed_packages".to_owned(),
            format!("${{{{ steps.{}.outputs.changed_packages }}}}", step_name),
        );
    }

    for rule in rules {
        // A rule may be bound to at most one orchestrate job; record the
        // binding (used by `PathCondition::guard`) and panic on a double
        // registration.
        assert!(
            rule.set_by_step
                .borrow_mut()
                .replace(name.clone())
                .is_none()
        );
        // Output names must be unique across rules (and must not collide
        // with "changed_packages" inserted above).
        assert!(
            outputs
                .insert(
                    rule.name.to_owned(),
                    format!("${{{{ steps.{}.outputs.{} }}}}", step_name, rule.name)
                )
                .is_none()
        );

        // Inverted rules mean "skip only if ALL changed files match the
        // pattern": grep -v succeeds when some file falls outside it.
        let grep_arg = if rule.invert { "-qvP" } else { "-qP" };
        script.push_str(&format!(
            "check_pattern \"{}\" '{}' {}\n",
            rule.name, rule.pattern, grep_arg
        ));
    }

    let job = Job::default()
        .runs_on(runners::LINUX_SMALL)
        .with_repository_owner_guard()
        .outputs(outputs)
        // Deep history is needed on non-main refs so merge-base resolves.
        .add_step(steps::checkout_repo().with_deep_history_on_non_main())
        .add_step(Step::new(step_name.clone()).run(script).id(step_name));

    NamedJob { name, job }
}
253
254pub fn tests_pass(jobs: &[NamedJob]) -> NamedJob {
255 let mut script = String::from(indoc::indoc! {r#"
256 set +x
257 EXIT_CODE=0
258
259 check_result() {
260 echo "* $1: $2"
261 if [[ "$2" != "skipped" && "$2" != "success" ]]; then EXIT_CODE=1; fi
262 }
263
264 "#});
265
266 let env_entries: Vec<_> = jobs
267 .iter()
268 .map(|job| {
269 let env_name = format!("RESULT_{}", job.name.to_uppercase());
270 let env_value = format!("${{{{ needs.{}.result }}}}", job.name);
271 (env_name, env_value)
272 })
273 .collect();
274
275 script.push_str(
276 &jobs
277 .iter()
278 .zip(env_entries.iter())
279 .map(|(job, (env_name, _))| format!("check_result \"{}\" \"${}\"", job.name, env_name))
280 .collect::<Vec<_>>()
281 .join("\n"),
282 );
283
284 script.push_str("\n\nexit $EXIT_CODE\n");
285
286 let job = Job::default()
287 .runs_on(runners::LINUX_SMALL)
288 .needs(
289 jobs.iter()
290 .map(|j| j.name.to_string())
291 .collect::<Vec<String>>(),
292 )
293 .cond(repository_owner_guard_expression(true))
294 .add_step(
295 env_entries
296 .into_iter()
297 .fold(named::bash(&script), |step, env_item| {
298 step.add_env(env_item)
299 }),
300 );
301
302 named::job(job)
303}
304
// Pinned release asset (Linux x86_64 tarball) and tag of `ts_query_ls`, the
// Tree-sitter query tool used below to format-check query files.
const TS_QUERY_LS_FILE: &str = "ts_query_ls-x86_64-unknown-linux-gnu.tar.gz";
const CI_TS_QUERY_RELEASE: &str = "tags/v3.15.1";
307
308pub(crate) fn fetch_ts_query_ls() -> Step<Use> {
309 named::uses(
310 "dsaltares",
311 "fetch-gh-release-asset",
312 "aa37ae5c44d3c9820bc12fe675e8670ecd93bd1c",
313 ) // v1.1.1
314 .add_with(("repo", "ribru17/ts_query_ls"))
315 .add_with(("version", CI_TS_QUERY_RELEASE))
316 .add_with(("file", TS_QUERY_LS_FILE))
317}
318
/// Step that unpacks the fetched `ts_query_ls` tarball and runs its
/// formatter in `--check` mode; prints install guidance and fails the step
/// when any query file is unformatted.
pub(crate) fn run_ts_query_ls() -> Step<Run> {
    // `{{`/`}}` are escaped braces for `formatdoc!`; the trailing `false`
    // inside the `|| { ... }` block makes the step fail after the hints.
    named::bash(formatdoc!(
        r#"tar -xf "$GITHUB_WORKSPACE/{TS_QUERY_LS_FILE}" -C "$GITHUB_WORKSPACE"
        "$GITHUB_WORKSPACE/ts_query_ls" format --check . || {{
          echo "Found unformatted queries, please format them with ts_query_ls."
          echo "For easy use, install the Tree-sitter query extension:"
          echo "zed://extension/tree-sitter-query"
          false
        }}"#
    ))
}
330
/// Style checks: prettier, `cargo fmt`, TODO/keymap scripts, typo scanning,
/// and Tree-sitter query formatting.
fn check_style() -> NamedJob {
    // crate-ci/typos pinned by sha (v1.40.0). Kept as an inner fn: the
    // `named` helpers presumably derive step/job names from the enclosing
    // function, so inlining could rename the step — confirm before changing.
    fn check_for_typos() -> Step<Use> {
        named::uses(
            "crate-ci",
            "typos",
            "2d0ce569feab1f8752f1dde43cc2f2aa53236e06",
        ) // v1.40.0
        .with(("config", "./typos.toml"))
    }

    named::job(
        release_job(&[])
            .runs_on(runners::LINUX_MEDIUM)
            .add_step(steps::checkout_repo())
            .add_step(steps::cache_rust_dependencies_namespace())
            .add_step(steps::setup_pnpm())
            .add_step(steps::prettier())
            .add_step(steps::cargo_fmt())
            .add_step(steps::script("./script/check-todos"))
            .add_step(steps::script("./script/check-keymaps"))
            .add_step(check_for_typos())
            .add_step(fetch_ts_query_ls())
            .add_step(run_ts_query_ls()),
    )
}
356
/// Dependency hygiene: unused deps (cargo-machete), an up-to-date lockfile,
/// and GitHub's vulnerability review on PRs.
fn check_dependencies() -> NamedJob {
    // Both rs-cargo steps repeat the same pinned sha (v2). Deduplicating via
    // a shared helper is tempting, but the `named` helpers appear to derive
    // step names from the enclosing function — left as-is.
    fn install_cargo_machete() -> Step<Use> {
        named::uses(
            "clechasseur",
            "rs-cargo",
            "8435b10f6e71c2e3d4d3b7573003a8ce4bfc6386", // v2
        )
        .add_with(("command", "install"))
        .add_with(("args", "cargo-machete@0.7.0"))
    }

    fn run_cargo_machete() -> Step<Use> {
        named::uses(
            "clechasseur",
            "rs-cargo",
            "8435b10f6e71c2e3d4d3b7573003a8ce4bfc6386", // v2
        )
        .add_with(("command", "machete"))
    }

    // Fails if Cargo.lock is out of date with the workspace manifests.
    fn check_cargo_lock() -> Step<Run> {
        named::bash("cargo update --locked --workspace")
    }

    // Only meaningful on PRs (diff-based); license check is disabled here.
    fn check_vulnerable_dependencies() -> Step<Use> {
        named::uses(
            "actions",
            "dependency-review-action",
            "67d4f4bd7a9b17a0db54d2a7519187c65e339de8", // v4
        )
        .if_condition(Expression::new("github.event_name == 'pull_request'"))
        .with(("license-check", false))
    }

    named::job(use_clang(
        release_job(&[])
            .runs_on(runners::LINUX_SMALL)
            .add_step(steps::checkout_repo())
            .add_step(steps::cache_rust_dependencies_namespace())
            .add_step(install_cargo_machete())
            .add_step(run_cargo_machete())
            .add_step(check_cargo_lock())
            .add_step(check_vulnerable_dependencies()),
    ))
}
402
/// Verifies that `gpui_platform` type-checks for `wasm32-unknown-unknown`.
fn check_wasm() -> NamedJob {
    // Nightly is required for `-Zbuild-std`; the rust-src component provides
    // the std sources that build-std recompiles for the wasm target.
    fn install_nightly_wasm_toolchain() -> Step<Run> {
        named::bash(
            "rustup toolchain install nightly --component rust-src --target wasm32-unknown-unknown",
        )
    }

    fn cargo_check_wasm() -> Step<Run> {
        // RUSTFLAGS enable the atomics/bulk-memory/mutable-globals wasm
        // features for the target-specific build.
        named::bash(concat!(
            "cargo +nightly -Zbuild-std=std,panic_abort ",
            "check --target wasm32-unknown-unknown -p gpui_platform",
        ))
        .add_env((
            "CARGO_TARGET_WASM32_UNKNOWN_UNKNOWN_RUSTFLAGS",
            "-C target-feature=+atomics,+bulk-memory,+mutable-globals",
        ))
    }

    named::job(
        release_job(&[])
            .runs_on(runners::LINUX_LARGE)
            .add_step(steps::checkout_repo())
            .add_step(steps::setup_cargo_config(Platform::Linux))
            .add_step(steps::cache_rust_dependencies_namespace())
            .add_step(install_nightly_wasm_toolchain())
            .add_step(steps::setup_sccache(Platform::Linux))
            .add_step(cargo_check_wasm())
            .add_step(steps::show_sccache_stats(Platform::Linux))
            .add_step(steps::cleanup_cargo_config(Platform::Linux)),
    )
}
434
435fn check_workspace_binaries() -> NamedJob {
436 named::job(use_clang(
437 release_job(&[])
438 .runs_on(runners::LINUX_LARGE)
439 .add_step(steps::checkout_repo())
440 .add_step(steps::setup_cargo_config(Platform::Linux))
441 .add_step(steps::cache_rust_dependencies_namespace())
442 .map(steps::install_linux_dependencies)
443 .add_step(steps::setup_sccache(Platform::Linux))
444 .add_step(steps::script("cargo build -p collab"))
445 .add_step(steps::script("cargo build --workspace --bins --examples"))
446 .add_step(steps::show_sccache_stats(Platform::Linux))
447 .add_step(steps::cleanup_cargo_config(Platform::Linux)),
448 ))
449}
450
451pub(crate) fn clippy(platform: Platform) -> NamedJob {
452 let runner = match platform {
453 Platform::Windows => runners::WINDOWS_DEFAULT,
454 Platform::Linux => runners::LINUX_DEFAULT,
455 Platform::Mac => runners::MAC_DEFAULT,
456 };
457 let mut job = release_job(&[])
458 .runs_on(runner)
459 .add_step(steps::checkout_repo())
460 .add_step(steps::setup_cargo_config(platform))
461 .when(
462 platform == Platform::Linux || platform == Platform::Mac,
463 |this| this.add_step(steps::cache_rust_dependencies_namespace()),
464 )
465 .when(
466 platform == Platform::Linux,
467 steps::install_linux_dependencies,
468 )
469 .add_step(steps::setup_sccache(platform))
470 .add_step(steps::clippy(platform))
471 .add_step(steps::show_sccache_stats(platform));
472 if platform == Platform::Linux {
473 job = use_clang(job);
474 }
475 NamedJob {
476 name: format!("clippy_{platform}"),
477 job,
478 }
479}
480
/// Platform test job that restricts `cargo nextest` to the packages affected
/// by the change set (filterset produced by the `orchestrate` job).
pub(crate) fn run_platform_tests(platform: Platform) -> NamedJob {
    run_platform_tests_impl(platform, true)
}
484
/// Platform test job that always runs the full test suite (no package filter).
pub(crate) fn run_platform_tests_no_filter(platform: Platform) -> NamedJob {
    run_platform_tests_impl(platform, false)
}
488
489fn run_platform_tests_impl(platform: Platform, filter_packages: bool) -> NamedJob {
490 let runner = match platform {
491 Platform::Windows => runners::WINDOWS_DEFAULT,
492 Platform::Linux => runners::LINUX_DEFAULT,
493 Platform::Mac => runners::MAC_DEFAULT,
494 };
495 NamedJob {
496 name: format!("run_tests_{platform}"),
497 job: release_job(&[])
498 .runs_on(runner)
499 .when(platform == Platform::Linux, |job| {
500 job.add_service(
501 "postgres",
502 Container::new("postgres:15")
503 .add_env(("POSTGRES_HOST_AUTH_METHOD", "trust"))
504 .ports(vec![Port::Name("5432:5432".into())])
505 .options(
506 "--health-cmd pg_isready \
507 --health-interval 500ms \
508 --health-timeout 5s \
509 --health-retries 10",
510 ),
511 )
512 })
513 .add_step(steps::checkout_repo())
514 .add_step(steps::setup_cargo_config(platform))
515 .when(platform == Platform::Mac, |this| {
516 this.add_step(steps::cache_rust_dependencies_namespace())
517 })
518 .when(platform == Platform::Linux, |this| {
519 use_clang(this.add_step(steps::cache_rust_dependencies_namespace()))
520 })
521 .when(
522 platform == Platform::Linux,
523 steps::install_linux_dependencies,
524 )
525 .add_step(steps::setup_node())
526 .when(
527 platform == Platform::Linux || platform == Platform::Mac,
528 |job| job.add_step(steps::cargo_install_nextest()),
529 )
530 .add_step(steps::clear_target_dir_if_large(platform))
531 .add_step(steps::setup_sccache(platform))
532 .when(filter_packages, |job| {
533 job.add_step(
534 steps::cargo_nextest(platform).with_changed_packages_filter("orchestrate"),
535 )
536 })
537 .when(!filter_packages, |job| {
538 job.add_step(steps::cargo_nextest(platform))
539 })
540 .add_step(steps::show_sccache_stats(platform))
541 .add_step(steps::cleanup_cargo_config(platform)),
542 }
543}
544
/// Protobuf compatibility checks against the PR base: buf breaking, lint,
/// and format. NOTE(review): despite the name, no Postgres-migration step is
/// visible in this job — confirm whether one was removed or lives elsewhere.
pub(crate) fn check_postgres_and_protobuf_migrations() -> NamedJob {
    // Exports BUF_BASE_BRANCH for the breaking check. NOTE(review): in the
    // non-PR branch this is a merge-base commit SHA rather than a branch
    // name, and the PR branch's merge commit message says "main" even when
    // the base ref is another branch — confirm both are intended.
    fn ensure_fresh_merge() -> Step<Run> {
        named::bash(indoc::indoc! {r#"
            if [ -z "$GITHUB_BASE_REF" ];
            then
              echo "BUF_BASE_BRANCH=$(git merge-base origin/main HEAD)" >> "$GITHUB_ENV"
            else
              git checkout -B temp
              git merge -q "origin/$GITHUB_BASE_REF" -m "merge main into temp"
              echo "BUF_BASE_BRANCH=$GITHUB_BASE_REF" >> "$GITHUB_ENV"
            fi
        "#})
    }

    // NOTE(review): uses the mutable "v1" tag for the action ref, unlike the
    // sha-pinned actions elsewhere in this file.
    fn bufbuild_setup_action() -> Step<Use> {
        named::uses("bufbuild", "buf-setup-action", "v1")
            .add_with(("version", "v1.29.0"))
            .add_with(("github_token", vars::GITHUB_TOKEN))
    }

    // Compares crates/proto/proto/ against the same subdir at BUF_BASE_BRANCH.
    fn bufbuild_breaking_action() -> Step<Use> {
        named::uses("bufbuild", "buf-breaking-action", "v1").add_with(("input", "crates/proto/proto/"))
            .add_with(("against", "https://github.com/${GITHUB_REPOSITORY}.git#branch=${BUF_BASE_BRANCH},subdir=crates/proto/proto/"))
    }

    fn buf_lint() -> Step<Run> {
        named::bash("buf lint crates/proto/proto")
    }

    fn check_protobuf_formatting() -> Step<Run> {
        named::bash("buf format --diff --exit-code crates/proto/proto")
    }

    named::job(
        release_job(&[])
            .runs_on(runners::LINUX_DEFAULT)
            // Identity for the merge commit created by ensure_fresh_merge.
            .add_env(("GIT_AUTHOR_NAME", "Protobuf Action"))
            .add_env(("GIT_AUTHOR_EMAIL", "ci@zed.dev"))
            .add_env(("GIT_COMMITTER_NAME", "Protobuf Action"))
            .add_env(("GIT_COMMITTER_EMAIL", "ci@zed.dev"))
            // Full history so merge-base / branch comparisons work.
            .add_step(steps::checkout_repo().with_full_history())
            .add_step(ensure_fresh_merge())
            .add_step(bufbuild_setup_action())
            .add_step(bufbuild_breaking_action())
            .add_step(buf_lint())
            .add_step(check_protobuf_formatting()),
    )
}
593
/// Runs the workspace doctests on Linux (doctests are not covered by
/// nextest, so they get their own job).
fn doctests() -> NamedJob {
    fn run_doctests() -> Step<Run> {
        named::bash(indoc::indoc! {r#"
            cargo test --workspace --doc --no-fail-fast
        "#})
        .id("run_doctests")
    }

    named::job(use_clang(
        release_job(&[])
            .runs_on(runners::LINUX_DEFAULT)
            .add_step(steps::checkout_repo())
            .add_step(steps::cache_rust_dependencies_namespace())
            .map(steps::install_linux_dependencies)
            .add_step(steps::setup_cargo_config(Platform::Linux))
            .add_step(steps::setup_sccache(Platform::Linux))
            .add_step(run_doctests())
            .add_step(steps::show_sccache_stats(Platform::Linux))
            .add_step(steps::cleanup_cargo_config(Platform::Linux)),
    ))
}
615
616fn check_licenses() -> NamedJob {
617 named::job(
618 Job::default()
619 .runs_on(runners::LINUX_SMALL)
620 .add_step(steps::checkout_repo())
621 .add_step(steps::cache_rust_dependencies_namespace())
622 .add_step(steps::script("./script/check-licenses"))
623 .add_step(steps::script("./script/generate-licenses")),
624 )
625}
626
/// Builds the docs with mdbook and link-checks both the markdown sources and
/// the generated HTML.
fn check_docs() -> NamedJob {
    // lychee link checker pinned by sha (v2.4.1); external http(s) links are
    // excluded so only intra-doc links are validated.
    fn lychee_link_check(dir: &str) -> Step<Use> {
        named::uses(
            "lycheeverse",
            "lychee-action",
            "82202e5e9c2f4ef1a55a3d02563e1cb6041e5332",
        ) // v2.4.1
        .add_with(("args", format!("--no-progress --exclude '^http' '{dir}'")))
        .add_with(("fail", true))
        .add_with(("jobSummary", false))
    }

    fn install_mdbook() -> Step<Use> {
        named::uses(
            "peaceiris",
            "actions-mdbook",
            "ee69d230fe19748b7abf22df32acaa93833fad08", // v2
        )
        .with(("mdbook-version", "0.4.37"))
    }

    fn build_docs() -> Step<Run> {
        named::bash(indoc::indoc! {r#"
            mkdir -p target/deploy
            mdbook build ./docs --dest-dir=../target/deploy/docs/
        "#})
    }

    named::job(use_clang(
        release_job(&[])
            .runs_on(runners::LINUX_LARGE)
            .add_step(steps::checkout_repo())
            .add_step(steps::setup_cargo_config(Platform::Linux))
            // todo(ci): un-inline build_docs/action.yml here
            .add_step(steps::cache_rust_dependencies_namespace())
            .add_step(
                lychee_link_check("./docs/src/**/*"), // check markdown links
            )
            .map(steps::install_linux_dependencies)
            .add_step(steps::script("./script/generate-action-metadata"))
            .add_step(install_mdbook())
            .add_step(build_docs())
            .add_step(
                lychee_link_check("target/deploy/docs"), // check links in generated html
            ),
    ))
}
674
/// Lints shell scripts and workflow files, and verifies the generated
/// workflows in .github are up to date with `cargo xtask workflows`.
pub(crate) fn check_scripts() -> NamedJob {
    // Downloads actionlint; the helper script exposes the binary path via a
    // step output ("executable") consumed by run_actionlint below.
    fn download_actionlint() -> Step<Run> {
        named::bash(
            "bash <(curl https://raw.githubusercontent.com/rhysd/actionlint/main/scripts/download-actionlint.bash)",
        )
    }

    fn run_actionlint() -> Step<Run> {
        named::bash(r#""$ACTIONLINT_BIN" -color"#).add_env((
            "ACTIONLINT_BIN",
            "${{ steps.get_actionlint.outputs.executable }}",
        ))
    }

    fn run_shellcheck() -> Step<Run> {
        named::bash("./script/shellcheck-scripts error")
    }

    // Regenerates the workflows and fails if .github has drifted from what
    // `cargo xtask workflows` produces.
    fn check_xtask_workflows() -> Step<Run> {
        named::bash(indoc::indoc! {r#"
            cargo xtask workflows
            if ! git diff --exit-code .github; then
              echo "Error: .github directory has uncommitted changes after running 'cargo xtask workflows'"
              echo "Please run 'cargo xtask workflows' locally and commit the changes"
              exit 1
            fi
        "#})
    }

    named::job(
        release_job(&[])
            .runs_on(runners::LINUX_SMALL)
            .add_step(steps::checkout_repo())
            .add_step(run_shellcheck())
            // The id must match the `steps.get_actionlint` reference above.
            .add_step(download_actionlint().id("get_actionlint"))
            .add_step(run_actionlint())
            // NOTE(review): imported directly rather than via `steps::` as
            // elsewhere in this file — harmless, but inconsistent.
            .add_step(cache_rust_dependencies_namespace())
            .add_step(check_xtask_workflows()),
    )
}