1use gh_workflow::{
2 Container, Event, Expression, Input, Job, Level, MergeGroup, Permissions, Port, PullRequest,
3 Push, Run, Step, Strategy, Use, UsesJob, Workflow,
4};
5use indexmap::IndexMap;
6use indoc::formatdoc;
7use serde_json::json;
8
9use crate::tasks::workflows::{
10 steps::{
11 CommonJobConditions, cache_rust_dependencies_namespace, repository_owner_guard_expression,
12 use_clang,
13 },
14 vars::{self, PathCondition},
15};
16
17use super::{
18 runners::{self, Platform},
19 steps::{self, FluentBuilder, NamedJob, named, release_job},
20};
21
22pub(crate) fn run_tests() -> Workflow {
23 // Specify anything which should potentially skip full test suite in this regex:
24 // - docs/
25 // - script/update_top_ranking_issues/
26 // - .github/ISSUE_TEMPLATE/
27 // - .github/workflows/ (except .github/workflows/ci.yml)
28 // - extensions/ (these have their own test workflow)
29 let should_run_tests = PathCondition::inverted(
30 "run_tests",
31 r"^(docs/|script/update_top_ranking_issues/|\.github/(ISSUE_TEMPLATE|workflows/(?!run_tests))|extensions/)",
32 );
33 let should_check_docs = PathCondition::new("run_docs", r"^(docs/|crates/.*\.rs)");
34 let should_check_scripts = PathCondition::new(
35 "run_action_checks",
36 r"^\.github/(workflows/|actions/|actionlint.yml)|tooling/xtask|script/",
37 );
38 let should_check_licences =
39 PathCondition::new("run_licenses", r"^(Cargo.lock|script/.*licenses)");
40
41 let orchestrate = orchestrate(&[
42 &should_check_scripts,
43 &should_check_docs,
44 &should_check_licences,
45 &should_run_tests,
46 ]);
47
48 let mut jobs = vec![
49 orchestrate,
50 check_style(),
51 should_run_tests
52 .and_not_in_merge_queue()
53 .guard(clippy(Platform::Windows)),
54 should_run_tests
55 .and_not_in_merge_queue()
56 .guard(clippy(Platform::Linux)),
57 should_run_tests
58 .and_not_in_merge_queue()
59 .guard(clippy(Platform::Mac)),
60 should_run_tests
61 .and_not_in_merge_queue()
62 .guard(run_platform_tests(Platform::Windows)),
63 should_run_tests
64 .and_not_in_merge_queue()
65 .guard(run_platform_tests(Platform::Linux)),
66 should_run_tests
67 .and_not_in_merge_queue()
68 .guard(run_platform_tests(Platform::Mac)),
69 should_run_tests.and_not_in_merge_queue().guard(doctests()),
70 should_run_tests
71 .and_not_in_merge_queue()
72 .guard(check_workspace_binaries()),
73 should_run_tests
74 .and_not_in_merge_queue()
75 .guard(check_wasm()),
76 should_run_tests
77 .and_not_in_merge_queue()
78 .guard(check_dependencies()), // could be more specific here?
79 should_check_docs.and_always().guard(check_docs()),
80 should_check_licences
81 .and_not_in_merge_queue()
82 .guard(check_licenses()),
83 should_check_scripts.and_always().guard(check_scripts()),
84 ];
85 let ext_tests = extension_tests();
86 let tests_pass = tests_pass(&jobs, &[&ext_tests.name]);
87
88 // TODO: For merge queues, this should fail in the merge queue context
89 jobs.push(
90 should_run_tests
91 .and_always()
92 .guard(check_postgres_and_protobuf_migrations()),
93 ); // could be more specific here?
94
95 named::workflow()
96 .add_event(
97 Event::default()
98 .push(
99 Push::default()
100 .add_branch("main")
101 .add_branch("v[0-9]+.[0-9]+.x"),
102 )
103 .pull_request(PullRequest::default().add_branch("**"))
104 .merge_group(MergeGroup::default()),
105 )
106 .concurrency(vars::one_workflow_per_non_main_branch())
107 .add_env(("CARGO_TERM_COLOR", "always"))
108 .add_env(("RUST_BACKTRACE", 1))
109 .add_env(("CARGO_INCREMENTAL", 0))
110 .map(|mut workflow| {
111 for job in jobs {
112 workflow = workflow.add_job(job.name, job.job)
113 }
114 workflow
115 })
116 .add_job(ext_tests.name, ext_tests.job)
117 .add_job(tests_pass.name, tests_pass.job)
118}
119
120/// Controls which features `orchestrate_impl` includes in the generated script.
/// Controls which features `orchestrate_impl` includes in the generated script.
//
// Derives follow the "derive eagerly" convention: the enum is a plain
// two-variant flag, so `Debug`, `Clone`, and `Copy` are free and make it
// easier to pass around and log.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum OrchestrateTarget {
    /// For the main Zed repo: includes the cargo package filter and extension
    /// change detection, but no working-directory scoping.
    ZedRepo,
    /// For individual extension repos: scopes changed-file detection to the
    /// working directory, with no package filter or extension detection.
    Extension,
}
130
// Generates a bash script that checks changed files against regex patterns
// and sets GitHub output variables accordingly
/// Orchestrate job for the main Zed repository: includes the cargo package
/// filter and changed-extension detection (see [`OrchestrateTarget::ZedRepo`]).
pub fn orchestrate(rules: &[&PathCondition]) -> NamedJob {
    orchestrate_impl(rules, OrchestrateTarget::ZedRepo)
}
136
/// Orchestrate job for individual extension repositories: changed-file
/// detection is scoped to the current working directory, with no package
/// filter (see [`OrchestrateTarget::Extension`]).
pub fn orchestrate_for_extension(rules: &[&PathCondition]) -> NamedJob {
    orchestrate_impl(rules, OrchestrateTarget::Extension)
}
140
/// Shared implementation for [`orchestrate`] / [`orchestrate_for_extension`]:
/// builds a single "filter" step whose bash script diffs the current revision
/// against the PR merge-base (or `HEAD~1` outside a PR), matches the changed
/// files against each rule's regex, and publishes one `true`/`false` job
/// output per rule — plus `changed_packages` and `changed_extensions` when
/// targeting the main repo.
fn orchestrate_impl(rules: &[&PathCondition], target: OrchestrateTarget) -> NamedJob {
    let name = "orchestrate".to_owned();
    // Id of the single script step; every job output reads from this step.
    let step_name = "filter".to_owned();
    let mut script = String::new();

    // Preamble: compute $CHANGED_FILES. In a PR, compare against the
    // merge-base with the base branch; on a direct push, against HEAD~1.
    script.push_str(indoc::indoc! {r#"
        set -euo pipefail
        if [ -z "$GITHUB_BASE_REF" ]; then
          echo "Not in a PR context (i.e., push to main/stable/preview)"
          COMPARE_REV="$(git rev-parse HEAD~1)"
        else
          echo "In a PR context comparing to pull_request.base.ref"
          git fetch origin "$GITHUB_BASE_REF" --depth=350
          COMPARE_REV="$(git merge-base "origin/${GITHUB_BASE_REF}" HEAD)"
        fi
        CHANGED_FILES="$(git diff --name-only "$COMPARE_REV" "$GITHUB_SHA")"

    "#});

    if target == OrchestrateTarget::Extension {
        // Extension repos run this script from a subdirectory; rebase the
        // diff output onto that directory so the rule regexes stay relative.
        script.push_str(indoc::indoc! {r#"
            # When running from a subdirectory, git diff returns repo-root-relative paths.
            # Filter to only files within the current working directory and strip the prefix.
            REPO_SUBDIR="$(git rev-parse --show-prefix)"
            REPO_SUBDIR="${REPO_SUBDIR%/}"
            if [ -n "$REPO_SUBDIR" ]; then
              CHANGED_FILES="$(echo "$CHANGED_FILES" | grep "^${REPO_SUBDIR}/" | sed "s|^${REPO_SUBDIR}/||" || true)"
            fi

        "#});
    }

    // Helper used by the per-rule lines appended below: writes
    // `<output_name>=true|false` depending on whether $CHANGED_FILES matches
    // the pattern. The `&& ... || ...` chain keeps a non-matching grep from
    // aborting the script under `set -e`.
    script.push_str(indoc::indoc! {r#"
        check_pattern() {
          local output_name="$1"
          local pattern="$2"
          local grep_arg="$3"

          echo "$CHANGED_FILES" | grep "$grep_arg" "$pattern" && \
            echo "${output_name}=true" >> "$GITHUB_OUTPUT" || \
            echo "${output_name}=false" >> "$GITHUB_OUTPUT"
        }

    "#});

    let mut outputs = IndexMap::new();

    if target == OrchestrateTarget::ZedRepo {
        // Package-level test filtering: map changed crates/tooling directories
        // to cargo package names and emit a nextest `rdeps(...)` filterset in
        // `changed_packages`. An empty value means "run everything".
        script.push_str(indoc::indoc! {r#"
            # Check for changes that require full rebuild (no filter)
            # Direct pushes to main/stable/preview always run full suite
            if [ -z "$GITHUB_BASE_REF" ]; then
              echo "Not a PR, running full test suite"
              echo "changed_packages=" >> "$GITHUB_OUTPUT"
            elif echo "$CHANGED_FILES" | grep -qP '^(rust-toolchain\.toml|\.cargo/|\.github/|Cargo\.(toml|lock)$)'; then
              echo "Toolchain, cargo config, or root Cargo files changed, will run all tests"
              echo "changed_packages=" >> "$GITHUB_OUTPUT"
            else
              # Extract changed directories from file paths
              CHANGED_DIRS=$(echo "$CHANGED_FILES" | \
                grep -oP '^(crates|tooling)/\K[^/]+' | \
                sort -u || true)

              # Build directory-to-package mapping using cargo metadata
              DIR_TO_PKG=$(cargo metadata --format-version=1 --no-deps 2>/dev/null | \
                jq -r '.packages[] | select(.manifest_path | test("crates/|tooling/")) | "\(.manifest_path | capture("(crates|tooling)/(?<dir>[^/]+)") | .dir)=\(.name)"')

              # Map directory names to package names
              FILE_CHANGED_PKGS=""
              for dir in $CHANGED_DIRS; do
                pkg=$(echo "$DIR_TO_PKG" | grep "^${dir}=" | cut -d= -f2 | head -1)
                if [ -n "$pkg" ]; then
                  FILE_CHANGED_PKGS=$(printf '%s\n%s' "$FILE_CHANGED_PKGS" "$pkg")
                else
                  # Fall back to directory name if no mapping found
                  FILE_CHANGED_PKGS=$(printf '%s\n%s' "$FILE_CHANGED_PKGS" "$dir")
                fi
              done
              FILE_CHANGED_PKGS=$(echo "$FILE_CHANGED_PKGS" | grep -v '^$' | sort -u || true)

              # If assets/ changed, add crates that depend on those assets
              if echo "$CHANGED_FILES" | grep -qP '^assets/'; then
                FILE_CHANGED_PKGS=$(printf '%s\n%s\n%s\n%s' "$FILE_CHANGED_PKGS" "settings" "storybook" "assets" | sort -u)
              fi

              # Combine all changed packages
              ALL_CHANGED_PKGS=$(echo "$FILE_CHANGED_PKGS" | grep -v '^$' || true)

              if [ -z "$ALL_CHANGED_PKGS" ]; then
                echo "No package changes detected, will run all tests"
                echo "changed_packages=" >> "$GITHUB_OUTPUT"
              else
                # Build nextest filterset with rdeps for each package
                FILTERSET=$(echo "$ALL_CHANGED_PKGS" | \
                  sed 's/.*/rdeps(&)/' | \
                  tr '\n' '|' | \
                  sed 's/|$//')
                echo "Changed packages filterset: $FILTERSET"
                echo "changed_packages=$FILTERSET" >> "$GITHUB_OUTPUT"
              fi
            fi

        "#});

        outputs.insert(
            "changed_packages".to_owned(),
            format!("${{{{ steps.{}.outputs.changed_packages }}}}", step_name),
        );
    }

    for rule in rules {
        // Each rule may be wired to at most one orchestrate job: record this
        // job's name in the rule's RefCell and assert nothing was there before.
        assert!(
            rule.set_by_step
                .borrow_mut()
                .replace(name.clone())
                .is_none()
        );
        // Each rule also becomes a job output named after the rule; duplicate
        // rule names would silently shadow each other, so assert uniqueness.
        assert!(
            outputs
                .insert(
                    rule.name.to_owned(),
                    format!("${{{{ steps.{}.outputs.{} }}}}", step_name, rule.name)
                )
                .is_none()
        );

        // Inverted rules report "true" when NO changed file matches (grep -v).
        let grep_arg = if rule.invert { "-qvP" } else { "-qP" };
        script.push_str(&format!(
            "check_pattern \"{}\" '{}' {}\n",
            rule.name, rule.pattern, grep_arg
        ));
    }

    if target == OrchestrateTarget::ZedRepo {
        // Detect which extensions/ directories changed so extension tests can
        // fan out over them as a matrix (see `extension_tests`).
        script.push_str(DETECT_CHANGED_EXTENSIONS_SCRIPT);
        script.push_str("echo \"changed_extensions=$EXTENSIONS_JSON\" >> \"$GITHUB_OUTPUT\"\n");

        outputs.insert(
            "changed_extensions".to_owned(),
            format!("${{{{ steps.{}.outputs.changed_extensions }}}}", step_name),
        );
    }

    let job = Job::default()
        .runs_on(runners::LINUX_SMALL)
        .with_repository_owner_guard()
        .outputs(outputs)
        // Deep history is needed so the merge-base / HEAD~1 comparison works.
        .add_step(steps::checkout_repo().with_deep_history_on_non_main())
        .add_step(Step::new(step_name.clone()).run(script).id(step_name));

    NamedJob { name, job }
}
293
294pub fn tests_pass(jobs: &[NamedJob], extra_job_names: &[&str]) -> NamedJob {
295 let mut script = String::from(indoc::indoc! {r#"
296 set +x
297 EXIT_CODE=0
298
299 check_result() {
300 echo "* $1: $2"
301 if [[ "$2" != "skipped" && "$2" != "success" ]]; then EXIT_CODE=1; fi
302 }
303
304 "#});
305
306 let all_names: Vec<&str> = jobs
307 .iter()
308 .map(|job| job.name.as_str())
309 .chain(extra_job_names.iter().copied())
310 .collect();
311
312 let env_entries: Vec<_> = all_names
313 .iter()
314 .map(|name| {
315 let env_name = format!("RESULT_{}", name.to_uppercase());
316 let env_value = format!("${{{{ needs.{}.result }}}}", name);
317 (env_name, env_value)
318 })
319 .collect();
320
321 script.push_str(
322 &all_names
323 .iter()
324 .zip(env_entries.iter())
325 .map(|(name, (env_name, _))| format!("check_result \"{}\" \"${}\"", name, env_name))
326 .collect::<Vec<_>>()
327 .join("\n"),
328 );
329
330 script.push_str("\n\nexit $EXIT_CODE\n");
331
332 let job = Job::default()
333 .runs_on(runners::LINUX_SMALL)
334 .needs(
335 all_names
336 .iter()
337 .map(|name| name.to_string())
338 .collect::<Vec<String>>(),
339 )
340 .cond(repository_owner_guard_expression(true))
341 .add_step(
342 env_entries
343 .into_iter()
344 .fold(named::bash(&script), |step, env_item| {
345 step.add_env(env_item)
346 }),
347 );
348
349 named::job(job)
350}
351
/// Bash script snippet that detects changed extension directories from `$CHANGED_FILES`.
/// Assumes `$CHANGED_FILES` is already set. Sets `$EXTENSIONS_JSON` to a JSON array of
/// changed extension paths. Callers are responsible for writing the result to `$GITHUB_OUTPUT`.
pub(crate) const DETECT_CHANGED_EXTENSIONS_SCRIPT: &str = indoc::indoc! {r#"
    # Detect changed extension directories (excluding extensions/workflows)
    CHANGED_EXTENSIONS=$(echo "$CHANGED_FILES" | grep -oP '^extensions/[^/]+(?=/)' | sort -u | grep -v '^extensions/workflows$' || true)
    if [ -n "$CHANGED_EXTENSIONS" ]; then
      EXTENSIONS_JSON=$(echo "$CHANGED_EXTENSIONS" | jq -R -s -c 'split("\n") | map(select(length > 0))')
    else
      EXTENSIONS_JSON="[]"
    fi
"#};

/// File name of the prebuilt `ts_query_ls` release asset downloaded in CI.
const TS_QUERY_LS_FILE: &str = "ts_query_ls-x86_64-unknown-linux-gnu.tar.gz";
/// Pinned `ts_query_ls` GitHub release tag passed to fetch-gh-release-asset.
const CI_TS_QUERY_RELEASE: &str = "tags/v3.15.1";
367
/// Step that downloads the pinned `ts_query_ls` release asset
/// ([`CI_TS_QUERY_RELEASE`], [`TS_QUERY_LS_FILE`]) from GitHub.
pub(crate) fn fetch_ts_query_ls() -> Step<Use> {
    named::uses(
        "dsaltares",
        "fetch-gh-release-asset",
        "aa37ae5c44d3c9820bc12fe675e8670ecd93bd1c",
    ) // v1.1.1
    .add_with(("repo", "ribru17/ts_query_ls"))
    .add_with(("version", CI_TS_QUERY_RELEASE))
    .add_with(("file", TS_QUERY_LS_FILE))
}
378
/// Step that unpacks the downloaded `ts_query_ls` archive and runs its
/// `format --check` over the repository, failing with a hint if any
/// tree-sitter query files are unformatted.
pub(crate) fn run_ts_query_ls() -> Step<Run> {
    named::bash(formatdoc!(
        r#"tar -xf "$GITHUB_WORKSPACE/{TS_QUERY_LS_FILE}" -C "$GITHUB_WORKSPACE"
        "$GITHUB_WORKSPACE/ts_query_ls" format --check . || {{
            echo "Found unformatted queries, please format them with ts_query_ls."
            echo "For easy use, install the Tree-sitter query extension:"
            echo "zed://extension/tree-sitter-query"
            false
        }}"#
    ))
}
390
/// Style checks: prettier, `cargo fmt`, TODO/keymap scripts, typo detection,
/// and tree-sitter query formatting.
fn check_style() -> NamedJob {
    // NOTE(review): kept as a nested fn rather than inlined — the `named::*`
    // helpers appear to derive step names from the enclosing function name;
    // confirm before restructuring.
    fn check_for_typos() -> Step<Use> {
        named::uses(
            "crate-ci",
            "typos",
            "2d0ce569feab1f8752f1dde43cc2f2aa53236e06",
        ) // v1.40.0
        .with(("config", "./typos.toml"))
    }

    named::job(
        release_job(&[])
            .runs_on(runners::LINUX_MEDIUM)
            .add_step(steps::checkout_repo())
            .add_step(steps::cache_rust_dependencies_namespace())
            .add_step(steps::setup_pnpm())
            .add_step(steps::prettier())
            .add_step(steps::cargo_fmt())
            .add_step(steps::script("./script/check-todos"))
            .add_step(steps::script("./script/check-keymaps"))
            .add_step(check_for_typos())
            .add_step(fetch_ts_query_ls())
            .add_step(run_ts_query_ls()),
    )
}
416
/// Dependency hygiene: unused-dependency detection (cargo-machete), a
/// `Cargo.lock` freshness check, and GitHub's dependency review on PRs.
fn check_dependencies() -> NamedJob {
    /// Installs cargo-machete at a pinned version.
    fn install_cargo_machete() -> Step<Use> {
        named::uses(
            "clechasseur",
            "rs-cargo",
            "8435b10f6e71c2e3d4d3b7573003a8ce4bfc6386", // v2
        )
        .add_with(("command", "install"))
        .add_with(("args", "cargo-machete@0.7.0"))
    }

    /// Runs `cargo machete` to flag unused dependencies.
    fn run_cargo_machete() -> Step<Use> {
        named::uses(
            "clechasseur",
            "rs-cargo",
            "8435b10f6e71c2e3d4d3b7573003a8ce4bfc6386", // v2
        )
        .add_with(("command", "machete"))
    }

    /// `--locked` makes `cargo update` fail if Cargo.lock is out of date.
    fn check_cargo_lock() -> Step<Run> {
        named::bash("cargo update --locked --workspace")
    }

    /// Dependency review only makes sense against a PR diff, hence the
    /// `pull_request` condition.
    fn check_vulnerable_dependencies() -> Step<Use> {
        named::uses(
            "actions",
            "dependency-review-action",
            "67d4f4bd7a9b17a0db54d2a7519187c65e339de8", // v4
        )
        .if_condition(Expression::new("github.event_name == 'pull_request'"))
        .with(("license-check", false))
    }

    named::job(use_clang(
        release_job(&[])
            .runs_on(runners::LINUX_SMALL)
            .add_step(steps::checkout_repo())
            .add_step(steps::cache_rust_dependencies_namespace())
            .add_step(install_cargo_machete())
            .add_step(run_cargo_machete())
            .add_step(check_cargo_lock())
            .add_step(check_vulnerable_dependencies()),
    ))
}
462
/// Verifies `gpui_platform` compiles for `wasm32-unknown-unknown` using the
/// nightly toolchain with `-Zbuild-std` (needed to rebuild std with the
/// atomics/bulk-memory/mutable-globals target features).
fn check_wasm() -> NamedJob {
    fn install_nightly_wasm_toolchain() -> Step<Run> {
        named::bash(
            "rustup toolchain install nightly --component rust-src --target wasm32-unknown-unknown",
        )
    }

    fn cargo_check_wasm() -> Step<Run> {
        named::bash(concat!(
            "cargo +nightly -Zbuild-std=std,panic_abort ",
            "check --target wasm32-unknown-unknown -p gpui_platform",
        ))
        // Target features required for threaded wasm; applied only to the
        // wasm target via the target-specific RUSTFLAGS variable.
        .add_env((
            "CARGO_TARGET_WASM32_UNKNOWN_UNKNOWN_RUSTFLAGS",
            "-C target-feature=+atomics,+bulk-memory,+mutable-globals",
        ))
    }

    named::job(
        release_job(&[])
            .runs_on(runners::LINUX_LARGE)
            .add_step(steps::checkout_repo())
            .add_step(steps::setup_cargo_config(Platform::Linux))
            .add_step(steps::cache_rust_dependencies_namespace())
            .add_step(install_nightly_wasm_toolchain())
            .add_step(steps::setup_sccache(Platform::Linux))
            .add_step(cargo_check_wasm())
            .add_step(steps::show_sccache_stats(Platform::Linux))
            .add_step(steps::cleanup_cargo_config(Platform::Linux)),
    )
}
494
/// Builds every workspace binary and example (plus `collab` explicitly) on
/// Linux to catch compile breakage outside the tested crates.
fn check_workspace_binaries() -> NamedJob {
    named::job(use_clang(
        release_job(&[])
            .runs_on(runners::LINUX_LARGE)
            .add_step(steps::checkout_repo())
            .add_step(steps::setup_cargo_config(Platform::Linux))
            .add_step(steps::cache_rust_dependencies_namespace())
            .map(steps::install_linux_dependencies)
            .add_step(steps::setup_sccache(Platform::Linux))
            .add_step(steps::script("cargo build -p collab"))
            .add_step(steps::script("cargo build --workspace --bins --examples"))
            .add_step(steps::show_sccache_stats(Platform::Linux))
            .add_step(steps::cleanup_cargo_config(Platform::Linux)),
    ))
}
510
511pub(crate) fn clippy(platform: Platform) -> NamedJob {
512 let runner = match platform {
513 Platform::Windows => runners::WINDOWS_DEFAULT,
514 Platform::Linux => runners::LINUX_DEFAULT,
515 Platform::Mac => runners::MAC_DEFAULT,
516 };
517 let mut job = release_job(&[])
518 .runs_on(runner)
519 .add_step(steps::checkout_repo())
520 .add_step(steps::setup_cargo_config(platform))
521 .when(
522 platform == Platform::Linux || platform == Platform::Mac,
523 |this| this.add_step(steps::cache_rust_dependencies_namespace()),
524 )
525 .when(
526 platform == Platform::Linux,
527 steps::install_linux_dependencies,
528 )
529 .add_step(steps::setup_sccache(platform))
530 .add_step(steps::clippy(platform))
531 .add_step(steps::show_sccache_stats(platform));
532 if platform == Platform::Linux {
533 job = use_clang(job);
534 }
535 NamedJob {
536 name: format!("clippy_{platform}"),
537 job,
538 }
539}
540
/// Per-platform test job that restricts nextest to packages affected by the
/// change (via the orchestrate job's `changed_packages` filterset).
pub(crate) fn run_platform_tests(platform: Platform) -> NamedJob {
    run_platform_tests_impl(platform, true)
}
544
/// Per-platform test job that always runs the full test suite, ignoring the
/// changed-packages filterset.
pub(crate) fn run_platform_tests_no_filter(platform: Platform) -> NamedJob {
    run_platform_tests_impl(platform, false)
}
548
/// Shared implementation for the per-platform test jobs (`run_tests_<platform>`).
///
/// When `filter_packages` is true, nextest runs with the `changed_packages`
/// filterset produced by the `orchestrate` job; otherwise the suite runs
/// unfiltered.
fn run_platform_tests_impl(platform: Platform, filter_packages: bool) -> NamedJob {
    let runner = match platform {
        Platform::Windows => runners::WINDOWS_DEFAULT,
        Platform::Linux => runners::LINUX_DEFAULT,
        Platform::Mac => runners::MAC_DEFAULT,
    };
    NamedJob {
        name: format!("run_tests_{platform}"),
        job: release_job(&[])
            .runs_on(runner)
            // Linux runs with a postgres 15 service container; presumably for
            // database-backed tests — confirm which suites need it.
            .when(platform == Platform::Linux, |job| {
                job.add_service(
                    "postgres",
                    Container::new("postgres:15")
                        .add_env(("POSTGRES_HOST_AUTH_METHOD", "trust"))
                        .ports(vec![Port::Name("5432:5432".into())])
                        .options(
                            "--health-cmd pg_isready \
                            --health-interval 500ms \
                            --health-timeout 5s \
                            --health-retries 10",
                        ),
                )
            })
            .add_step(steps::checkout_repo())
            .add_step(steps::setup_cargo_config(platform))
            .when(platform == Platform::Mac, |this| {
                this.add_step(steps::cache_rust_dependencies_namespace())
            })
            // Linux gets the dependency cache AND the clang toolchain.
            .when(platform == Platform::Linux, |this| {
                use_clang(this.add_step(steps::cache_rust_dependencies_namespace()))
            })
            .when(
                platform == Platform::Linux,
                steps::install_linux_dependencies,
            )
            .add_step(steps::setup_node())
            // nextest is installed explicitly on Linux/Mac only; presumably
            // the Windows runner image already provides it — TODO confirm.
            .when(
                platform == Platform::Linux || platform == Platform::Mac,
                |job| job.add_step(steps::cargo_install_nextest()),
            )
            .add_step(steps::clear_target_dir_if_large(platform))
            .add_step(steps::setup_sccache(platform))
            // Exactly one of these two nextest steps is added, depending on
            // `filter_packages`; "orchestrate" names the job whose
            // `changed_packages` output supplies the filterset.
            .when(filter_packages, |job| {
                job.add_step(
                    steps::cargo_nextest(platform).with_changed_packages_filter("orchestrate"),
                )
            })
            .when(!filter_packages, |job| {
                job.add_step(steps::cargo_nextest(platform))
            })
            .add_step(steps::show_sccache_stats(platform))
            .add_step(steps::cleanup_cargo_config(platform)),
    }
}
604
/// Checks protobuf backwards-compatibility and formatting with `buf`,
/// comparing `crates/proto/proto/` against the PR base branch (or the
/// merge-base with `main` outside a PR).
pub(crate) fn check_postgres_and_protobuf_migrations() -> NamedJob {
    /// Establishes `$BUF_BASE_BRANCH` for the breaking-change comparison. In
    /// a PR, first merges the base branch into a temp branch so the diff is
    /// against an up-to-date base.
    fn ensure_fresh_merge() -> Step<Run> {
        named::bash(indoc::indoc! {r#"
            if [ -z "$GITHUB_BASE_REF" ];
            then
              echo "BUF_BASE_BRANCH=$(git merge-base origin/main HEAD)" >> "$GITHUB_ENV"
            else
              git checkout -B temp
              git merge -q "origin/$GITHUB_BASE_REF" -m "merge main into temp"
              echo "BUF_BASE_BRANCH=$GITHUB_BASE_REF" >> "$GITHUB_ENV"
            fi
        "#})
    }

    /// Installs `buf` at a pinned version.
    fn bufbuild_setup_action() -> Step<Use> {
        named::uses("bufbuild", "buf-setup-action", "v1")
            .add_with(("version", "v1.29.0"))
            .add_with(("github_token", vars::GITHUB_TOKEN))
    }

    /// Fails on protobuf changes that would break wire compatibility with
    /// `$BUF_BASE_BRANCH`.
    fn bufbuild_breaking_action() -> Step<Use> {
        named::uses("bufbuild", "buf-breaking-action", "v1").add_with(("input", "crates/proto/proto/"))
            .add_with(("against", "https://github.com/${GITHUB_REPOSITORY}.git#branch=${BUF_BASE_BRANCH},subdir=crates/proto/proto/"))
    }

    fn buf_lint() -> Step<Run> {
        named::bash("buf lint crates/proto/proto")
    }

    fn check_protobuf_formatting() -> Step<Run> {
        named::bash("buf format --diff --exit-code crates/proto/proto")
    }

    named::job(
        release_job(&[])
            .runs_on(runners::LINUX_DEFAULT)
            // Identity for the merge commit created by `ensure_fresh_merge`.
            .add_env(("GIT_AUTHOR_NAME", "Protobuf Action"))
            .add_env(("GIT_AUTHOR_EMAIL", "ci@zed.dev"))
            .add_env(("GIT_COMMITTER_NAME", "Protobuf Action"))
            .add_env(("GIT_COMMITTER_EMAIL", "ci@zed.dev"))
            // Full history is required for merge-base / merge operations.
            .add_step(steps::checkout_repo().with_full_history())
            .add_step(ensure_fresh_merge())
            .add_step(bufbuild_setup_action())
            .add_step(bufbuild_breaking_action())
            .add_step(buf_lint())
            .add_step(check_protobuf_formatting()),
    )
}
653
/// Runs all workspace documentation tests (`cargo test --doc`) on Linux.
fn doctests() -> NamedJob {
    fn run_doctests() -> Step<Run> {
        named::bash(indoc::indoc! {r#"
            cargo test --workspace --doc --no-fail-fast
        "#})
        .id("run_doctests")
    }

    named::job(use_clang(
        release_job(&[])
            .runs_on(runners::LINUX_DEFAULT)
            .add_step(steps::checkout_repo())
            .add_step(steps::cache_rust_dependencies_namespace())
            .map(steps::install_linux_dependencies)
            .add_step(steps::setup_cargo_config(Platform::Linux))
            .add_step(steps::setup_sccache(Platform::Linux))
            .add_step(run_doctests())
            .add_step(steps::show_sccache_stats(Platform::Linux))
            .add_step(steps::cleanup_cargo_config(Platform::Linux)),
    ))
}
675
/// Runs the license check and license-generation scripts.
//
// NOTE(review): this uses `Job::default()` where sibling jobs use
// `release_job(&[])` — confirm whether the release-job defaults (and the
// repository-owner guard) are intentionally omitted here.
fn check_licenses() -> NamedJob {
    named::job(
        Job::default()
            .runs_on(runners::LINUX_SMALL)
            .add_step(steps::checkout_repo())
            .add_step(steps::cache_rust_dependencies_namespace())
            .add_step(steps::script("./script/check-licenses"))
            .add_step(steps::script("./script/generate-licenses")),
    )
}
686
/// Builds the mdBook docs and validates links, both in the markdown sources
/// and in the generated HTML.
fn check_docs() -> NamedJob {
    /// Link checker (lychee) over `dir`; external http(s) links are excluded.
    fn lychee_link_check(dir: &str) -> Step<Use> {
        named::uses(
            "lycheeverse",
            "lychee-action",
            "82202e5e9c2f4ef1a55a3d02563e1cb6041e5332",
        ) // v2.4.1
        .add_with(("args", format!("--no-progress --exclude '^http' '{dir}'")))
        .add_with(("fail", true))
        .add_with(("jobSummary", false))
    }

    fn install_mdbook() -> Step<Use> {
        named::uses(
            "peaceiris",
            "actions-mdbook",
            "ee69d230fe19748b7abf22df32acaa93833fad08", // v2
        )
        .with(("mdbook-version", "0.4.37"))
    }

    fn build_docs() -> Step<Run> {
        named::bash(indoc::indoc! {r#"
            mkdir -p target/deploy
            mdbook build ./docs --dest-dir=../target/deploy/docs/
        "#})
    }

    named::job(use_clang(
        release_job(&[])
            .runs_on(runners::LINUX_LARGE)
            .add_step(steps::checkout_repo())
            .add_step(steps::setup_cargo_config(Platform::Linux))
            // todo(ci): un-inline build_docs/action.yml here
            .add_step(steps::cache_rust_dependencies_namespace())
            .add_step(
                lychee_link_check("./docs/src/**/*"), // check markdown links
            )
            .map(steps::install_linux_dependencies)
            .add_step(steps::script("./script/generate-action-metadata"))
            .add_step(install_mdbook())
            .add_step(build_docs())
            .add_step(
                lychee_link_check("target/deploy/docs"), // check links in generated html
            ),
    ))
}
734
/// Checks shell scripts (shellcheck), workflow files (actionlint), and that
/// the generated `.github` workflows are up to date with `cargo xtask workflows`.
pub(crate) fn check_scripts() -> NamedJob {
    /// Downloads actionlint; the download script exposes the binary path via
    /// the step's `executable` output (read below through the step id).
    fn download_actionlint() -> Step<Run> {
        named::bash(
            "bash <(curl https://raw.githubusercontent.com/rhysd/actionlint/main/scripts/download-actionlint.bash)",
        )
    }

    fn run_actionlint() -> Step<Run> {
        named::bash(r#""$ACTIONLINT_BIN" -color"#).add_env((
            "ACTIONLINT_BIN",
            "${{ steps.get_actionlint.outputs.executable }}",
        ))
    }

    fn run_shellcheck() -> Step<Run> {
        named::bash("./script/shellcheck-scripts error")
    }

    /// Regenerates the workflows and fails if `.github` has drifted from the
    /// committed output.
    fn check_xtask_workflows() -> Step<Run> {
        named::bash(indoc::indoc! {r#"
            cargo xtask workflows
            if ! git diff --exit-code .github; then
              echo "Error: .github directory has uncommitted changes after running 'cargo xtask workflows'"
              echo "Please run 'cargo xtask workflows' locally and commit the changes"
              exit 1
            fi
        "#})
    }

    named::job(
        release_job(&[])
            .runs_on(runners::LINUX_SMALL)
            .add_step(steps::checkout_repo())
            .add_step(run_shellcheck())
            .add_step(download_actionlint().id("get_actionlint"))
            .add_step(run_actionlint())
            .add_step(cache_rust_dependencies_namespace())
            .add_step(check_xtask_workflows()),
    )
}
775
/// Dispatches the reusable `extension_tests.yml` workflow once per changed
/// extension directory, as reported by the orchestrate job's
/// `changed_extensions` output.
fn extension_tests() -> NamedJob<UsesJob> {
    let job = Job::default()
        .needs(vec!["orchestrate".to_owned()])
        // Skip entirely when no extension directories changed ('[]').
        .cond(Expression::new(
            "needs.orchestrate.outputs.changed_extensions != '[]'",
        ))
        .permissions(Permissions::default().contents(Level::Read))
        .strategy(
            Strategy::default()
                .fail_fast(false)
                // TODO: Remove the limit. We currently need this to workaround the concurrency group issue
                // where different matrix jobs would be placed in the same concurrency group and thus cancelled.
                .max_parallel(1u32)
                // Matrix over the JSON array of changed extension paths.
                .matrix(json!({
                    "extension": "${{ fromJson(needs.orchestrate.outputs.changed_extensions) }}"
                })),
        )
        .uses_local(".github/workflows/extension_tests.yml")
        .with(Input::default().add("working-directory", "${{ matrix.extension }}"));

    named::job(job)
}