1use gh_workflow::{
2 Container, Event, Expression, Input, Job, Level, MergeGroup, Permissions, Port, PullRequest,
3 Push, Run, Step, Strategy, Use, UsesJob, Workflow,
4};
5use indexmap::IndexMap;
6use indoc::formatdoc;
7use serde_json::json;
8
9use crate::tasks::workflows::{
10 steps::{
11 CommonJobConditions, cache_rust_dependencies_namespace, repository_owner_guard_expression,
12 use_clang,
13 },
14 vars::{self, PathCondition},
15};
16
17use super::{
18 runners::{self, Arch, Platform},
19 steps::{self, FluentBuilder, NamedJob, named, release_job},
20};
21
22pub(crate) fn run_tests() -> Workflow {
23 // Specify anything which should potentially skip full test suite in this regex:
24 // - docs/
25 // - script/update_top_ranking_issues/
26 // - .github/ISSUE_TEMPLATE/
27 // - .github/workflows/ (except .github/workflows/ci.yml)
28 // - extensions/ (these have their own test workflow)
29 let should_run_tests = PathCondition::inverted(
30 "run_tests",
31 r"^(docs/|script/update_top_ranking_issues/|\.github/(ISSUE_TEMPLATE|workflows/(?!run_tests))|extensions/)",
32 );
33 let should_check_docs = PathCondition::new("run_docs", r"^(docs/|crates/.*\.rs)");
34 let should_check_scripts = PathCondition::new(
35 "run_action_checks",
36 r"^\.github/(workflows/|actions/|actionlint.yml)|tooling/xtask|script/",
37 );
38 let should_check_licences =
39 PathCondition::new("run_licenses", r"^(Cargo.lock|script/.*licenses)");
40
41 let orchestrate = orchestrate(&[
42 &should_check_scripts,
43 &should_check_docs,
44 &should_check_licences,
45 &should_run_tests,
46 ]);
47
48 let mut jobs = vec![
49 orchestrate,
50 check_style(),
51 should_run_tests
52 .and_not_in_merge_queue()
53 .then(clippy(Platform::Windows, None)),
54 should_run_tests
55 .and_always()
56 .then(clippy(Platform::Linux, None)),
57 should_run_tests
58 .and_not_in_merge_queue()
59 .then(clippy(Platform::Mac, None)),
60 should_run_tests
61 .and_not_in_merge_queue()
62 .then(clippy(Platform::Mac, Some(Arch::X86_64))),
63 should_run_tests
64 .and_not_in_merge_queue()
65 .then(run_platform_tests(Platform::Windows)),
66 should_run_tests
67 .and_not_in_merge_queue()
68 .then(run_platform_tests(Platform::Linux)),
69 should_run_tests
70 .and_not_in_merge_queue()
71 .then(run_platform_tests(Platform::Mac)),
72 should_run_tests.and_not_in_merge_queue().then(doctests()),
73 should_run_tests
74 .and_not_in_merge_queue()
75 .then(check_workspace_binaries()),
76 should_run_tests.and_not_in_merge_queue().then(check_wasm()),
77 should_run_tests
78 .and_not_in_merge_queue()
79 .then(check_dependencies()), // could be more specific here?
80 should_check_docs.and_always().then(check_docs()),
81 should_check_licences
82 .and_not_in_merge_queue()
83 .then(check_licenses()),
84 should_check_scripts.and_always().then(check_scripts()),
85 ];
86 let ext_tests = extension_tests();
87 let tests_pass = tests_pass(&jobs, &[&ext_tests.name]);
88
89 // TODO: For merge queues, this should fail in the merge queue context
90 jobs.push(
91 should_run_tests
92 .and_always()
93 .then(check_postgres_and_protobuf_migrations()),
94 ); // could be more specific here?
95
96 named::workflow()
97 .add_event(
98 Event::default()
99 .push(
100 Push::default()
101 .add_branch("main")
102 .add_branch("v[0-9]+.[0-9]+.x"),
103 )
104 .pull_request(PullRequest::default().add_branch("**"))
105 .merge_group(MergeGroup::default()),
106 )
107 .concurrency(vars::one_workflow_per_non_main_branch())
108 .add_env(("CARGO_TERM_COLOR", "always"))
109 .add_env(("RUST_BACKTRACE", 1))
110 .add_env(("CARGO_INCREMENTAL", 0))
111 .map(|mut workflow| {
112 for job in jobs {
113 workflow = workflow.add_job(job.name, job.job)
114 }
115 workflow
116 })
117 .add_job(ext_tests.name, ext_tests.job)
118 .add_job(tests_pass.name, tests_pass.job)
119}
120
/// Controls which features `orchestrate_impl` includes in the generated script.
// `PartialEq`/`Eq` are derived so `orchestrate_impl` can gate features with
// plain `==` comparisons against the target.
#[derive(PartialEq, Eq)]
enum OrchestrateTarget {
    /// For the main Zed repo: includes the cargo package filter and extension
    /// change detection, but no working-directory scoping.
    ZedRepo,
    /// For individual extension repos: scopes changed-file detection to the
    /// working directory, with no package filter or extension detection.
    Extension,
}
131
/// Generates a bash script that checks changed files against regex patterns
/// and sets GitHub output variables accordingly.
///
/// Main-repo variant: also emits the `changed_packages` nextest filterset and
/// `changed_extensions` JSON outputs (see [`orchestrate_impl`]).
pub fn orchestrate(rules: &[&PathCondition]) -> NamedJob {
    orchestrate_impl(rules, OrchestrateTarget::ZedRepo)
}
137
/// Like [`orchestrate`], but for individual extension repositories: changed-file
/// detection is scoped to the job's working directory, and the Zed-repo-only
/// outputs (package filter, extension detection) are omitted.
pub fn orchestrate_for_extension(rules: &[&PathCondition]) -> NamedJob {
    orchestrate_impl(rules, OrchestrateTarget::Extension)
}
141
/// Shared implementation behind [`orchestrate`] / [`orchestrate_for_extension`].
///
/// Emits an "orchestrate" job whose single "filter" step:
/// 1. computes `$CHANGED_FILES` — diff vs. the PR merge-base when
///    `$GITHUB_BASE_REF` is set, otherwise vs. `HEAD~1`;
/// 2. for [`OrchestrateTarget::Extension`], re-roots the paths to the job's
///    working directory;
/// 3. for [`OrchestrateTarget::ZedRepo`], derives a `changed_packages`
///    nextest filterset and a `changed_extensions` JSON list;
/// 4. writes one `true`/`false` `GITHUB_OUTPUT` per [`PathCondition`].
///
/// Panics (at generation time) if a rule is registered with more than one
/// orchestrate job or two rules share an output name.
fn orchestrate_impl(rules: &[&PathCondition], target: OrchestrateTarget) -> NamedJob {
    let name = "orchestrate".to_owned();
    let step_name = "filter".to_owned();
    let mut script = String::new();

    // Compute $COMPARE_REV and $CHANGED_FILES for the rest of the script.
    script.push_str(indoc::indoc! {r#"
        set -euo pipefail
        if [ -z "$GITHUB_BASE_REF" ]; then
          echo "Not in a PR context (i.e., push to main/stable/preview)"
          COMPARE_REV="$(git rev-parse HEAD~1)"
        else
          echo "In a PR context comparing to pull_request.base.ref"
          git fetch origin "$GITHUB_BASE_REF" --depth=350
          COMPARE_REV="$(git merge-base "origin/${GITHUB_BASE_REF}" HEAD)"
        fi
        CHANGED_FILES="$(git diff --name-only "$COMPARE_REV" "$GITHUB_SHA")"

    "#});

    if target == OrchestrateTarget::Extension {
        // Extension repos run this job from a subdirectory, so the rule
        // regexes must see working-directory-relative paths.
        script.push_str(indoc::indoc! {r#"
            # When running from a subdirectory, git diff returns repo-root-relative paths.
            # Filter to only files within the current working directory and strip the prefix.
            REPO_SUBDIR="$(git rev-parse --show-prefix)"
            REPO_SUBDIR="${REPO_SUBDIR%/}"
            if [ -n "$REPO_SUBDIR" ]; then
              CHANGED_FILES="$(echo "$CHANGED_FILES" | grep "^${REPO_SUBDIR}/" | sed "s|^${REPO_SUBDIR}/||" || true)"
            fi

        "#});
    }

    // Shell helper invoked once per rule in the loop below; writes
    // "<name>=true|false" depending on whether any changed file matches.
    script.push_str(indoc::indoc! {r#"
        check_pattern() {
          local output_name="$1"
          local pattern="$2"
          local grep_arg="$3"

          echo "$CHANGED_FILES" | grep "$grep_arg" "$pattern" && \
            echo "${output_name}=true" >> "$GITHUB_OUTPUT" || \
            echo "${output_name}=false" >> "$GITHUB_OUTPUT"
        }

    "#});

    // Job-level outputs, all forwarded from the "filter" step's outputs.
    let mut outputs = IndexMap::new();

    if target == OrchestrateTarget::ZedRepo {
        // Derive the `changed_packages` nextest filterset: empty (= run all
        // tests) on direct pushes or global config changes, otherwise an
        // rdeps(..)-joined filterset built from changed crate/tooling dirs.
        script.push_str(indoc::indoc! {r#"
            # Check for changes that require full rebuild (no filter)
            # Direct pushes to main/stable/preview always run full suite
            if [ -z "$GITHUB_BASE_REF" ]; then
              echo "Not a PR, running full test suite"
              echo "changed_packages=" >> "$GITHUB_OUTPUT"
            elif echo "$CHANGED_FILES" | grep -qP '^(rust-toolchain\.toml|\.cargo/|\.github/|Cargo\.(toml|lock)$)'; then
              echo "Toolchain, cargo config, or root Cargo files changed, will run all tests"
              echo "changed_packages=" >> "$GITHUB_OUTPUT"
            else
              # Extract changed directories from file paths
              CHANGED_DIRS=$(echo "$CHANGED_FILES" | \
                grep -oP '^(crates|tooling)/\K[^/]+' | \
                sort -u || true)

              # Build directory-to-package mapping using cargo metadata
              DIR_TO_PKG=$(cargo metadata --format-version=1 --no-deps 2>/dev/null | \
                jq -r '.packages[] | select(.manifest_path | test("crates/|tooling/")) | "\(.manifest_path | capture("(crates|tooling)/(?<dir>[^/]+)") | .dir)=\(.name)"')

              # Map directory names to package names
              FILE_CHANGED_PKGS=""
              for dir in $CHANGED_DIRS; do
                pkg=$(echo "$DIR_TO_PKG" | grep "^${dir}=" | cut -d= -f2 | head -1)
                if [ -n "$pkg" ]; then
                  FILE_CHANGED_PKGS=$(printf '%s\n%s' "$FILE_CHANGED_PKGS" "$pkg")
                else
                  # Fall back to directory name if no mapping found
                  FILE_CHANGED_PKGS=$(printf '%s\n%s' "$FILE_CHANGED_PKGS" "$dir")
                fi
              done
              FILE_CHANGED_PKGS=$(echo "$FILE_CHANGED_PKGS" | grep -v '^$' | sort -u || true)

              # If assets/ changed, add crates that depend on those assets
              if echo "$CHANGED_FILES" | grep -qP '^assets/'; then
                FILE_CHANGED_PKGS=$(printf '%s\n%s\n%s\n%s' "$FILE_CHANGED_PKGS" "settings" "storybook" "assets" | sort -u)
              fi

              # Combine all changed packages
              ALL_CHANGED_PKGS=$(echo "$FILE_CHANGED_PKGS" | grep -v '^$' || true)

              if [ -z "$ALL_CHANGED_PKGS" ]; then
                echo "No package changes detected, will run all tests"
                echo "changed_packages=" >> "$GITHUB_OUTPUT"
              else
                # Build nextest filterset with rdeps for each package
                FILTERSET=$(echo "$ALL_CHANGED_PKGS" | \
                  sed 's/.*/rdeps(&)/' | \
                  tr '\n' '|' | \
                  sed 's/|$//')
                echo "Changed packages filterset: $FILTERSET"
                echo "changed_packages=$FILTERSET" >> "$GITHUB_OUTPUT"
              fi
            fi

        "#});

        outputs.insert(
            "changed_packages".to_owned(),
            format!("${{{{ steps.{}.outputs.changed_packages }}}}", step_name),
        );
    }

    for rule in rules {
        // A PathCondition may be wired to at most one orchestrate job;
        // `set_by_step` records the claim and this assert catches reuse.
        assert!(
            rule.set_by_step
                .borrow_mut()
                .replace(name.clone())
                .is_none()
        );
        // Output names must be unique across rules (and the built-ins above).
        assert!(
            outputs
                .insert(
                    rule.name.to_owned(),
                    format!("${{{{ steps.{}.outputs.{} }}}}", step_name, rule.name)
                )
                .is_none()
        );

        // Inverted rules report `true` when NO changed file matches (-v).
        let grep_arg = if rule.invert { "-qvP" } else { "-qP" };
        script.push_str(&format!(
            "check_pattern \"{}\" '{}' {}\n",
            rule.name, rule.pattern, grep_arg
        ));
    }

    if target == OrchestrateTarget::ZedRepo {
        // Append extension detection and forward its JSON list as an output.
        script.push_str(DETECT_CHANGED_EXTENSIONS_SCRIPT);
        script.push_str("echo \"changed_extensions=$EXTENSIONS_JSON\" >> \"$GITHUB_OUTPUT\"\n");

        outputs.insert(
            "changed_extensions".to_owned(),
            format!("${{{{ steps.{}.outputs.changed_extensions }}}}", step_name),
        );
    }

    let job = Job::default()
        .runs_on(runners::LINUX_SMALL)
        .with_repository_owner_guard()
        .outputs(outputs)
        // Deep history is needed so the merge-base / HEAD~1 diffs above work.
        .add_step(steps::checkout_repo().with_deep_history_on_non_main())
        .add_step(Step::new(step_name.clone()).run(script).id(step_name));

    NamedJob { name, job }
}
294
295pub fn tests_pass(jobs: &[NamedJob], extra_job_names: &[&str]) -> NamedJob {
296 let mut script = String::from(indoc::indoc! {r#"
297 set +x
298 EXIT_CODE=0
299
300 check_result() {
301 echo "* $1: $2"
302 if [[ "$2" != "skipped" && "$2" != "success" ]]; then EXIT_CODE=1; fi
303 }
304
305 "#});
306
307 let all_names: Vec<&str> = jobs
308 .iter()
309 .map(|job| job.name.as_str())
310 .chain(extra_job_names.iter().copied())
311 .collect();
312
313 let env_entries: Vec<_> = all_names
314 .iter()
315 .map(|name| {
316 let env_name = format!("RESULT_{}", name.to_uppercase());
317 let env_value = format!("${{{{ needs.{}.result }}}}", name);
318 (env_name, env_value)
319 })
320 .collect();
321
322 script.push_str(
323 &all_names
324 .iter()
325 .zip(env_entries.iter())
326 .map(|(name, (env_name, _))| format!("check_result \"{}\" \"${}\"", name, env_name))
327 .collect::<Vec<_>>()
328 .join("\n"),
329 );
330
331 script.push_str("\n\nexit $EXIT_CODE\n");
332
333 let job = Job::default()
334 .runs_on(runners::LINUX_SMALL)
335 .needs(
336 all_names
337 .iter()
338 .map(|name| name.to_string())
339 .collect::<Vec<String>>(),
340 )
341 .cond(repository_owner_guard_expression(true))
342 .add_step(
343 env_entries
344 .into_iter()
345 .fold(named::bash(&script), |step, env_item| {
346 step.add_env(env_item)
347 }),
348 );
349
350 named::job(job)
351}
352
/// Bash script snippet that detects changed extension directories from `$CHANGED_FILES`.
/// Assumes `$CHANGED_FILES` is already set. Sets `$EXTENSIONS_JSON` to a JSON array of
/// changed extension paths. Callers are responsible for writing the result to `$GITHUB_OUTPUT`.
///
/// Note: the `(?=/)` lookahead means only paths *inside* an extension
/// directory count (a file directly under `extensions/` is ignored), and
/// `extensions/workflows` is explicitly filtered out.
pub(crate) const DETECT_CHANGED_EXTENSIONS_SCRIPT: &str = indoc::indoc! {r#"
    # Detect changed extension directories (excluding extensions/workflows)
    CHANGED_EXTENSIONS=$(echo "$CHANGED_FILES" | grep -oP '^extensions/[^/]+(?=/)' | sort -u | grep -v '^extensions/workflows$' || true)
    if [ -n "$CHANGED_EXTENSIONS" ]; then
      EXTENSIONS_JSON=$(echo "$CHANGED_EXTENSIONS" | jq -R -s -c 'split("\n") | map(select(length > 0))')
    else
      EXTENSIONS_JSON="[]"
    fi
"#};
365
/// Release-asset filename of the prebuilt `ts_query_ls` Linux binary.
const TS_QUERY_LS_FILE: &str = "ts_query_ls-x86_64-unknown-linux-gnu.tar.gz";
/// Pinned `ts_query_ls` release ref fetched in CI.
const CI_TS_QUERY_RELEASE: &str = "tags/v3.15.1";
368
/// Downloads the pinned `ts_query_ls` release asset from the
/// `ribru17/ts_query_ls` repo via `dsaltares/fetch-gh-release-asset`
/// (pinned to a commit SHA).
pub(crate) fn fetch_ts_query_ls() -> Step<Use> {
    named::uses(
        "dsaltares",
        "fetch-gh-release-asset",
        "aa37ae5c44d3c9820bc12fe675e8670ecd93bd1c",
    ) // v1.1.1
    .add_with(("repo", "ribru17/ts_query_ls"))
    .add_with(("version", CI_TS_QUERY_RELEASE))
    .add_with(("file", TS_QUERY_LS_FILE))
}
379
/// Unpacks the fetched `ts_query_ls` tarball into the workspace and runs
/// `format --check .`, failing with install instructions if any tree-sitter
/// query file is unformatted.
pub(crate) fn run_ts_query_ls() -> Step<Run> {
    named::bash(formatdoc!(
        r#"tar -xf "$GITHUB_WORKSPACE/{TS_QUERY_LS_FILE}" -C "$GITHUB_WORKSPACE"
        "$GITHUB_WORKSPACE/ts_query_ls" format --check . || {{
            echo "Found unformatted queries, please format them with ts_query_ls."
            echo "For easy use, install the Tree-sitter query extension:"
            echo "zed://extension/tree-sitter-query"
            false
        }}"#
    ))
}
391
/// Lint/format job: prettier, `cargo fmt`, todo/keymap scripts, the typos
/// spell-checker, and tree-sitter query formatting.
fn check_style() -> NamedJob {
    /// Runs the `crate-ci/typos` spell-checker (pinned to a commit SHA) with
    /// the repo's `typos.toml` config.
    fn check_for_typos() -> Step<Use> {
        named::uses(
            "crate-ci",
            "typos",
            "2d0ce569feab1f8752f1dde43cc2f2aa53236e06",
        ) // v1.40.0
        .with(("config", "./typos.toml"))
    }

    named::job(
        release_job(&[])
            .runs_on(runners::LINUX_MEDIUM)
            .add_step(steps::checkout_repo())
            .add_step(steps::cache_rust_dependencies_namespace())
            .add_step(steps::setup_pnpm())
            .add_step(steps::prettier())
            .add_step(steps::cargo_fmt())
            .add_step(steps::script("./script/check-todos"))
            .add_step(steps::script("./script/check-keymaps"))
            .add_step(check_for_typos())
            .add_step(fetch_ts_query_ls())
            .add_step(run_ts_query_ls()),
    )
}
417
/// Dependency hygiene job: unused-dependency detection (cargo-machete),
/// lockfile freshness, and GitHub's vulnerable-dependency review on PRs.
fn check_dependencies() -> NamedJob {
    /// Installs cargo-machete at a pinned version via `clechasseur/rs-cargo`.
    fn install_cargo_machete() -> Step<Use> {
        named::uses(
            "clechasseur",
            "rs-cargo",
            "8435b10f6e71c2e3d4d3b7573003a8ce4bfc6386", // v2
        )
        .add_with(("command", "install"))
        .add_with(("args", "cargo-machete@0.7.0"))
    }

    /// Runs `cargo machete` to flag unused dependencies.
    fn run_cargo_machete() -> Step<Use> {
        named::uses(
            "clechasseur",
            "rs-cargo",
            "8435b10f6e71c2e3d4d3b7573003a8ce4bfc6386", // v2
        )
        .add_with(("command", "machete"))
    }

    /// Fails if Cargo.lock is out of date with the workspace manifests
    /// (`--locked` errors instead of updating).
    fn check_cargo_lock() -> Step<Run> {
        named::bash("cargo update --locked --workspace")
    }

    /// GitHub dependency review; PR-only since the action diffs against the
    /// PR base. License checking is disabled here (covered by check_licenses).
    fn check_vulnerable_dependencies() -> Step<Use> {
        named::uses(
            "actions",
            "dependency-review-action",
            "67d4f4bd7a9b17a0db54d2a7519187c65e339de8", // v4
        )
        .if_condition(Expression::new("github.event_name == 'pull_request'"))
        .with(("license-check", false))
    }

    named::job(use_clang(
        release_job(&[])
            .runs_on(runners::LINUX_SMALL)
            .add_step(steps::checkout_repo())
            .add_step(steps::cache_rust_dependencies_namespace())
            .add_step(install_cargo_machete())
            .add_step(run_cargo_machete())
            .add_step(check_cargo_lock())
            .add_step(check_vulnerable_dependencies()),
    ))
}
463
/// Type-checks the `gpui_platform` crate for the wasm32 target, rebuilding
/// std via nightly `-Zbuild-std` with threading-related wasm features enabled.
fn check_wasm() -> NamedJob {
    /// Installs nightly with `rust-src` (required by `-Zbuild-std`) plus the
    /// wasm32-unknown-unknown target.
    fn install_nightly_wasm_toolchain() -> Step<Run> {
        named::bash(
            "rustup toolchain install nightly --component rust-src --target wasm32-unknown-unknown",
        )
    }

    /// Runs the actual `cargo check` with atomics/bulk-memory/mutable-globals
    /// enabled via target-specific RUSTFLAGS.
    fn cargo_check_wasm() -> Step<Run> {
        named::bash(concat!(
            "cargo +nightly -Zbuild-std=std,panic_abort ",
            "check --target wasm32-unknown-unknown -p gpui_platform",
        ))
        .add_env((
            "CARGO_TARGET_WASM32_UNKNOWN_UNKNOWN_RUSTFLAGS",
            "-C target-feature=+atomics,+bulk-memory,+mutable-globals",
        ))
    }

    named::job(
        release_job(&[])
            .runs_on(runners::LINUX_LARGE)
            .add_step(steps::checkout_repo())
            .add_step(steps::setup_cargo_config(Platform::Linux))
            .add_step(steps::cache_rust_dependencies_namespace())
            .add_step(install_nightly_wasm_toolchain())
            .add_step(steps::setup_sccache(Platform::Linux))
            .add_step(cargo_check_wasm())
            .add_step(steps::show_sccache_stats(Platform::Linux))
            .add_step(steps::cleanup_cargo_config(Platform::Linux)),
    )
}
495
/// Builds collab plus every workspace binary and example on Linux to catch
/// compile breakage outside the default build targets.
fn check_workspace_binaries() -> NamedJob {
    named::job(use_clang(
        release_job(&[])
            .runs_on(runners::LINUX_LARGE)
            .add_step(steps::checkout_repo())
            .add_step(steps::setup_cargo_config(Platform::Linux))
            .add_step(steps::cache_rust_dependencies_namespace())
            .map(steps::install_linux_dependencies)
            .add_step(steps::setup_sccache(Platform::Linux))
            // collab is built separately before the workspace-wide pass.
            .add_step(steps::script("cargo build -p collab"))
            .add_step(steps::script("cargo build --workspace --bins --examples"))
            .add_step(steps::show_sccache_stats(Platform::Linux))
            .add_step(steps::cleanup_cargo_config(Platform::Linux)),
    ))
}
511
512pub(crate) fn clippy(platform: Platform, arch: Option<Arch>) -> NamedJob {
513 let target = arch.map(|arch| match (platform, arch) {
514 (Platform::Mac, Arch::X86_64) => "x86_64-apple-darwin",
515 (Platform::Mac, Arch::AARCH64) => "aarch64-apple-darwin",
516 _ => unimplemented!("cross-arch clippy not supported for {platform}/{arch}"),
517 });
518 let runner = match platform {
519 Platform::Windows => runners::WINDOWS_DEFAULT,
520 Platform::Linux => runners::LINUX_DEFAULT,
521 Platform::Mac => runners::MAC_DEFAULT,
522 };
523 let mut job = release_job(&[])
524 .runs_on(runner)
525 .add_step(steps::checkout_repo())
526 .add_step(steps::setup_cargo_config(platform))
527 .when(
528 platform == Platform::Linux || platform == Platform::Mac,
529 |this| this.add_step(steps::cache_rust_dependencies_namespace()),
530 )
531 .when(
532 platform == Platform::Linux,
533 steps::install_linux_dependencies,
534 )
535 .when_some(target, |this, target| {
536 this.add_step(steps::install_rustup_target(target))
537 })
538 .add_step(steps::setup_sccache(platform))
539 .add_step(steps::clippy(platform, target))
540 .add_step(steps::show_sccache_stats(platform));
541 if platform == Platform::Linux {
542 job = use_clang(job);
543 }
544 let name = match arch {
545 Some(arch) => format!("clippy_{platform}_{arch}"),
546 None => format!("clippy_{platform}"),
547 };
548 NamedJob { name, job }
549}
550
/// Platform test job that limits nextest to the packages the `orchestrate`
/// job's `changed_packages` output flags as affected.
pub(crate) fn run_platform_tests(platform: Platform) -> NamedJob {
    run_platform_tests_impl(platform, true)
}
554
/// Platform test job that always runs the full test suite (no package filter).
pub(crate) fn run_platform_tests_no_filter(platform: Platform) -> NamedJob {
    run_platform_tests_impl(platform, false)
}
558
/// Builds the `run_tests_<platform>` job. When `filter_packages` is true, the
/// nextest step is restricted to the "orchestrate" job's changed-packages
/// filterset; otherwise the full suite runs.
fn run_platform_tests_impl(platform: Platform, filter_packages: bool) -> NamedJob {
    let runner = match platform {
        Platform::Windows => runners::WINDOWS_DEFAULT,
        Platform::Linux => runners::LINUX_DEFAULT,
        Platform::Mac => runners::MAC_DEFAULT,
    };
    NamedJob {
        name: format!("run_tests_{platform}"),
        job: release_job(&[])
            .runs_on(runner)
            // Linux tests need a Postgres service container (trust auth,
            // health-checked so tests don't start before it's ready).
            .when(platform == Platform::Linux, |job| {
                job.add_service(
                    "postgres",
                    Container::new("postgres:15")
                        .add_env(("POSTGRES_HOST_AUTH_METHOD", "trust"))
                        .ports(vec![Port::Name("5432:5432".into())])
                        .options(
                            "--health-cmd pg_isready \
                             --health-interval 500ms \
                             --health-timeout 5s \
                             --health-retries 10",
                        ),
                )
            })
            .add_step(steps::checkout_repo())
            .add_step(steps::setup_cargo_config(platform))
            .when(platform == Platform::Mac, |this| {
                this.add_step(steps::cache_rust_dependencies_namespace())
            })
            // Linux additionally builds with clang (use_clang wraps the job).
            .when(platform == Platform::Linux, |this| {
                use_clang(this.add_step(steps::cache_rust_dependencies_namespace()))
            })
            .when(
                platform == Platform::Linux,
                steps::install_linux_dependencies,
            )
            .add_step(steps::setup_node())
            // nextest is installed on Linux/Mac only; presumably the Windows
            // runner image ships it preinstalled — TODO confirm.
            .when(
                platform == Platform::Linux || platform == Platform::Mac,
                |job| job.add_step(steps::cargo_install_nextest()),
            )
            .add_step(steps::clear_target_dir_if_large(platform))
            .add_step(steps::setup_sccache(platform))
            // "orchestrate" is the job id whose changed_packages output feeds
            // the filterset.
            .when(filter_packages, |job| {
                job.add_step(
                    steps::cargo_nextest(platform).with_changed_packages_filter("orchestrate"),
                )
            })
            .when(!filter_packages, |job| {
                job.add_step(steps::cargo_nextest(platform))
            })
            .add_step(steps::show_sccache_stats(platform))
            .add_step(steps::cleanup_cargo_config(platform)),
    }
}
614
/// Verifies protobuf backwards-compatibility (buf breaking/lint/format)
/// against the PR base branch, or against main's merge-base on direct pushes.
pub(crate) fn check_postgres_and_protobuf_migrations() -> NamedJob {
    /// Determines $BUF_BASE_BRANCH: on PRs, merges the base branch into a
    /// temp branch first so the comparison sees a fresh merge; on pushes,
    /// uses the merge-base with origin/main.
    // NOTE(review): the "merge main into temp" commit message is written even
    // when the base ref is not main — harmless, but slightly misleading.
    fn ensure_fresh_merge() -> Step<Run> {
        named::bash(indoc::indoc! {r#"
            if [ -z "$GITHUB_BASE_REF" ];
            then
              echo "BUF_BASE_BRANCH=$(git merge-base origin/main HEAD)" >> "$GITHUB_ENV"
            else
              git checkout -B temp
              git merge -q "origin/$GITHUB_BASE_REF" -m "merge main into temp"
              echo "BUF_BASE_BRANCH=$GITHUB_BASE_REF" >> "$GITHUB_ENV"
            fi
        "#})
    }

    /// Installs the pinned buf CLI.
    fn bufbuild_setup_action() -> Step<Use> {
        named::uses("bufbuild", "buf-setup-action", "v1")
            .add_with(("version", "v1.29.0"))
            .add_with(("github_token", vars::GITHUB_TOKEN))
    }

    /// Runs buf's breaking-change detection for crates/proto/proto against
    /// $BUF_BASE_BRANCH of this repository.
    fn bufbuild_breaking_action() -> Step<Use> {
        named::uses("bufbuild", "buf-breaking-action", "v1").add_with(("input", "crates/proto/proto/"))
        .add_with(("against", "https://github.com/${GITHUB_REPOSITORY}.git#branch=${BUF_BASE_BRANCH},subdir=crates/proto/proto/"))
    }

    /// Lints the proto definitions.
    fn buf_lint() -> Step<Run> {
        named::bash("buf lint crates/proto/proto")
    }

    /// Fails if the proto files are not buf-formatted.
    fn check_protobuf_formatting() -> Step<Run> {
        named::bash("buf format --diff --exit-code crates/proto/proto")
    }

    named::job(
        release_job(&[])
            .runs_on(runners::LINUX_DEFAULT)
            // Identity for the temp merge commit created by ensure_fresh_merge.
            .add_env(("GIT_AUTHOR_NAME", "Protobuf Action"))
            .add_env(("GIT_AUTHOR_EMAIL", "ci@zed.dev"))
            .add_env(("GIT_COMMITTER_NAME", "Protobuf Action"))
            .add_env(("GIT_COMMITTER_EMAIL", "ci@zed.dev"))
            // Full history needed for merge-base / merge operations above.
            .add_step(steps::checkout_repo().with_full_history())
            .add_step(ensure_fresh_merge())
            .add_step(bufbuild_setup_action())
            .add_step(bufbuild_breaking_action())
            .add_step(buf_lint())
            .add_step(check_protobuf_formatting()),
    )
}
663
/// Runs every workspace doctest on Linux (doctests are not covered by the
/// nextest-based platform test jobs).
fn doctests() -> NamedJob {
    /// Runs `cargo test --doc` across the workspace without stopping at the
    /// first failure.
    fn run_doctests() -> Step<Run> {
        named::bash(indoc::indoc! {r#"
            cargo test --workspace --doc --no-fail-fast
        "#})
        .id("run_doctests")
    }

    named::job(use_clang(
        release_job(&[])
            .runs_on(runners::LINUX_DEFAULT)
            .add_step(steps::checkout_repo())
            .add_step(steps::cache_rust_dependencies_namespace())
            .map(steps::install_linux_dependencies)
            .add_step(steps::setup_cargo_config(Platform::Linux))
            .add_step(steps::setup_sccache(Platform::Linux))
            .add_step(run_doctests())
            .add_step(steps::show_sccache_stats(Platform::Linux))
            .add_step(steps::cleanup_cargo_config(Platform::Linux)),
    ))
}
685
/// Runs the license check and generation scripts.
// NOTE(review): unlike the sibling jobs this uses a bare `Job::default()`
// rather than `release_job(&[])` and carries no repository-owner guard —
// confirm whether that asymmetry is intentional.
fn check_licenses() -> NamedJob {
    named::job(
        Job::default()
            .runs_on(runners::LINUX_SMALL)
            .add_step(steps::checkout_repo())
            .add_step(steps::cache_rust_dependencies_namespace())
            .add_step(steps::script("./script/check-licenses"))
            .add_step(steps::script("./script/generate-licenses")),
    )
}
696
/// Builds the mdBook docs and link-checks both the markdown sources and the
/// generated HTML (relative links only; `^http` is excluded).
fn check_docs() -> NamedJob {
    /// Runs lychee (pinned to a commit SHA) over `dir`, failing on broken
    /// links; external http(s) links are excluded.
    fn lychee_link_check(dir: &str) -> Step<Use> {
        named::uses(
            "lycheeverse",
            "lychee-action",
            "82202e5e9c2f4ef1a55a3d02563e1cb6041e5332",
        ) // v2.4.1
        .add_with(("args", format!("--no-progress --exclude '^http' '{dir}'")))
        .add_with(("fail", true))
        .add_with(("jobSummary", false))
    }

    /// Installs the pinned mdBook version.
    fn install_mdbook() -> Step<Use> {
        named::uses(
            "peaceiris",
            "actions-mdbook",
            "ee69d230fe19748b7abf22df32acaa93833fad08", // v2
        )
        .with(("mdbook-version", "0.4.37"))
    }

    /// Builds the book into target/deploy/docs.
    fn build_docs() -> Step<Run> {
        named::bash(indoc::indoc! {r#"
            mkdir -p target/deploy
            mdbook build ./docs --dest-dir=../target/deploy/docs/
        "#})
    }

    named::job(use_clang(
        release_job(&[])
            .runs_on(runners::LINUX_LARGE)
            .add_step(steps::checkout_repo())
            .add_step(steps::setup_cargo_config(Platform::Linux))
            // todo(ci): un-inline build_docs/action.yml here
            .add_step(steps::cache_rust_dependencies_namespace())
            .add_step(
                lychee_link_check("./docs/src/**/*"), // check markdown links
            )
            .map(steps::install_linux_dependencies)
            .add_step(steps::script("./script/generate-action-metadata"))
            .add_step(install_mdbook())
            .add_step(build_docs())
            .add_step(
                lychee_link_check("target/deploy/docs"), // check links in generated html
            ),
    ))
}
744
/// Validates CI tooling itself: shellcheck over repo scripts, actionlint over
/// the workflow YAML, and a regeneration check that `.github` matches what
/// `cargo xtask workflows` produces.
pub(crate) fn check_scripts() -> NamedJob {
    /// Downloads the actionlint binary via its upstream install script; the
    /// step id ("get_actionlint") exposes the binary path as an output.
    fn download_actionlint() -> Step<Run> {
        named::bash(
            "bash <(curl https://raw.githubusercontent.com/rhysd/actionlint/main/scripts/download-actionlint.bash)",
        )
    }

    /// Runs actionlint using the path produced by the download step above.
    fn run_actionlint() -> Step<Run> {
        named::bash(r#""$ACTIONLINT_BIN" -color"#).add_env((
            "ACTIONLINT_BIN",
            "${{ steps.get_actionlint.outputs.executable }}",
        ))
    }

    /// Shellchecks repository scripts at the "error" severity level.
    fn run_shellcheck() -> Step<Run> {
        named::bash("./script/shellcheck-scripts error")
    }

    /// Regenerates the workflows and fails if `.github` has a diff, i.e. the
    /// committed YAML is stale relative to this generator.
    fn check_xtask_workflows() -> Step<Run> {
        named::bash(indoc::indoc! {r#"
            cargo xtask workflows
            if ! git diff --exit-code .github; then
              echo "Error: .github directory has uncommitted changes after running 'cargo xtask workflows'"
              echo "Please run 'cargo xtask workflows' locally and commit the changes"
              exit 1
            fi
        "#})
    }

    named::job(
        release_job(&[])
            .runs_on(runners::LINUX_SMALL)
            .add_step(steps::checkout_repo())
            .add_step(run_shellcheck())
            .add_step(download_actionlint().id("get_actionlint"))
            .add_step(run_actionlint())
            // Needed for the `cargo xtask workflows` rebuild below.
            .add_step(cache_rust_dependencies_namespace())
            .add_step(check_xtask_workflows()),
    )
}
785
/// Matrix job that invokes the reusable extension_tests workflow once per
/// changed extension (from the orchestrate job's `changed_extensions` JSON
/// output); skipped entirely when that list is empty.
fn extension_tests() -> NamedJob<UsesJob> {
    let job = Job::default()
        .needs(vec!["orchestrate".to_owned()])
        .cond(Expression::new(
            "needs.orchestrate.outputs.changed_extensions != '[]'",
        ))
        .permissions(Permissions::default().contents(Level::Read))
        .strategy(
            Strategy::default()
                .fail_fast(false)
                // TODO: Remove the limit. We currently need this to workaround the concurrency group issue
                // where different matrix jobs would be placed in the same concurrency group and thus cancelled.
                .max_parallel(1u32)
                .matrix(json!({
                    "extension": "${{ fromJson(needs.orchestrate.outputs.changed_extensions) }}"
                })),
        )
        .uses_local(".github/workflows/extension_tests.yml")
        .with(Input::default().add("working-directory", "${{ matrix.extension }}"));

    named::job(job)
}