use gh_workflow::{
    Concurrency, Event, Expression, Job, PullRequest, Push, Run, Step, Use, Workflow,
};
use indexmap::IndexMap;

use crate::tasks::workflows::{
    nix_build::build_nix,
    runners::Arch,
    steps::{BASH_SHELL, CommonJobConditions, repository_owner_guard_expression},
    vars::{self, PathCondition},
};

use super::{
    runners::{self, Platform},
    steps::{self, FluentBuilder, NamedJob, named, release_job},
};

pub(crate) fn run_tests() -> Workflow {
    // Specify anything which should potentially skip the full test suite in this regex:
    // - docs/
    // - script/update_top_ranking_issues/
    // - .github/ISSUE_TEMPLATE/
    // - .github/workflows/ (except .github/workflows/run_tests*)
    let should_run_tests = PathCondition::inverted(
        "run_tests",
        r"^(docs/|script/update_top_ranking_issues/|\.github/(ISSUE_TEMPLATE|workflows/(?!run_tests)))",
    );
    let should_check_docs = PathCondition::new("run_docs", r"^(docs/|crates/.*\.rs)");
    let should_check_scripts = PathCondition::new(
        "run_action_checks",
        r"^\.github/(workflows/|actions/|actionlint.yml)|tooling/xtask|script/",
    );
    let should_check_licences =
        PathCondition::new("run_licenses", r"^(Cargo.lock|script/.*licenses)");
    let should_build_nix = PathCondition::new(
        "run_nix",
        r"^(nix/|flake\.|Cargo\.|rust-toolchain.toml|\.cargo/config.toml)",
    );
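    // Each condition above becomes a boolean output on the `orchestrate` job below,
    // and `guard(...)` gates the wrapped job on that output. For example, a PR that
    // only touches `docs/` leaves `run_tests` false (so the platform test jobs are
    // skipped) while still setting `run_docs` to true.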

    let orchestrate = orchestrate(&[
        &should_check_scripts,
        &should_check_docs,
        &should_check_licences,
        &should_build_nix,
        &should_run_tests,
    ]);

    let check_style = check_style();
    let run_tests_linux = run_platform_tests(Platform::Linux);
    let call_autofix = call_autofix(&check_style, &run_tests_linux);

    let mut jobs = vec![
        orchestrate,
        check_style,
        should_run_tests.guard(run_platform_tests(Platform::Windows)),
        should_run_tests.guard(run_tests_linux),
        should_run_tests.guard(run_platform_tests(Platform::Mac)),
        should_run_tests.guard(doctests()),
        should_run_tests.guard(check_workspace_binaries()),
        should_run_tests.guard(check_dependencies()), // could be more specific here?
        should_check_docs.guard(check_docs()),
        should_check_licences.guard(check_licenses()),
        should_check_scripts.guard(check_scripts()),
        should_build_nix.guard(build_nix(
            Platform::Linux,
            Arch::X86_64,
            "debug",
            // *don't* cache the built output
            Some("-zed-editor-[0-9.]*-nightly"),
            &[],
        )),
        should_build_nix.guard(build_nix(
            Platform::Mac,
            Arch::AARCH64,
            "debug",
            // *don't* cache the built output
            Some("-zed-editor-[0-9.]*-nightly"),
            &[],
        )),
    ];
    let tests_pass = tests_pass(&jobs);

    jobs.push(should_run_tests.guard(check_postgres_and_protobuf_migrations())); // could be more specific here?

    named::workflow()
        .add_event(
            Event::default()
                .push(
                    Push::default()
                        .add_branch("main")
                        .add_branch("v[0-9]+.[0-9]+.x"),
                )
                .pull_request(PullRequest::default().add_branch("**")),
        )
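        // Pushes to `main` get a unique concurrency group per commit (the SHA), so
        // they never cancel each other; every other ref shares the literal 'anysha',
        // so a newer run cancels any in-flight run for the same workflow and ref.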
        .concurrency(
            Concurrency::default()
                .group(concat!(
                    "${{ github.workflow }}-${{ github.ref_name }}-",
                    "${{ github.ref_name == 'main' && github.sha || 'anysha' }}"
                ))
                .cancel_in_progress(true),
        )
        .add_env(("CARGO_TERM_COLOR", "always"))
        .add_env(("RUST_BACKTRACE", 1))
        .add_env(("CARGO_INCREMENTAL", 0))
        .map(|mut workflow| {
            for job in jobs {
                workflow = workflow.add_job(job.name, job.job)
            }
            workflow
        })
        .add_job(tests_pass.name, tests_pass.job)
        .add_job(call_autofix.name, call_autofix.job)
}

// Generates a bash script that checks changed files against regex patterns
// and sets GitHub output variables accordingly
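// Each rule becomes a job-level output named after the rule (e.g. `run_tests`),
// wired to the matching `steps.filter.outputs.*` value.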
pub fn orchestrate(rules: &[&PathCondition]) -> NamedJob {
    let name = "orchestrate".to_owned();
    let step_name = "filter".to_owned();
    let mut script = String::new();

    script.push_str(indoc::indoc! {r#"
        if [ -z "$GITHUB_BASE_REF" ]; then
          echo "Not in a PR context (i.e., push to main/stable/preview)"
          COMPARE_REV="$(git rev-parse HEAD~1)"
        else
          echo "In a PR context comparing to pull_request.base.ref"
          git fetch origin "$GITHUB_BASE_REF" --depth=350
          COMPARE_REV="$(git merge-base "origin/${GITHUB_BASE_REF}" HEAD)"
        fi
        CHANGED_FILES="$(git diff --name-only "$COMPARE_REV" ${{ github.sha }})"

        check_pattern() {
          local output_name="$1"
          local pattern="$2"
          local grep_arg="$3"

          echo "$CHANGED_FILES" | grep "$grep_arg" "$pattern" && \
            echo "${output_name}=true" >> "$GITHUB_OUTPUT" || \
            echo "${output_name}=false" >> "$GITHUB_OUTPUT"
        }

    "#});
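    // The loop below appends one call per rule, e.g.
    //   check_pattern "run_docs" '^(docs/|crates/.*\.rs)' -qP
    // Inverted rules pass -qvP instead, so their output is true when at least one
    // changed file falls outside the pattern.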

    let mut outputs = IndexMap::new();

    for rule in rules {
        assert!(
            rule.set_by_step
                .borrow_mut()
                .replace(name.clone())
                .is_none()
        );
        assert!(
            outputs
                .insert(
                    rule.name.to_owned(),
                    format!("${{{{ steps.{}.outputs.{} }}}}", step_name, rule.name)
                )
                .is_none()
        );

        let grep_arg = if rule.invert { "-qvP" } else { "-qP" };
        script.push_str(&format!(
            "check_pattern \"{}\" '{}' {}\n",
            rule.name, rule.pattern, grep_arg
        ));
    }

    let job = Job::default()
        .runs_on(runners::LINUX_SMALL)
        .with_repository_owner_guard()
        .outputs(outputs)
        .add_step(steps::checkout_repo().add_with((
            "fetch-depth",
            "${{ github.ref == 'refs/heads/main' && 2 || 350 }}",
        )))
        .add_step(
            Step::new(step_name.clone())
                .run(script)
                .id(step_name)
                .shell(BASH_SHELL),
        );

    NamedJob { name, job }
}

pub fn tests_pass(jobs: &[NamedJob]) -> NamedJob {
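    // Aggregate status check: depends on every job passed in and fails unless each
    // result is either `success` or `skipped`.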
    let mut script = String::from(indoc::indoc! {r#"
        set +x
        EXIT_CODE=0

        check_result() {
          echo "* $1: $2"
          if [[ "$2" != "skipped" && "$2" != "success" ]]; then EXIT_CODE=1; fi
        }

    "#});
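    // One line is appended per needed job:
    //   check_result "<name>" "${{ needs.<name>.result }}"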

    script.push_str(
        &jobs
            .iter()
            .map(|job| {
                format!(
                    "check_result \"{}\" \"${{{{ needs.{}.result }}}}\"",
                    job.name, job.name
                )
            })
            .collect::<Vec<_>>()
            .join("\n"),
    );

    script.push_str("\n\nexit $EXIT_CODE\n");

    let job = Job::default()
        .runs_on(runners::LINUX_SMALL)
        .needs(
            jobs.iter()
                .map(|j| j.name.to_string())
                .collect::<Vec<String>>(),
        )
        .cond(repository_owner_guard_expression(true))
        .add_step(named::bash(&script));

    named::job(job)
}

fn check_style() -> NamedJob {
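    // Style gate: prettier, cargo fmt, TODO and keymap checks, plus a typos pass.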
    fn check_for_typos() -> Step<Use> {
        named::uses(
            "crate-ci",
            "typos",
            "2d0ce569feab1f8752f1dde43cc2f2aa53236e06",
        ) // v1.40.0
        .with(("config", "./typos.toml"))
    }
    named::job(
        release_job(&[])
            .runs_on(runners::LINUX_MEDIUM)
            .add_step(steps::checkout_repo())
            .add_step(steps::cache_rust_dependencies_namespace())
            .add_step(steps::setup_pnpm())
            .add_step(steps::script("./script/prettier"))
            .add_step(steps::cargo_fmt())
            .add_step(steps::script("./script/check-todos"))
            .add_step(steps::script("./script/check-keymaps"))
            .add_step(check_for_typos()),
    )
}

fn call_autofix(check_style: &NamedJob, run_tests_linux: &NamedJob) -> NamedJob {
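    // Kicks off the autofix_pr.yml workflow when style checks fail (or clippy failed
    // on the Linux test job) for a pull request not authored by zed-zippy[bot],
    // authenticating with an app token.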
    fn dispatch_autofix(run_tests_linux_name: &str) -> Step<Run> {
        let clippy_failed_expr = format!(
            "needs.{}.outputs.{} == 'true'",
            run_tests_linux_name, CLIPPY_FAILED_OUTPUT
        );
        named::bash(format!(
            "gh workflow run autofix_pr.yml -f pr_number=${{{{ github.event.pull_request.number }}}} -f run_clippy=${{{{ {} }}}}",
            clippy_failed_expr
        ))
        .add_env(("GITHUB_TOKEN", "${{ steps.get-app-token.outputs.token }}"))
    }

    let clippy_failed_expr = format!(
        "needs.{}.outputs.{} == 'true'",
        run_tests_linux.name, CLIPPY_FAILED_OUTPUT
    );
    let (authenticate, _token) = steps::authenticate_as_zippy();

    let job = Job::default()
        .runs_on(runners::LINUX_SMALL)
        .cond(Expression::new(format!(
            "(needs.{}.result == 'failure' || {}) && github.event_name == 'pull_request' && github.actor != 'zed-zippy[bot]'",
            check_style.name, clippy_failed_expr
        )))
        .needs(vec![check_style.name.clone(), run_tests_linux.name.clone()])
        .add_step(authenticate)
        .add_step(dispatch_autofix(&run_tests_linux.name));

    named::job(job)
}

fn check_dependencies() -> NamedJob {
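    // Dependency hygiene: cargo-machete for unused dependencies, a locked
    // `cargo update` to verify Cargo.lock is current, and GitHub's
    // dependency-review action for vulnerable dependencies on pull requests.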
    fn install_cargo_machete() -> Step<Use> {
        named::uses(
            "clechasseur",
            "rs-cargo",
            "8435b10f6e71c2e3d4d3b7573003a8ce4bfc6386", // v2
        )
        .add_with(("command", "install"))
        .add_with(("args", "cargo-machete@0.7.0"))
    }

    fn run_cargo_machete() -> Step<Use> {
        named::uses(
            "clechasseur",
            "rs-cargo",
            "8435b10f6e71c2e3d4d3b7573003a8ce4bfc6386", // v2
        )
        .add_with(("command", "machete"))
    }

    fn check_cargo_lock() -> Step<Run> {
        named::bash("cargo update --locked --workspace")
    }

    fn check_vulnerable_dependencies() -> Step<Use> {
        named::uses(
            "actions",
            "dependency-review-action",
            "67d4f4bd7a9b17a0db54d2a7519187c65e339de8", // v4
        )
        .if_condition(Expression::new("github.event_name == 'pull_request'"))
        .with(("license-check", false))
    }

    named::job(
        release_job(&[])
            .runs_on(runners::LINUX_SMALL)
            .add_step(steps::checkout_repo())
            .add_step(steps::cache_rust_dependencies_namespace())
            .add_step(install_cargo_machete())
            .add_step(run_cargo_machete())
            .add_step(check_cargo_lock())
            .add_step(check_vulnerable_dependencies()),
    )
}

fn check_workspace_binaries() -> NamedJob {
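    // Compile-only job: builds collab and then every workspace binary and example.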
    named::job(
        release_job(&[])
            .runs_on(runners::LINUX_LARGE)
            .add_step(steps::checkout_repo())
            .add_step(steps::setup_cargo_config(Platform::Linux))
            .add_step(steps::cache_rust_dependencies_namespace())
            .map(steps::install_linux_dependencies)
            .add_step(steps::script("cargo build -p collab"))
            .add_step(steps::script("cargo build --workspace --bins --examples"))
            .add_step(steps::cleanup_cargo_config(Platform::Linux)),
    )
}

pub const CLIPPY_FAILED_OUTPUT: &str = "clippy_failed";

pub(crate) fn run_platform_tests(platform: Platform) -> NamedJob {
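    // Per-platform clippy + nextest job. The Linux variant additionally exposes a
    // `clippy_failed` output (CLIPPY_FAILED_OUTPUT) that `call_autofix` consumes to
    // decide whether to request a clippy fix.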
    let runner = match platform {
        Platform::Windows => runners::WINDOWS_DEFAULT,
        Platform::Linux => runners::LINUX_DEFAULT,
        Platform::Mac => runners::MAC_DEFAULT,
    };
    NamedJob {
        name: format!("run_tests_{platform}"),
        job: release_job(&[])
            .runs_on(runner)
            .add_step(steps::checkout_repo())
            .add_step(steps::setup_cargo_config(platform))
            .when(platform == Platform::Linux, |this| {
                this.add_step(steps::cache_rust_dependencies_namespace())
            })
            .when(
                platform == Platform::Linux,
                steps::install_linux_dependencies,
            )
            .add_step(steps::setup_node())
            .add_step(steps::clippy(platform))
            .when(platform == Platform::Linux, |job| {
                job.add_step(steps::cargo_install_nextest())
            })
            .add_step(steps::clear_target_dir_if_large(platform))
            .add_step(steps::cargo_nextest(platform))
            .add_step(steps::cleanup_cargo_config(platform))
            .when(platform == Platform::Linux, |job| {
                job.outputs([(
                    CLIPPY_FAILED_OUTPUT.to_owned(),
                    format!(
                        "${{{{ steps.{}.outcome == 'failure' }}}}",
                        steps::CLIPPY_STEP_ID
                    ),
                )])
            }),
    }
}

pub(crate) fn check_postgres_and_protobuf_migrations() -> NamedJob {
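    // Protobuf breaking-change check: merges the PR base (or falls back to the merge
    // base with origin/main on pushes), then runs buf-breaking-action against that
    // revision for crates/proto/proto/.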
    fn remove_untracked_files() -> Step<Run> {
        named::bash("git clean -df")
    }

    fn ensure_fresh_merge() -> Step<Run> {
        named::bash(indoc::indoc! {r#"
            if [ -z "$GITHUB_BASE_REF" ];
            then
              echo "BUF_BASE_BRANCH=$(git merge-base origin/main HEAD)" >> "$GITHUB_ENV"
            else
              git checkout -B temp
              git merge -q "origin/$GITHUB_BASE_REF" -m "merge main into temp"
              echo "BUF_BASE_BRANCH=$GITHUB_BASE_REF" >> "$GITHUB_ENV"
            fi
        "#})
    }

    fn bufbuild_setup_action() -> Step<Use> {
        named::uses("bufbuild", "buf-setup-action", "v1")
            .add_with(("version", "v1.29.0"))
            .add_with(("github_token", vars::GITHUB_TOKEN))
    }

    fn bufbuild_breaking_action() -> Step<Use> {
        named::uses("bufbuild", "buf-breaking-action", "v1")
            .add_with(("input", "crates/proto/proto/"))
            .add_with(("against", "https://github.com/${GITHUB_REPOSITORY}.git#branch=${BUF_BASE_BRANCH},subdir=crates/proto/proto/"))
    }

    named::job(
        release_job(&[])
            .runs_on(runners::LINUX_DEFAULT)
            .add_env(("GIT_AUTHOR_NAME", "Protobuf Action"))
            .add_env(("GIT_AUTHOR_EMAIL", "ci@zed.dev"))
            .add_env(("GIT_COMMITTER_NAME", "Protobuf Action"))
            .add_env(("GIT_COMMITTER_EMAIL", "ci@zed.dev"))
            .add_step(steps::checkout_repo().with(("fetch-depth", 0))) // fetch full history
            .add_step(remove_untracked_files())
            .add_step(ensure_fresh_merge())
            .add_step(bufbuild_setup_action())
            .add_step(bufbuild_breaking_action()),
    )
}

fn doctests() -> NamedJob {
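    // Runs the workspace's rustdoc examples (`cargo test --workspace --doc`).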
    fn run_doctests() -> Step<Run> {
        named::bash(indoc::indoc! {r#"
            cargo test --workspace --doc --no-fail-fast
        "#})
        .id("run_doctests")
    }

    named::job(
        release_job(&[])
            .runs_on(runners::LINUX_DEFAULT)
            .add_step(steps::checkout_repo())
            .add_step(steps::cache_rust_dependencies_namespace())
            .map(steps::install_linux_dependencies)
            .add_step(steps::setup_cargo_config(Platform::Linux))
            .add_step(run_doctests())
            .add_step(steps::cleanup_cargo_config(Platform::Linux)),
    )
}

fn check_licenses() -> NamedJob {
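    // Runs ./script/check-licenses and ./script/generate-licenses on a small runner.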
    named::job(
        Job::default()
            .runs_on(runners::LINUX_SMALL)
            .add_step(steps::checkout_repo())
            .add_step(steps::cache_rust_dependencies_namespace())
            .add_step(steps::script("./script/check-licenses"))
            .add_step(steps::script("./script/generate-licenses")),
    )
}

fn check_docs() -> NamedJob {
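    // Docs job: lychee link-checks the markdown sources, builds the book with mdBook
    // into target/deploy/docs, then link-checks the generated HTML.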
    fn lychee_link_check(dir: &str) -> Step<Use> {
        named::uses(
            "lycheeverse",
            "lychee-action",
            "82202e5e9c2f4ef1a55a3d02563e1cb6041e5332",
        ) // v2.4.1
        .add_with(("args", format!("--no-progress --exclude '^http' '{dir}'")))
        .add_with(("fail", true))
        .add_with(("jobSummary", false))
    }

    fn install_mdbook() -> Step<Use> {
        named::uses(
            "peaceiris",
            "actions-mdbook",
            "ee69d230fe19748b7abf22df32acaa93833fad08", // v2
        )
        .with(("mdbook-version", "0.4.37"))
    }

    fn build_docs() -> Step<Run> {
        named::bash(indoc::indoc! {r#"
            mkdir -p target/deploy
            mdbook build ./docs --dest-dir=../target/deploy/docs/
        "#})
    }

    named::job(
        release_job(&[])
            .runs_on(runners::LINUX_LARGE)
            .add_step(steps::checkout_repo())
            .add_step(steps::setup_cargo_config(Platform::Linux))
            // todo(ci): un-inline build_docs/action.yml here
            .add_step(steps::cache_rust_dependencies_namespace())
            .add_step(
                lychee_link_check("./docs/src/**/*"), // check markdown links
            )
            .map(steps::install_linux_dependencies)
            .add_step(install_mdbook())
            .add_step(build_docs())
            .add_step(
                lychee_link_check("target/deploy/docs"), // check links in generated html
            ),
    )
}

pub(crate) fn check_scripts() -> NamedJob {
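    // CI metadata checks: shellcheck over the repo's scripts, actionlint over the
    // workflows, and a `cargo xtask workflows` run that must leave .github unchanged.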
    fn download_actionlint() -> Step<Run> {
        named::bash(
            "bash <(curl https://raw.githubusercontent.com/rhysd/actionlint/main/scripts/download-actionlint.bash)",
        )
    }

    fn run_actionlint() -> Step<Run> {
        named::bash(indoc::indoc! {r#"
            ${{ steps.get_actionlint.outputs.executable }} -color
        "#})
    }

    fn run_shellcheck() -> Step<Run> {
        named::bash("./script/shellcheck-scripts error")
    }

    fn check_xtask_workflows() -> Step<Run> {
        named::bash(indoc::indoc! {r#"
            cargo xtask workflows
            if ! git diff --exit-code .github; then
              echo "Error: .github directory has uncommitted changes after running 'cargo xtask workflows'"
              echo "Please run 'cargo xtask workflows' locally and commit the changes"
              exit 1
            fi
        "#})
    }

    named::job(
        release_job(&[])
            .runs_on(runners::LINUX_SMALL)
            .add_step(steps::checkout_repo())
            .add_step(run_shellcheck())
            .add_step(download_actionlint().id("get_actionlint"))
            .add_step(run_actionlint())
            .add_step(check_xtask_workflows()),
    )
}