use gh_workflow::{
    Concurrency, Event, Expression, Job, PullRequest, Push, Run, Step, Use, Workflow,
};
use indexmap::IndexMap;

use crate::tasks::workflows::{
    nix_build::build_nix,
    runners::Arch,
    steps::{BASH_SHELL, CommonJobConditions, repository_owner_guard_expression},
    vars::{self, PathCondition},
};

use super::{
    runners::{self, Platform},
    steps::{self, FluentBuilder, NamedJob, named, release_job},
};

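// Generates the main CI workflow: the `orchestrate` job inspects the changed
// files and exposes one output per `PathCondition`, and the remaining jobs are
// guarded by those outputs. The rendered YAML is kept in sync by running
// `cargo xtask workflows` (checked in CI by `check_xtask_workflows` below).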
pub(crate) fn run_tests() -> Workflow {
    // Anything that should potentially skip the full test suite goes in this regex:
    // - docs/
    // - script/update_top_ranking_issues/
    // - .github/ISSUE_TEMPLATE/
    // - .github/workflows/ (except .github/workflows/run_tests.yml)
    let should_run_tests = PathCondition::inverted(
        "run_tests",
        r"^(docs/|script/update_top_ranking_issues/|\.github/(ISSUE_TEMPLATE|workflows/(?!run_tests)))",
    );
    let should_check_docs = PathCondition::new("run_docs", r"^(docs/|crates/.*\.rs)");
    let should_check_scripts = PathCondition::new(
        "run_action_checks",
        r"^\.github/(workflows/|actions/|actionlint.yml)|tooling/xtask|script/",
    );
    let should_check_licences =
        PathCondition::new("run_licenses", r"^(Cargo.lock|script/.*licenses)");
    let should_build_nix = PathCondition::new(
        "run_nix",
        r"^(nix/|flake\.|Cargo\.|rust-toolchain.toml|\.cargo/config.toml)",
    );

    let orchestrate = orchestrate(&[
        &should_check_scripts,
        &should_check_docs,
        &should_check_licences,
        &should_build_nix,
        &should_run_tests,
    ]);

    let mut jobs = vec![
        orchestrate,
        check_style(),
        should_run_tests.guard(clippy(Platform::Windows)),
        should_run_tests.guard(clippy(Platform::Linux)),
        should_run_tests.guard(clippy(Platform::Mac)),
        should_run_tests.guard(run_platform_tests(Platform::Windows)),
        should_run_tests.guard(run_platform_tests(Platform::Linux)),
        should_run_tests.guard(run_platform_tests(Platform::Mac)),
        should_run_tests.guard(doctests()),
        should_run_tests.guard(check_workspace_binaries()),
        should_run_tests.guard(check_dependencies()), // could be more specific here?
        should_check_docs.guard(check_docs()),
        should_check_licences.guard(check_licenses()),
        should_check_scripts.guard(check_scripts()),
        should_build_nix.guard(build_nix(
            Platform::Linux,
            Arch::X86_64,
            "debug",
            // *don't* cache the built output
            Some("-zed-editor-[0-9.]*-nightly"),
            &[],
        )),
        should_build_nix.guard(build_nix(
            Platform::Mac,
            Arch::AARCH64,
            "debug",
            // *don't* cache the built output
            Some("-zed-editor-[0-9.]*-nightly"),
            &[],
        )),
    ];
    let tests_pass = tests_pass(&jobs);

    jobs.push(should_run_tests.guard(check_postgres_and_protobuf_migrations())); // could be more specific here?

    named::workflow()
        .add_event(
            Event::default()
                .push(
                    Push::default()
                        .add_branch("main")
                        .add_branch("v[0-9]+.[0-9]+.x"),
                )
                .pull_request(PullRequest::default().add_branch("**")),
        )
        .concurrency(
            Concurrency::default()
                .group(concat!(
                    "${{ github.workflow }}-${{ github.ref_name }}-",
                    "${{ github.ref_name == 'main' && github.sha || 'anysha' }}"
                ))
                .cancel_in_progress(true),
        )
        .add_env(("CARGO_TERM_COLOR", "always"))
        .add_env(("RUST_BACKTRACE", 1))
        .add_env(("CARGO_INCREMENTAL", 0))
        .map(|mut workflow| {
            for job in jobs {
                workflow = workflow.add_job(job.name, job.job);
            }
            workflow
        })
        .add_job(tests_pass.name, tests_pass.job)
}

// Generates a bash script that checks changed files against regex patterns
// and sets GitHub output variables accordingly.
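// Each rule appends one `check_pattern` call to the script; e.g. the
// `run_docs` rule defined in `run_tests` produces a line roughly like:
//   check_pattern "run_docs" '^(docs/|crates/.*\.rs)' -qP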
pub fn orchestrate(rules: &[&PathCondition]) -> NamedJob {
    let name = "orchestrate".to_owned();
    let step_name = "filter".to_owned();
    let mut script = String::new();

    script.push_str(indoc::indoc! {r#"
        if [ -z "$GITHUB_BASE_REF" ]; then
          echo "Not in a PR context (i.e., push to main/stable/preview)"
          COMPARE_REV="$(git rev-parse HEAD~1)"
        else
          echo "In a PR context comparing to pull_request.base.ref"
          git fetch origin "$GITHUB_BASE_REF" --depth=350
          COMPARE_REV="$(git merge-base "origin/${GITHUB_BASE_REF}" HEAD)"
        fi
        CHANGED_FILES="$(git diff --name-only "$COMPARE_REV" ${{ github.sha }})"

        check_pattern() {
          local output_name="$1"
          local pattern="$2"
          local grep_arg="$3"

          echo "$CHANGED_FILES" | grep "$grep_arg" "$pattern" && \
            echo "${output_name}=true" >> "$GITHUB_OUTPUT" || \
            echo "${output_name}=false" >> "$GITHUB_OUTPUT"
        }

    "#});

    let mut outputs = IndexMap::new();

    for rule in rules {
        assert!(
            rule.set_by_step
                .borrow_mut()
                .replace(name.clone())
                .is_none()
        );
        assert!(
            outputs
                .insert(
                    rule.name.to_owned(),
                    format!("${{{{ steps.{}.outputs.{} }}}}", step_name, rule.name)
                )
                .is_none()
        );

        let grep_arg = if rule.invert { "-qvP" } else { "-qP" };
        script.push_str(&format!(
            "check_pattern \"{}\" '{}' {}\n",
            rule.name, rule.pattern, grep_arg
        ));
    }

    let job = Job::default()
        .runs_on(runners::LINUX_SMALL)
        .with_repository_owner_guard()
        .outputs(outputs)
        .add_step(steps::checkout_repo().add_with((
            "fetch-depth",
            "${{ github.ref == 'refs/heads/main' && 2 || 350 }}",
        )))
        .add_step(
            Step::new(step_name.clone())
                .run(script)
                .id(step_name)
                .shell(BASH_SHELL),
        );

    NamedJob { name, job }
}

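// Aggregates the results of all other jobs: the generated job `needs` every
// job passed in and fails unless each result is "success" or "skipped".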
pub fn tests_pass(jobs: &[NamedJob]) -> NamedJob {
    let mut script = String::from(indoc::indoc! {r#"
        set +x
        EXIT_CODE=0

        check_result() {
          echo "* $1: $2"
          if [[ "$2" != "skipped" && "$2" != "success" ]]; then EXIT_CODE=1; fi
        }

    "#});

    script.push_str(
        &jobs
            .iter()
            .map(|job| {
                format!(
                    "check_result \"{}\" \"${{{{ needs.{}.result }}}}\"",
                    job.name, job.name
                )
            })
            .collect::<Vec<_>>()
            .join("\n"),
    );

    script.push_str("\n\nexit $EXIT_CODE\n");

    let job = Job::default()
        .runs_on(runners::LINUX_SMALL)
        .needs(
            jobs.iter()
                .map(|j| j.name.to_string())
                .collect::<Vec<String>>(),
        )
        .cond(repository_owner_guard_expression(true))
        .add_step(named::bash(&script));

    named::job(job)
}

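// Style checks: prettier, cargo fmt, the check-todos and check-keymaps
// scripts, and a typos scan.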
fn check_style() -> NamedJob {
    fn check_for_typos() -> Step<Use> {
        named::uses(
            "crate-ci",
            "typos",
            "2d0ce569feab1f8752f1dde43cc2f2aa53236e06", // v1.40.0
        )
        .with(("config", "./typos.toml"))
    }
    named::job(
        release_job(&[])
            .runs_on(runners::LINUX_MEDIUM)
            .add_step(steps::checkout_repo())
            .add_step(steps::cache_rust_dependencies_namespace())
            .add_step(steps::setup_pnpm())
            .add_step(steps::prettier())
            .add_step(steps::cargo_fmt())
            .add_step(steps::script("./script/check-todos"))
            .add_step(steps::script("./script/check-keymaps"))
            .add_step(check_for_typos()),
    )
}

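// Dependency hygiene: cargo-machete for unused dependencies, a locked
// `cargo update` to verify Cargo.lock needs no changes, and (on pull requests
// only) GitHub's dependency-review-action for known-vulnerable dependencies.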
fn check_dependencies() -> NamedJob {
    fn install_cargo_machete() -> Step<Use> {
        named::uses(
            "clechasseur",
            "rs-cargo",
            "8435b10f6e71c2e3d4d3b7573003a8ce4bfc6386", // v2
        )
        .add_with(("command", "install"))
        .add_with(("args", "cargo-machete@0.7.0"))
    }

    fn run_cargo_machete() -> Step<Use> {
        named::uses(
            "clechasseur",
            "rs-cargo",
            "8435b10f6e71c2e3d4d3b7573003a8ce4bfc6386", // v2
        )
        .add_with(("command", "machete"))
    }

    fn check_cargo_lock() -> Step<Run> {
        named::bash("cargo update --locked --workspace")
    }

    fn check_vulnerable_dependencies() -> Step<Use> {
        named::uses(
            "actions",
            "dependency-review-action",
            "67d4f4bd7a9b17a0db54d2a7519187c65e339de8", // v4
        )
        .if_condition(Expression::new("github.event_name == 'pull_request'"))
        .with(("license-check", false))
    }

    named::job(
        release_job(&[])
            .runs_on(runners::LINUX_SMALL)
            .add_step(steps::checkout_repo())
            .add_step(steps::cache_rust_dependencies_namespace())
            .add_step(install_cargo_machete())
            .add_step(run_cargo_machete())
            .add_step(check_cargo_lock())
            .add_step(check_vulnerable_dependencies()),
    )
}

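// Builds `collab` and then every workspace binary and example on Linux.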
fn check_workspace_binaries() -> NamedJob {
    named::job(
        release_job(&[])
            .runs_on(runners::LINUX_LARGE)
            .add_step(steps::checkout_repo())
            .add_step(steps::setup_cargo_config(Platform::Linux))
            .add_step(steps::cache_rust_dependencies_namespace())
            .map(steps::install_linux_dependencies)
            .add_step(steps::script("cargo build -p collab"))
            .add_step(steps::script("cargo build --workspace --bins --examples"))
            .add_step(steps::cleanup_cargo_config(Platform::Linux)),
    )
}

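// Runs clippy for the given platform; Linux additionally gets the Rust
// dependency cache and the Linux dependency install steps.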
pub(crate) fn clippy(platform: Platform) -> NamedJob {
    let runner = match platform {
        Platform::Windows => runners::WINDOWS_DEFAULT,
        Platform::Linux => runners::LINUX_DEFAULT,
        Platform::Mac => runners::MAC_DEFAULT,
    };
    NamedJob {
        name: format!("clippy_{platform}"),
        job: release_job(&[])
            .runs_on(runner)
            .add_step(steps::checkout_repo())
            .add_step(steps::setup_cargo_config(platform))
            .when(platform == Platform::Linux, |this| {
                this.add_step(steps::cache_rust_dependencies_namespace())
            })
            .when(
                platform == Platform::Linux,
                steps::install_linux_dependencies,
            )
            .add_step(steps::clippy(platform)),
    }
}

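// Runs the test suite with cargo-nextest on the given platform, clearing the
// target directory first if it has grown too large.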
pub(crate) fn run_platform_tests(platform: Platform) -> NamedJob {
    let runner = match platform {
        Platform::Windows => runners::WINDOWS_DEFAULT,
        Platform::Linux => runners::LINUX_DEFAULT,
        Platform::Mac => runners::MAC_DEFAULT,
    };
    NamedJob {
        name: format!("run_tests_{platform}"),
        job: release_job(&[])
            .runs_on(runner)
            .add_step(steps::checkout_repo())
            .add_step(steps::setup_cargo_config(platform))
            .when(platform == Platform::Linux, |this| {
                this.add_step(steps::cache_rust_dependencies_namespace())
            })
            .when(
                platform == Platform::Linux,
                steps::install_linux_dependencies,
            )
            .add_step(steps::setup_node())
            .when(platform == Platform::Linux, |job| {
                job.add_step(steps::cargo_install_nextest())
            })
            .add_step(steps::clear_target_dir_if_large(platform))
            .add_step(steps::cargo_nextest(platform))
            .add_step(steps::cleanup_cargo_config(platform)),
    }
}

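// Verifies protobuf backwards compatibility with buf-breaking-action,
// comparing against the PR base branch (or the merge-base with main on pushes).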
pub(crate) fn check_postgres_and_protobuf_migrations() -> NamedJob {
    fn remove_untracked_files() -> Step<Run> {
        named::bash("git clean -df")
    }

    fn ensure_fresh_merge() -> Step<Run> {
        named::bash(indoc::indoc! {r#"
            if [ -z "$GITHUB_BASE_REF" ];
            then
              echo "BUF_BASE_BRANCH=$(git merge-base origin/main HEAD)" >> "$GITHUB_ENV"
            else
              git checkout -B temp
              git merge -q "origin/$GITHUB_BASE_REF" -m "merge main into temp"
              echo "BUF_BASE_BRANCH=$GITHUB_BASE_REF" >> "$GITHUB_ENV"
            fi
        "#})
    }

    fn bufbuild_setup_action() -> Step<Use> {
        named::uses("bufbuild", "buf-setup-action", "v1")
            .add_with(("version", "v1.29.0"))
            .add_with(("github_token", vars::GITHUB_TOKEN))
    }

    fn bufbuild_breaking_action() -> Step<Use> {
        named::uses("bufbuild", "buf-breaking-action", "v1")
            .add_with(("input", "crates/proto/proto/"))
            .add_with(("against", "https://github.com/${GITHUB_REPOSITORY}.git#branch=${BUF_BASE_BRANCH},subdir=crates/proto/proto/"))
    }

    named::job(
        release_job(&[])
            .runs_on(runners::LINUX_DEFAULT)
            .add_env(("GIT_AUTHOR_NAME", "Protobuf Action"))
            .add_env(("GIT_AUTHOR_EMAIL", "ci@zed.dev"))
            .add_env(("GIT_COMMITTER_NAME", "Protobuf Action"))
            .add_env(("GIT_COMMITTER_EMAIL", "ci@zed.dev"))
            .add_step(steps::checkout_repo().with(("fetch-depth", 0))) // fetch full history
            .add_step(remove_untracked_files())
            .add_step(ensure_fresh_merge())
            .add_step(bufbuild_setup_action())
            .add_step(bufbuild_breaking_action()),
    )
}

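// Runs `cargo test --doc` across the whole workspace on Linux.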
fn doctests() -> NamedJob {
    fn run_doctests() -> Step<Run> {
        named::bash(indoc::indoc! {r#"
            cargo test --workspace --doc --no-fail-fast
        "#})
        .id("run_doctests")
    }

    named::job(
        release_job(&[])
            .runs_on(runners::LINUX_DEFAULT)
            .add_step(steps::checkout_repo())
            .add_step(steps::cache_rust_dependencies_namespace())
            .map(steps::install_linux_dependencies)
            .add_step(steps::setup_cargo_config(Platform::Linux))
            .add_step(run_doctests())
            .add_step(steps::cleanup_cargo_config(Platform::Linux)),
    )
}

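// License checks: runs the check-licenses and generate-licenses scripts.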
fn check_licenses() -> NamedJob {
    named::job(
        Job::default()
            .runs_on(runners::LINUX_SMALL)
            .add_step(steps::checkout_repo())
            .add_step(steps::cache_rust_dependencies_namespace())
            .add_step(steps::script("./script/check-licenses"))
            .add_step(steps::script("./script/generate-licenses")),
    )
}

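// Docs checks: link-checks the markdown sources with lychee, builds the book
// with mdBook (after generating action metadata), then link-checks the
// generated HTML as well.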
fn check_docs() -> NamedJob {
    fn lychee_link_check(dir: &str) -> Step<Use> {
        named::uses(
            "lycheeverse",
            "lychee-action",
            "82202e5e9c2f4ef1a55a3d02563e1cb6041e5332", // v2.4.1
        )
        .add_with(("args", format!("--no-progress --exclude '^http' '{dir}'")))
        .add_with(("fail", true))
        .add_with(("jobSummary", false))
    }

    fn install_mdbook() -> Step<Use> {
        named::uses(
            "peaceiris",
            "actions-mdbook",
            "ee69d230fe19748b7abf22df32acaa93833fad08", // v2
        )
        .with(("mdbook-version", "0.4.37"))
    }

    fn build_docs() -> Step<Run> {
        named::bash(indoc::indoc! {r#"
            mkdir -p target/deploy
            mdbook build ./docs --dest-dir=../target/deploy/docs/
        "#})
    }

    named::job(
        release_job(&[])
            .runs_on(runners::LINUX_LARGE)
            .add_step(steps::checkout_repo())
            .add_step(steps::setup_cargo_config(Platform::Linux))
            // todo(ci): un-inline build_docs/action.yml here
            .add_step(steps::cache_rust_dependencies_namespace())
            .add_step(
                lychee_link_check("./docs/src/**/*"), // check markdown links
            )
            .map(steps::install_linux_dependencies)
            .add_step(steps::script("./script/generate-action-metadata"))
            .add_step(install_mdbook())
            .add_step(build_docs())
            .add_step(
                lychee_link_check("target/deploy/docs"), // check links in generated html
            ),
    )
}

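// Lints shell scripts with shellcheck, runs actionlint on the workflows, and
// fails if `.github/` is out of date with the output of `cargo xtask workflows`.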
pub(crate) fn check_scripts() -> NamedJob {
    fn download_actionlint() -> Step<Run> {
        named::bash(
            "bash <(curl https://raw.githubusercontent.com/rhysd/actionlint/main/scripts/download-actionlint.bash)",
        )
    }

    fn run_actionlint() -> Step<Run> {
        named::bash(indoc::indoc! {r#"
            ${{ steps.get_actionlint.outputs.executable }} -color
        "#})
    }

    fn run_shellcheck() -> Step<Run> {
        named::bash("./script/shellcheck-scripts error")
    }

    fn check_xtask_workflows() -> Step<Run> {
        named::bash(indoc::indoc! {r#"
            cargo xtask workflows
            if ! git diff --exit-code .github; then
              echo "Error: .github directory has uncommitted changes after running 'cargo xtask workflows'"
              echo "Please run 'cargo xtask workflows' locally and commit the changes"
              exit 1
            fi
        "#})
    }

    named::job(
        release_job(&[])
            .runs_on(runners::LINUX_SMALL)
            .add_step(steps::checkout_repo())
            .add_step(run_shellcheck())
            .add_step(download_actionlint().id("get_actionlint"))
            .add_step(run_actionlint())
            .add_step(check_xtask_workflows()),
    )
}