1use gh_workflow::{
2 Concurrency, Container, Event, Expression, Job, Port, PullRequest, Push, Run, Step, Use,
3 Workflow,
4};
5use indexmap::IndexMap;
6
7use crate::tasks::workflows::{
8 nix_build::build_nix,
9 runners::Arch,
10 steps::{BASH_SHELL, CommonJobConditions, repository_owner_guard_expression},
11 vars::{self, PathCondition},
12};
13
14use super::{
15 runners::{self, Platform},
16 steps::{self, FluentBuilder, NamedJob, named, release_job},
17};
18
/// Builds the `run_tests` CI workflow: an `orchestrate` job diffs the changed
/// files and exposes boolean outputs which gate the lint/test/build jobs, and
/// a trailing `tests_pass` job aggregates their results into a single status.
pub(crate) fn run_tests() -> Workflow {
    // Specify anything which should potentially skip full test suite in this regex:
    // - docs/
    // - script/update_top_ranking_issues/
    // - .github/ISSUE_TEMPLATE/
    // - .github/workflows/ (except paths matching "run_tests", i.e. this workflow)
    let should_run_tests = PathCondition::inverted(
        "run_tests",
        r"^(docs/|script/update_top_ranking_issues/|\.github/(ISSUE_TEMPLATE|workflows/(?!run_tests)))",
    );
    let should_check_docs = PathCondition::new("run_docs", r"^(docs/|crates/.*\.rs)");
    let should_check_scripts = PathCondition::new(
        "run_action_checks",
        r"^\.github/(workflows/|actions/|actionlint.yml)|tooling/xtask|script/",
    );
    let should_check_licences =
        PathCondition::new("run_licenses", r"^(Cargo.lock|script/.*licenses)");
    let should_build_nix = PathCondition::new(
        "run_nix",
        r"^(nix/|flake\.|Cargo\.|rust-toolchain.toml|\.cargo/config.toml)",
    );

    // orchestrate() claims every condition passed here; each PathCondition is
    // asserted (inside orchestrate) to be set by exactly one job.
    let orchestrate = orchestrate(&[
        &should_check_scripts,
        &should_check_docs,
        &should_check_licences,
        &should_build_nix,
        &should_run_tests,
    ]);

    let mut jobs = vec![
        orchestrate,
        check_style(),
        should_run_tests.guard(clippy(Platform::Windows)),
        should_run_tests.guard(clippy(Platform::Linux)),
        should_run_tests.guard(clippy(Platform::Mac)),
        should_run_tests.guard(run_platform_tests(Platform::Windows)),
        should_run_tests.guard(run_platform_tests(Platform::Linux)),
        should_run_tests.guard(run_platform_tests(Platform::Mac)),
        should_run_tests.guard(doctests()),
        should_run_tests.guard(check_workspace_binaries()),
        should_run_tests.guard(check_dependencies()), // could be more specific here?
        should_check_docs.guard(check_docs()),
        should_check_licences.guard(check_licenses()),
        should_check_scripts.guard(check_scripts()),
        should_build_nix.guard(build_nix(
            Platform::Linux,
            Arch::X86_64,
            "debug",
            // *don't* cache the built output
            Some("-zed-editor-[0-9.]*-nightly"),
            &[],
        )),
        should_build_nix.guard(build_nix(
            Platform::Mac,
            Arch::AARCH64,
            "debug",
            // *don't* cache the built output
            Some("-zed-editor-[0-9.]*-nightly"),
            &[],
        )),
    ];
    // tests_pass is computed *before* the migrations job is appended below, so
    // that job is absent from tests_pass's `needs` list and cannot block the
    // aggregate status. NOTE(review): confirm this exclusion is intentional.
    let tests_pass = tests_pass(&jobs);

    jobs.push(should_run_tests.guard(check_postgres_and_protobuf_migrations())); // could be more specific here?

    named::workflow()
        .add_event(
            Event::default()
                .push(
                    Push::default()
                        .add_branch("main")
                        .add_branch("v[0-9]+.[0-9]+.x"),
                )
                .pull_request(PullRequest::default().add_branch("**")),
        )
        // One concurrency group per workflow+ref; on main each commit gets its
        // own group (keyed by sha) so pushes don't cancel each other, while on
        // other refs newer runs cancel in-flight ones.
        .concurrency(
            Concurrency::default()
                .group(concat!(
                    "${{ github.workflow }}-${{ github.ref_name }}-",
                    "${{ github.ref_name == 'main' && github.sha || 'anysha' }}"
                ))
                .cancel_in_progress(true),
        )
        .add_env(("CARGO_TERM_COLOR", "always"))
        .add_env(("RUST_BACKTRACE", 1))
        .add_env(("CARGO_INCREMENTAL", 0))
        .map(|mut workflow| {
            for job in jobs {
                workflow = workflow.add_job(job.name, job.job)
            }
            workflow
        })
        .add_job(tests_pass.name, tests_pass.job)
}
114
115// Generates a bash script that checks changed files against regex patterns
116// and sets GitHub output variables accordingly
117pub fn orchestrate(rules: &[&PathCondition]) -> NamedJob {
118 let name = "orchestrate".to_owned();
119 let step_name = "filter".to_owned();
120 let mut script = String::new();
121
122 script.push_str(indoc::indoc! {r#"
123 if [ -z "$GITHUB_BASE_REF" ]; then
124 echo "Not in a PR context (i.e., push to main/stable/preview)"
125 COMPARE_REV="$(git rev-parse HEAD~1)"
126 else
127 echo "In a PR context comparing to pull_request.base.ref"
128 git fetch origin "$GITHUB_BASE_REF" --depth=350
129 COMPARE_REV="$(git merge-base "origin/${GITHUB_BASE_REF}" HEAD)"
130 fi
131 CHANGED_FILES="$(git diff --name-only "$COMPARE_REV" ${{ github.sha }})"
132
133 check_pattern() {
134 local output_name="$1"
135 local pattern="$2"
136 local grep_arg="$3"
137
138 echo "$CHANGED_FILES" | grep "$grep_arg" "$pattern" && \
139 echo "${output_name}=true" >> "$GITHUB_OUTPUT" || \
140 echo "${output_name}=false" >> "$GITHUB_OUTPUT"
141 }
142
143 "#});
144
145 let mut outputs = IndexMap::new();
146
147 for rule in rules {
148 assert!(
149 rule.set_by_step
150 .borrow_mut()
151 .replace(name.clone())
152 .is_none()
153 );
154 assert!(
155 outputs
156 .insert(
157 rule.name.to_owned(),
158 format!("${{{{ steps.{}.outputs.{} }}}}", step_name, rule.name)
159 )
160 .is_none()
161 );
162
163 let grep_arg = if rule.invert { "-qvP" } else { "-qP" };
164 script.push_str(&format!(
165 "check_pattern \"{}\" '{}' {}\n",
166 rule.name, rule.pattern, grep_arg
167 ));
168 }
169
170 let job = Job::default()
171 .runs_on(runners::LINUX_SMALL)
172 .with_repository_owner_guard()
173 .outputs(outputs)
174 .add_step(steps::checkout_repo().add_with((
175 "fetch-depth",
176 "${{ github.ref == 'refs/heads/main' && 2 || 350 }}",
177 )))
178 .add_step(
179 Step::new(step_name.clone())
180 .run(script)
181 .id(step_name)
182 .shell(BASH_SHELL),
183 );
184
185 NamedJob { name, job }
186}
187
188pub fn tests_pass(jobs: &[NamedJob]) -> NamedJob {
189 let mut script = String::from(indoc::indoc! {r#"
190 set +x
191 EXIT_CODE=0
192
193 check_result() {
194 echo "* $1: $2"
195 if [[ "$2" != "skipped" && "$2" != "success" ]]; then EXIT_CODE=1; fi
196 }
197
198 "#});
199
200 script.push_str(
201 &jobs
202 .iter()
203 .map(|job| {
204 format!(
205 "check_result \"{}\" \"${{{{ needs.{}.result }}}}\"",
206 job.name, job.name
207 )
208 })
209 .collect::<Vec<_>>()
210 .join("\n"),
211 );
212
213 script.push_str("\n\nexit $EXIT_CODE\n");
214
215 let job = Job::default()
216 .runs_on(runners::LINUX_SMALL)
217 .needs(
218 jobs.iter()
219 .map(|j| j.name.to_string())
220 .collect::<Vec<String>>(),
221 )
222 .cond(repository_owner_guard_expression(true))
223 .add_step(named::bash(&script));
224
225 named::job(job)
226}
227
228fn check_style() -> NamedJob {
229 fn check_for_typos() -> Step<Use> {
230 named::uses(
231 "crate-ci",
232 "typos",
233 "2d0ce569feab1f8752f1dde43cc2f2aa53236e06",
234 ) // v1.40.0
235 .with(("config", "./typos.toml"))
236 }
237 named::job(
238 release_job(&[])
239 .runs_on(runners::LINUX_MEDIUM)
240 .add_step(steps::checkout_repo())
241 .add_step(steps::cache_rust_dependencies_namespace())
242 .add_step(steps::setup_pnpm())
243 .add_step(steps::prettier())
244 .add_step(steps::cargo_fmt())
245 .add_step(steps::script("./script/check-todos"))
246 .add_step(steps::script("./script/check-keymaps"))
247 .add_step(check_for_typos()),
248 )
249}
250
251fn check_dependencies() -> NamedJob {
252 fn install_cargo_machete() -> Step<Use> {
253 named::uses(
254 "clechasseur",
255 "rs-cargo",
256 "8435b10f6e71c2e3d4d3b7573003a8ce4bfc6386", // v2
257 )
258 .add_with(("command", "install"))
259 .add_with(("args", "cargo-machete@0.7.0"))
260 }
261
262 fn run_cargo_machete() -> Step<Use> {
263 named::uses(
264 "clechasseur",
265 "rs-cargo",
266 "8435b10f6e71c2e3d4d3b7573003a8ce4bfc6386", // v2
267 )
268 .add_with(("command", "machete"))
269 }
270
271 fn check_cargo_lock() -> Step<Run> {
272 named::bash("cargo update --locked --workspace")
273 }
274
275 fn check_vulnerable_dependencies() -> Step<Use> {
276 named::uses(
277 "actions",
278 "dependency-review-action",
279 "67d4f4bd7a9b17a0db54d2a7519187c65e339de8", // v4
280 )
281 .if_condition(Expression::new("github.event_name == 'pull_request'"))
282 .with(("license-check", false))
283 }
284
285 named::job(
286 release_job(&[])
287 .runs_on(runners::LINUX_SMALL)
288 .add_step(steps::checkout_repo())
289 .add_step(steps::cache_rust_dependencies_namespace())
290 .add_step(install_cargo_machete())
291 .add_step(run_cargo_machete())
292 .add_step(check_cargo_lock())
293 .add_step(check_vulnerable_dependencies()),
294 )
295}
296
297fn check_workspace_binaries() -> NamedJob {
298 named::job(
299 release_job(&[])
300 .runs_on(runners::LINUX_LARGE)
301 .add_step(steps::checkout_repo())
302 .add_step(steps::setup_cargo_config(Platform::Linux))
303 .add_step(steps::cache_rust_dependencies_namespace())
304 .map(steps::install_linux_dependencies)
305 .add_step(steps::script("cargo build -p collab"))
306 .add_step(steps::script("cargo build --workspace --bins --examples"))
307 .add_step(steps::cleanup_cargo_config(Platform::Linux)),
308 )
309}
310
311pub(crate) fn clippy(platform: Platform) -> NamedJob {
312 let runner = match platform {
313 Platform::Windows => runners::WINDOWS_DEFAULT,
314 Platform::Linux => runners::LINUX_DEFAULT,
315 Platform::Mac => runners::MAC_DEFAULT,
316 };
317 NamedJob {
318 name: format!("clippy_{platform}"),
319 job: release_job(&[])
320 .runs_on(runner)
321 .add_step(steps::checkout_repo())
322 .add_step(steps::setup_cargo_config(platform))
323 .when(
324 platform == Platform::Linux || platform == Platform::Mac,
325 |this| this.add_step(steps::cache_rust_dependencies_namespace()),
326 )
327 .when(
328 platform == Platform::Linux,
329 steps::install_linux_dependencies,
330 )
331 .add_step(steps::clippy(platform)),
332 }
333}
334
335pub(crate) fn run_platform_tests(platform: Platform) -> NamedJob {
336 let runner = match platform {
337 Platform::Windows => runners::WINDOWS_DEFAULT,
338 Platform::Linux => runners::LINUX_DEFAULT,
339 Platform::Mac => runners::MAC_DEFAULT,
340 };
341 NamedJob {
342 name: format!("run_tests_{platform}"),
343 job: release_job(&[])
344 .runs_on(runner)
345 .when(platform == Platform::Linux, |job| {
346 job.add_service(
347 "postgres",
348 Container::new("postgres:15")
349 .add_env(("POSTGRES_HOST_AUTH_METHOD", "trust"))
350 .ports(vec![Port::Name("5432:5432".into())])
351 .options(
352 "--health-cmd pg_isready \
353 --health-interval 500ms \
354 --health-timeout 5s \
355 --health-retries 10",
356 ),
357 )
358 })
359 .add_step(steps::checkout_repo())
360 .add_step(steps::setup_cargo_config(platform))
361 .when(
362 platform == Platform::Linux || platform == Platform::Mac,
363 |this| this.add_step(steps::cache_rust_dependencies_namespace()),
364 )
365 .when(
366 platform == Platform::Linux,
367 steps::install_linux_dependencies,
368 )
369 .add_step(steps::setup_node())
370 .when(
371 platform == Platform::Linux || platform == Platform::Mac,
372 |job| job.add_step(steps::cargo_install_nextest()),
373 )
374 .add_step(steps::clear_target_dir_if_large(platform))
375 .add_step(steps::cargo_nextest(platform))
376 .add_step(steps::cleanup_cargo_config(platform)),
377 }
378}
379
380pub(crate) fn check_postgres_and_protobuf_migrations() -> NamedJob {
381 fn remove_untracked_files() -> Step<Run> {
382 named::bash("git clean -df")
383 }
384
385 fn ensure_fresh_merge() -> Step<Run> {
386 named::bash(indoc::indoc! {r#"
387 if [ -z "$GITHUB_BASE_REF" ];
388 then
389 echo "BUF_BASE_BRANCH=$(git merge-base origin/main HEAD)" >> "$GITHUB_ENV"
390 else
391 git checkout -B temp
392 git merge -q "origin/$GITHUB_BASE_REF" -m "merge main into temp"
393 echo "BUF_BASE_BRANCH=$GITHUB_BASE_REF" >> "$GITHUB_ENV"
394 fi
395 "#})
396 }
397
398 fn bufbuild_setup_action() -> Step<Use> {
399 named::uses("bufbuild", "buf-setup-action", "v1")
400 .add_with(("version", "v1.29.0"))
401 .add_with(("github_token", vars::GITHUB_TOKEN))
402 }
403
404 fn bufbuild_breaking_action() -> Step<Use> {
405 named::uses("bufbuild", "buf-breaking-action", "v1").add_with(("input", "crates/proto/proto/"))
406 .add_with(("against", "https://github.com/${GITHUB_REPOSITORY}.git#branch=${BUF_BASE_BRANCH},subdir=crates/proto/proto/"))
407 }
408
409 named::job(
410 release_job(&[])
411 .runs_on(runners::LINUX_DEFAULT)
412 .add_env(("GIT_AUTHOR_NAME", "Protobuf Action"))
413 .add_env(("GIT_AUTHOR_EMAIL", "ci@zed.dev"))
414 .add_env(("GIT_COMMITTER_NAME", "Protobuf Action"))
415 .add_env(("GIT_COMMITTER_EMAIL", "ci@zed.dev"))
416 .add_step(steps::checkout_repo().with(("fetch-depth", 0))) // fetch full history
417 .add_step(remove_untracked_files())
418 .add_step(ensure_fresh_merge())
419 .add_step(bufbuild_setup_action())
420 .add_step(bufbuild_breaking_action()),
421 )
422}
423
424fn doctests() -> NamedJob {
425 fn run_doctests() -> Step<Run> {
426 named::bash(indoc::indoc! {r#"
427 cargo test --workspace --doc --no-fail-fast
428 "#})
429 .id("run_doctests")
430 }
431
432 named::job(
433 release_job(&[])
434 .runs_on(runners::LINUX_DEFAULT)
435 .add_step(steps::checkout_repo())
436 .add_step(steps::cache_rust_dependencies_namespace())
437 .map(steps::install_linux_dependencies)
438 .add_step(steps::setup_cargo_config(Platform::Linux))
439 .add_step(run_doctests())
440 .add_step(steps::cleanup_cargo_config(Platform::Linux)),
441 )
442}
443
444fn check_licenses() -> NamedJob {
445 named::job(
446 Job::default()
447 .runs_on(runners::LINUX_SMALL)
448 .add_step(steps::checkout_repo())
449 .add_step(steps::cache_rust_dependencies_namespace())
450 .add_step(steps::script("./script/check-licenses"))
451 .add_step(steps::script("./script/generate-licenses")),
452 )
453}
454
455fn check_docs() -> NamedJob {
456 fn lychee_link_check(dir: &str) -> Step<Use> {
457 named::uses(
458 "lycheeverse",
459 "lychee-action",
460 "82202e5e9c2f4ef1a55a3d02563e1cb6041e5332",
461 ) // v2.4.1
462 .add_with(("args", format!("--no-progress --exclude '^http' '{dir}'")))
463 .add_with(("fail", true))
464 .add_with(("jobSummary", false))
465 }
466
467 fn install_mdbook() -> Step<Use> {
468 named::uses(
469 "peaceiris",
470 "actions-mdbook",
471 "ee69d230fe19748b7abf22df32acaa93833fad08", // v2
472 )
473 .with(("mdbook-version", "0.4.37"))
474 }
475
476 fn build_docs() -> Step<Run> {
477 named::bash(indoc::indoc! {r#"
478 mkdir -p target/deploy
479 mdbook build ./docs --dest-dir=../target/deploy/docs/
480 "#})
481 }
482
483 named::job(
484 release_job(&[])
485 .runs_on(runners::LINUX_LARGE)
486 .add_step(steps::checkout_repo())
487 .add_step(steps::setup_cargo_config(Platform::Linux))
488 // todo(ci): un-inline build_docs/action.yml here
489 .add_step(steps::cache_rust_dependencies_namespace())
490 .add_step(
491 lychee_link_check("./docs/src/**/*"), // check markdown links
492 )
493 .map(steps::install_linux_dependencies)
494 .add_step(steps::script("./script/generate-action-metadata"))
495 .add_step(install_mdbook())
496 .add_step(build_docs())
497 .add_step(
498 lychee_link_check("target/deploy/docs"), // check links in generated html
499 ),
500 )
501}
502
503pub(crate) fn check_scripts() -> NamedJob {
504 fn download_actionlint() -> Step<Run> {
505 named::bash(
506 "bash <(curl https://raw.githubusercontent.com/rhysd/actionlint/main/scripts/download-actionlint.bash)",
507 )
508 }
509
510 fn run_actionlint() -> Step<Run> {
511 named::bash(indoc::indoc! {r#"
512 ${{ steps.get_actionlint.outputs.executable }} -color
513 "#})
514 }
515
516 fn run_shellcheck() -> Step<Run> {
517 named::bash("./script/shellcheck-scripts error")
518 }
519
520 fn check_xtask_workflows() -> Step<Run> {
521 named::bash(indoc::indoc! {r#"
522 cargo xtask workflows
523 if ! git diff --exit-code .github; then
524 echo "Error: .github directory has uncommitted changes after running 'cargo xtask workflows'"
525 echo "Please run 'cargo xtask workflows' locally and commit the changes"
526 exit 1
527 fi
528 "#})
529 }
530
531 named::job(
532 release_job(&[])
533 .runs_on(runners::LINUX_SMALL)
534 .add_step(steps::checkout_repo())
535 .add_step(run_shellcheck())
536 .add_step(download_actionlint().id("get_actionlint"))
537 .add_step(run_actionlint())
538 .add_step(check_xtask_workflows()),
539 )
540}