1use gh_workflow::{
2 Concurrency, Event, Expression, Job, PullRequest, Push, Run, Step, Use, Workflow,
3};
4use indexmap::IndexMap;
5
6use crate::tasks::workflows::{
7 nix_build::build_nix, runners::Arch, steps::BASH_SHELL, vars::PathCondition,
8};
9
10use super::{
11 runners::{self, Platform},
12 steps::{self, FluentBuilder, NamedJob, named, release_job},
13};
14
/// Builds the `run_tests` CI workflow.
///
/// An `orchestrate` job inspects which files changed and exposes one boolean
/// output per `PathCondition`; every other job is `guard`ed by one of those
/// outputs so that, e.g., docs-only changes skip the full test suite. A final
/// `tests_pass` job aggregates all results into a single required check.
pub(crate) fn run_tests() -> Workflow {
    // Specify anything which should potentially skip full test suite in this regex:
    // - docs/
    // - script/update_top_ranking_issues/
    // - .github/ISSUE_TEMPLATE/
    // - .github/workflows/ (except .github/workflows/run_tests*, i.e. this workflow itself)
    let should_run_tests = PathCondition::inverted(
        "run_tests",
        r"^(docs/|script/update_top_ranking_issues/|\.github/(ISSUE_TEMPLATE|workflows/(?!run_tests)))",
    );
    let should_check_docs = PathCondition::new("run_docs", r"^docs/");
    let should_check_scripts = PathCondition::new(
        "run_action_checks",
        r"^\.github/(workflows/|actions/|actionlint.yml)|tooling/xtask",
    );
    let should_check_licences =
        PathCondition::new("run_licenses", r"^(Cargo.lock|script/.*licenses)");
    let should_build_nix = PathCondition::new(
        "run_nix",
        r"^(nix/|flake\.|Cargo\.|rust-toolchain.toml|\.cargo/config.toml)",
    );

    // The orchestrate job must be handed every condition so it can emit the
    // corresponding step output each `guard` below references.
    let orchestrate = orchestrate(&[
        &should_check_scripts,
        &should_check_docs,
        &should_check_licences,
        &should_build_nix,
        &should_run_tests,
    ]);

    let jobs = [
        orchestrate,
        check_style(),
        should_run_tests.guard(run_platform_tests(Platform::Windows)),
        should_run_tests.guard(run_platform_tests(Platform::Linux)),
        should_run_tests.guard(run_platform_tests(Platform::Mac)),
        should_run_tests.guard(doctests()),
        should_run_tests.guard(check_workspace_binaries()),
        should_run_tests.guard(check_postgres_and_protobuf_migrations()), // could be more specific here?
        should_run_tests.guard(check_dependencies()), // could be more specific here?
        should_check_docs.guard(check_docs()),
        should_check_licences.guard(check_licenses()),
        should_check_scripts.guard(check_scripts()),
        should_build_nix.guard(build_nix(
            Platform::Linux,
            Arch::X86_64,
            "debug",
            // *don't* cache the built output
            Some("-zed-editor-[0-9.]*-nightly"),
            &[],
        )),
        should_build_nix.guard(build_nix(
            Platform::Mac,
            Arch::ARM64,
            "debug",
            // *don't* cache the built output
            Some("-zed-editor-[0-9.]*-nightly"),
            &[],
        )),
    ];
    let tests_pass = tests_pass(&jobs);

    let mut workflow = named::workflow()
        .add_event(Event::default()
            .push(
                Push::default()
                    .add_branch("main")
                    .add_branch("v[0-9]+.[0-9]+.x")
            )
            .pull_request(PullRequest::default().add_branch("**"))
        )
        // One concurrent run per ref; on main the group is keyed by sha so
        // every push gets its own run instead of cancelling the previous one.
        .concurrency(Concurrency::default()
            .group("${{ github.workflow }}-${{ github.ref_name }}-${{ github.ref_name == 'main' && github.sha || 'anysha' }}")
            .cancel_in_progress(true)
        )
        .add_env(( "CARGO_TERM_COLOR", "always" ))
        .add_env(( "RUST_BACKTRACE", 1 ))
        .add_env(( "CARGO_INCREMENTAL", 0 ));
    for job in jobs {
        workflow = workflow.add_job(job.name, job.job)
    }
    workflow.add_job(tests_pass.name, tests_pass.job)
}
98
/// Generates the `orchestrate` job: a bash step that diffs the changed files
/// against each `PathCondition`'s regex and writes a `true`/`false` GitHub
/// output per condition, which downstream jobs consume via `guard`.
fn orchestrate(rules: &[&PathCondition]) -> NamedJob {
    let name = "orchestrate".to_owned();
    let step_name = "filter".to_owned();
    let mut script = String::new();

    // Script preamble: pick the revision to diff against (merge-base of the PR
    // base branch when in a PR, otherwise the parent commit), then define
    // check_pattern(), which greps the changed-file list and records the
    // result as a step output. With the inverted (-v) form the output is
    // `true` when at least one changed file does NOT match the pattern.
    script.push_str(indoc::indoc! {r#"
        if [ -z "$GITHUB_BASE_REF" ]; then
          echo "Not in a PR context (i.e., push to main/stable/preview)"
          COMPARE_REV="$(git rev-parse HEAD~1)"
        else
          echo "In a PR context comparing to pull_request.base.ref"
          git fetch origin "$GITHUB_BASE_REF" --depth=350
          COMPARE_REV="$(git merge-base "origin/${GITHUB_BASE_REF}" HEAD)"
        fi
        CHANGED_FILES="$(git diff --name-only "$COMPARE_REV" ${{ github.sha }})"

        check_pattern() {
          local output_name="$1"
          local pattern="$2"
          local grep_arg="$3"

          echo "$CHANGED_FILES" | grep "$grep_arg" "$pattern" && \
            echo "${output_name}=true" >> "$GITHUB_OUTPUT" || \
            echo "${output_name}=false" >> "$GITHUB_OUTPUT"
        }

    "#});

    let mut outputs = IndexMap::new();

    for rule in rules {
        // Each condition may be claimed by exactly one orchestrating job;
        // a second registration is a programming error, hence the assert.
        assert!(
            rule.set_by_step
                .borrow_mut()
                .replace(name.clone())
                .is_none()
        );
        // Expose `<rule>=true|false` as a job-level output wired to the filter
        // step's output of the same name; duplicate rule names are a bug.
        assert!(
            outputs
                .insert(
                    rule.name.to_owned(),
                    format!("${{{{ steps.{}.outputs.{} }}}}", step_name, rule.name)
                )
                .is_none()
        );

        // -P enables PCRE (needed for the (?!...) lookahead in run_tests);
        // -v inverts the match; -q suppresses output, leaving only the status.
        let grep_arg = if rule.invert { "-qvP" } else { "-qP" };
        script.push_str(&format!(
            "check_pattern \"{}\" '{}' {}\n",
            rule.name, rule.pattern, grep_arg
        ));
    }

    let job = Job::default()
        .runs_on(runners::LINUX_SMALL)
        .cond(Expression::new(
            "github.repository_owner == 'zed-industries'",
        ))
        .outputs(outputs)
        // Shallow history suffices: 2 commits on main (HEAD plus parent for
        // HEAD~1), up to 350 on PR branches so the merge base can be found.
        .add_step(steps::checkout_repo().add_with((
            "fetch-depth",
            "${{ github.ref == 'refs/heads/main' && 2 || 350 }}",
        )))
        .add_step(
            Step::new(step_name.clone())
                .run(script)
                .id(step_name)
                .shell(BASH_SHELL),
        );

    NamedJob { name, job }
}
173
/// Generates the aggregate `tests_pass` job, which `needs` every other job and
/// fails if any dependency finished in a state other than `success` or
/// `skipped`. This gives branch protection a single required status check.
pub(crate) fn tests_pass(jobs: &[NamedJob]) -> NamedJob {
    let mut script = String::from(indoc::indoc! {r#"
        set +x
        EXIT_CODE=0

        check_result() {
          echo "* $1: $2"
          if [[ "$2" != "skipped" && "$2" != "success" ]]; then EXIT_CODE=1; fi
        }

    "#});

    // One check_result line per upstream job, reading its outcome from the
    // generated workflow's `needs.<job>.result` context.
    script.push_str(
        &jobs
            .iter()
            .map(|job| {
                format!(
                    "check_result \"{}\" \"${{{{ needs.{}.result }}}}\"",
                    job.name, job.name
                )
            })
            .collect::<Vec<_>>()
            .join("\n"),
    );

    script.push_str("\n\nexit $EXIT_CODE\n");

    let job = Job::default()
        .runs_on(runners::LINUX_SMALL)
        .needs(
            jobs.iter()
                .map(|j| j.name.to_string())
                .collect::<Vec<String>>(),
        )
        // always(): run even when upstream jobs failed or were skipped, so
        // this aggregate check still reports a status.
        .cond(Expression::new(
            "github.repository_owner == 'zed-industries' && always()",
        ))
        .add_step(named::bash(&script));

    named::job(job)
}
215
216fn check_style() -> NamedJob {
217 fn check_for_typos() -> Step<Use> {
218 named::uses(
219 "crate-ci",
220 "typos",
221 "80c8a4945eec0f6d464eaf9e65ed98ef085283d1",
222 ) // v1.38.1
223 .with(("config", "./typos.toml"))
224 }
225 named::job(
226 release_job(&[])
227 .runs_on(runners::LINUX_MEDIUM)
228 .add_step(steps::checkout_repo())
229 .add_step(steps::setup_pnpm())
230 .add_step(steps::script("./script/prettier"))
231 .add_step(steps::script("./script/check-todos"))
232 .add_step(steps::script("./script/check-keymaps"))
233 .add_step(check_for_typos())
234 .add_step(steps::cargo_fmt()),
235 )
236}
237
238fn check_dependencies() -> NamedJob {
239 fn install_cargo_machete() -> Step<Use> {
240 named::uses(
241 "clechasseur",
242 "rs-cargo",
243 "8435b10f6e71c2e3d4d3b7573003a8ce4bfc6386", // v2
244 )
245 .add_with(("command", "install"))
246 .add_with(("args", "cargo-machete@0.7.0"))
247 }
248
249 fn run_cargo_machete() -> Step<Use> {
250 named::uses(
251 "clechasseur",
252 "rs-cargo",
253 "8435b10f6e71c2e3d4d3b7573003a8ce4bfc6386", // v2
254 )
255 .add_with(("command", "machete"))
256 }
257
258 fn check_cargo_lock() -> Step<Run> {
259 named::bash("cargo update --locked --workspace")
260 }
261
262 fn check_vulnerable_dependencies() -> Step<Use> {
263 named::uses(
264 "actions",
265 "dependency-review-action",
266 "67d4f4bd7a9b17a0db54d2a7519187c65e339de8", // v4
267 )
268 .if_condition(Expression::new("github.event_name == 'pull_request'"))
269 .with(("license-check", false))
270 }
271
272 named::job(
273 release_job(&[])
274 .runs_on(runners::LINUX_SMALL)
275 .add_step(steps::checkout_repo())
276 .add_step(install_cargo_machete())
277 .add_step(run_cargo_machete())
278 .add_step(check_cargo_lock())
279 .add_step(check_vulnerable_dependencies()),
280 )
281}
282
283fn check_workspace_binaries() -> NamedJob {
284 named::job(
285 release_job(&[])
286 .runs_on(runners::LINUX_LARGE)
287 .add_step(steps::checkout_repo())
288 .add_step(steps::setup_cargo_config(Platform::Linux))
289 .map(steps::install_linux_dependencies)
290 .add_step(steps::script("cargo build -p collab"))
291 .add_step(steps::script("cargo build --workspace --bins --examples"))
292 .add_step(steps::cleanup_cargo_config(Platform::Linux)),
293 )
294}
295
296pub(crate) fn run_platform_tests(platform: Platform) -> NamedJob {
297 let runner = match platform {
298 Platform::Windows => runners::WINDOWS_DEFAULT,
299 Platform::Linux => runners::LINUX_DEFAULT,
300 Platform::Mac => runners::MAC_DEFAULT,
301 };
302 NamedJob {
303 name: format!("run_tests_{platform}"),
304 job: release_job(&[])
305 .cond(Expression::new(
306 "github.repository_owner == 'zed-industries'",
307 ))
308 .runs_on(runner)
309 .add_step(steps::checkout_repo())
310 .add_step(steps::setup_cargo_config(platform))
311 .when(
312 platform == Platform::Linux,
313 steps::install_linux_dependencies,
314 )
315 .add_step(steps::setup_node())
316 .add_step(steps::clippy(platform))
317 .add_step(steps::cargo_install_nextest(platform))
318 .add_step(steps::clear_target_dir_if_large(platform))
319 .add_step(steps::cargo_nextest(platform))
320 .add_step(steps::cleanup_cargo_config(platform)),
321 }
322}
323
/// Checks the protobuf definitions (and, by extension, collab's wire
/// compatibility) for breaking changes against the merge base, using buf's
/// breaking-change detector.
pub(crate) fn check_postgres_and_protobuf_migrations() -> NamedJob {
    // `git clean` so leftover untracked files can't confuse later steps.
    fn remove_untracked_files() -> Step<Run> {
        named::bash("git clean -df")
    }

    // Determine the revision buf should diff against and export it as
    // BUF_BASE_BRANCH: the merge-base with main on pushes, or the PR base ref
    // in PRs (after merging it in, so the comparison reflects the post-merge
    // state of the branch).
    fn ensure_fresh_merge() -> Step<Run> {
        named::bash(indoc::indoc! {r#"
            if [ -z "$GITHUB_BASE_REF" ];
            then
              echo "BUF_BASE_BRANCH=$(git merge-base origin/main HEAD)" >> "$GITHUB_ENV"
            else
              git checkout -B temp
              git merge -q "origin/$GITHUB_BASE_REF" -m "merge main into temp"
              echo "BUF_BASE_BRANCH=$GITHUB_BASE_REF" >> "$GITHUB_ENV"
            fi
        "#})
    }

    fn bufbuild_setup_action() -> Step<Use> {
        named::uses("bufbuild", "buf-setup-action", "v1").add_with(("version", "v1.29.0"))
    }

    // Compares crates/proto/proto/ against the same directory at
    // BUF_BASE_BRANCH (set by ensure_fresh_merge above).
    fn bufbuild_breaking_action() -> Step<Use> {
        named::uses("bufbuild", "buf-breaking-action", "v1").add_with(("input", "crates/proto/proto/"))
            .add_with(("against", "https://github.com/${GITHUB_REPOSITORY}.git#branch=${BUF_BASE_BRANCH},subdir=crates/proto/proto/"))
    }

    named::job(
        release_job(&[])
            .runs_on(runners::MAC_DEFAULT)
            .add_step(steps::checkout_repo().with(("fetch-depth", 0))) // fetch full history
            .add_step(remove_untracked_files())
            .add_step(ensure_fresh_merge())
            .add_step(bufbuild_setup_action())
            .add_step(bufbuild_breaking_action()),
    )
}
361
362fn doctests() -> NamedJob {
363 fn run_doctests() -> Step<Run> {
364 named::bash(indoc::indoc! {r#"
365 cargo test --workspace --doc --no-fail-fast
366 "#})
367 .id("run_doctests")
368 }
369
370 named::job(
371 release_job(&[])
372 .runs_on(runners::LINUX_DEFAULT)
373 .add_step(steps::checkout_repo())
374 .add_step(steps::cache_rust_dependencies())
375 .map(steps::install_linux_dependencies)
376 .add_step(steps::setup_cargo_config(Platform::Linux))
377 .add_step(run_doctests())
378 .add_step(steps::cleanup_cargo_config(Platform::Linux)),
379 )
380}
381
382fn check_licenses() -> NamedJob {
383 named::job(
384 Job::default()
385 .runs_on(runners::LINUX_SMALL)
386 .add_step(steps::checkout_repo())
387 .add_step(steps::script("./script/check-licenses"))
388 .add_step(steps::script("./script/generate-licenses")),
389 )
390}
391
392fn check_docs() -> NamedJob {
393 fn lychee_link_check(dir: &str) -> Step<Use> {
394 named::uses(
395 "lycheeverse",
396 "lychee-action",
397 "82202e5e9c2f4ef1a55a3d02563e1cb6041e5332",
398 ) // v2.4.1
399 .add_with(("args", format!("--no-progress --exclude '^http' '{dir}'")))
400 .add_with(("fail", true))
401 .add_with(("jobSummary", false))
402 }
403
404 fn install_mdbook() -> Step<Use> {
405 named::uses(
406 "peaceiris",
407 "actions-mdbook",
408 "ee69d230fe19748b7abf22df32acaa93833fad08", // v2
409 )
410 .with(("mdbook-version", "0.4.37"))
411 }
412
413 fn build_docs() -> Step<Run> {
414 named::bash(indoc::indoc! {r#"
415 mkdir -p target/deploy
416 mdbook build ./docs --dest-dir=../target/deploy/docs/
417 "#})
418 }
419
420 named::job(
421 release_job(&[])
422 .runs_on(runners::LINUX_LARGE)
423 .add_step(steps::checkout_repo())
424 .add_step(steps::setup_cargo_config(Platform::Linux))
425 // todo(ci): un-inline build_docs/action.yml here
426 .add_step(steps::cache_rust_dependencies())
427 .add_step(
428 lychee_link_check("./docs/src/**/*"), // check markdown links
429 )
430 .map(steps::install_linux_dependencies)
431 .add_step(install_mdbook())
432 .add_step(build_docs())
433 .add_step(
434 lychee_link_check("target/deploy/docs"), // check links in generated html
435 ),
436 )
437}
438
/// Lints the repo's automation: shellcheck on scripts, actionlint on the
/// workflow YAML, and a check that `cargo xtask workflows` output is
/// committed (i.e. the generated .github files match this source).
fn check_scripts() -> NamedJob {
    // Installs the latest actionlint binary; the step is given the id
    // `get_actionlint` below so run_actionlint can read its `executable`
    // output.
    fn download_actionlint() -> Step<Run> {
        named::bash(
            "bash <(curl https://raw.githubusercontent.com/rhysd/actionlint/main/scripts/download-actionlint.bash)",
        )
    }

    fn run_actionlint() -> Step<Run> {
        named::bash(indoc::indoc! {r#"
            ${{ steps.get_actionlint.outputs.executable }} -color
        "#})
    }

    fn run_shellcheck() -> Step<Run> {
        named::bash("./script/shellcheck-scripts error")
    }

    // Regenerates the workflows and fails if the checked-in .github files
    // differ — i.e. someone edited the YAML by hand or forgot to commit
    // regenerated output.
    fn check_xtask_workflows() -> Step<Run> {
        named::bash(indoc::indoc! {r#"
            cargo xtask workflows
            if ! git diff --exit-code .github; then
              echo "Error: .github directory has uncommitted changes after running 'cargo xtask workflows'"
              echo "Please run 'cargo xtask workflows' locally and commit the changes"
              exit 1
            fi
        "#})
    }

    named::job(
        release_job(&[])
            .runs_on(runners::LINUX_SMALL)
            .add_step(steps::checkout_repo())
            .add_step(run_shellcheck())
            .add_step(download_actionlint().id("get_actionlint"))
            .add_step(run_actionlint())
            .add_step(check_xtask_workflows()),
    )
}