use gh_workflow::{
    Concurrency, Event, Expression, Job, PullRequest, Push, Run, Step, Use, Workflow,
};
use indexmap::IndexMap;

use crate::tasks::workflows::{
    nix_build::build_nix, runners::Arch, steps::BASH_SHELL, vars::PathCondition,
};

use super::{
    runners::{self, Platform},
    steps::{self, FluentBuilder, NamedJob, named, release_job},
};

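/// Assembles the CI workflow: an `orchestrate` job decides which checks apply
/// based on the changed files, each check/test job runs behind the matching
/// guard, and a final `tests_pass` job aggregates the results.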
pub(crate) fn run_tests() -> Workflow {
    // Specify anything which may skip the full test suite in this regex:
    // - docs/
    // - script/update_top_ranking_issues/
    // - .github/ISSUE_TEMPLATE/
    // - .github/workflows/ (except .github/workflows/run_tests*)
    let should_run_tests = PathCondition::inverted(
        "run_tests",
        r"^(docs/|script/update_top_ranking_issues/|\.github/(ISSUE_TEMPLATE|workflows/(?!run_tests)))",
    );
    let should_check_docs = PathCondition::new("run_docs", r"^docs/");
    let should_check_scripts = PathCondition::new(
        "run_action_checks",
        r"^\.github/(workflows/|actions/|actionlint.yml)|tooling/xtask|script/",
    );
    let should_check_licences =
        PathCondition::new("run_licenses", r"^(Cargo.lock|script/.*licenses)");
    let should_build_nix = PathCondition::new(
        "run_nix",
        r"^(nix/|flake\.|Cargo\.|rust-toolchain.toml|\.cargo/config.toml)",
    );

    let orchestrate = orchestrate(&[
        &should_check_scripts,
        &should_check_docs,
        &should_check_licences,
        &should_build_nix,
        &should_run_tests,
    ]);

    let jobs = [
        orchestrate,
        check_style(),
        should_run_tests.guard(run_platform_tests(Platform::Windows)),
        should_run_tests.guard(run_platform_tests(Platform::Linux)),
        should_run_tests.guard(run_platform_tests(Platform::Mac)),
        should_run_tests.guard(doctests()),
        should_run_tests.guard(check_workspace_binaries()),
        should_run_tests.guard(check_postgres_and_protobuf_migrations()), // could be more specific here?
        should_run_tests.guard(check_dependencies()), // could be more specific here?
        should_check_docs.guard(check_docs()),
        should_check_licences.guard(check_licenses()),
        should_check_scripts.guard(check_scripts()),
        should_build_nix.guard(build_nix(
            Platform::Linux,
            Arch::X86_64,
            "debug",
            // *don't* cache the built output
            Some("-zed-editor-[0-9.]*-nightly"),
            &[],
        )),
        should_build_nix.guard(build_nix(
            Platform::Mac,
            Arch::AARCH64,
            "debug",
            // *don't* cache the built output
            Some("-zed-editor-[0-9.]*-nightly"),
            &[],
        )),
    ];
    let tests_pass = tests_pass(&jobs);

    named::workflow()
        .add_event(Event::default()
            .push(
                Push::default()
                    .add_branch("main")
                    .add_branch("v[0-9]+.[0-9]+.x")
            )
            .pull_request(PullRequest::default().add_branch("**"))
        )
        .concurrency(Concurrency::default()
            .group("${{ github.workflow }}-${{ github.ref_name }}-${{ github.ref_name == 'main' && github.sha || 'anysha' }}")
            .cancel_in_progress(true)
        )
        .add_env(("CARGO_TERM_COLOR", "always"))
        .add_env(("RUST_BACKTRACE", 1))
        .map(|mut workflow| {
            for job in jobs {
                workflow = workflow.add_job(job.name, job.job);
            }
            workflow
        })
        .add_job(tests_pass.name, tests_pass.job)
}

/// Generates a bash script that checks changed files against regex patterns
/// and sets GitHub output variables accordingly.
fn orchestrate(rules: &[&PathCondition]) -> NamedJob {
    let name = "orchestrate".to_owned();
    let step_name = "filter".to_owned();
    let mut script = String::new();

    script.push_str(indoc::indoc! {r#"
        if [ -z "$GITHUB_BASE_REF" ]; then
          echo "Not in a PR context (i.e., push to main/stable/preview)"
          COMPARE_REV="$(git rev-parse HEAD~1)"
        else
          echo "In a PR context comparing to pull_request.base.ref"
          git fetch origin "$GITHUB_BASE_REF" --depth=350
          COMPARE_REV="$(git merge-base "origin/${GITHUB_BASE_REF}" HEAD)"
        fi
        CHANGED_FILES="$(git diff --name-only "$COMPARE_REV" ${{ github.sha }})"

        check_pattern() {
          local output_name="$1"
          local pattern="$2"
          local grep_arg="$3"

          echo "$CHANGED_FILES" | grep "$grep_arg" "$pattern" && \
            echo "${output_name}=true" >> "$GITHUB_OUTPUT" || \
            echo "${output_name}=false" >> "$GITHUB_OUTPUT"
        }

    "#});

    let mut outputs = IndexMap::new();

    for rule in rules {
        assert!(
            rule.set_by_step
                .borrow_mut()
                .replace(name.clone())
                .is_none()
        );
        assert!(
            outputs
                .insert(
                    rule.name.to_owned(),
                    format!("${{{{ steps.{}.outputs.{} }}}}", step_name, rule.name)
                )
                .is_none()
        );

        let grep_arg = if rule.invert { "-qvP" } else { "-qP" };
        script.push_str(&format!(
            "check_pattern \"{}\" '{}' {}\n",
            rule.name, rule.pattern, grep_arg
        ));
    }

    let job = Job::default()
        .runs_on(runners::LINUX_SMALL)
        .cond(Expression::new(
            "github.repository_owner == 'zed-industries'",
        ))
        .outputs(outputs)
        .add_step(steps::checkout_repo().add_with((
            "fetch-depth",
            "${{ github.ref == 'refs/heads/main' && 2 || 350 }}",
        )))
        .add_step(
            Step::new(step_name.clone())
                .run(script)
                .id(step_name)
                .shell(BASH_SHELL),
        );

    NamedJob { name, job }
}

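/// Final gate job that `needs` every other job and fails if any of them
/// finished in a state other than `success` or `skipped`.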
pub(crate) fn tests_pass(jobs: &[NamedJob]) -> NamedJob {
    let mut script = String::from(indoc::indoc! {r#"
        set +x
        EXIT_CODE=0

        check_result() {
          echo "* $1: $2"
          if [[ "$2" != "skipped" && "$2" != "success" ]]; then EXIT_CODE=1; fi
        }

    "#});

    script.push_str(
        &jobs
            .iter()
            .map(|job| {
                format!(
                    "check_result \"{}\" \"${{{{ needs.{}.result }}}}\"",
                    job.name, job.name
                )
            })
            .collect::<Vec<_>>()
            .join("\n"),
    );

    script.push_str("\n\nexit $EXIT_CODE\n");

    let job = Job::default()
        .runs_on(runners::LINUX_SMALL)
        .needs(
            jobs.iter()
                .map(|j| j.name.to_string())
                .collect::<Vec<String>>(),
        )
        .cond(Expression::new(
            "github.repository_owner == 'zed-industries' && always()",
        ))
        .add_step(named::bash(&script));

    named::job(job)
}

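/// Runs the style checks: prettier, TODO and keymap checks, a typos scan, and
/// `cargo fmt`.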
fn check_style() -> NamedJob {
    fn check_for_typos() -> Step<Use> {
        named::uses(
            "crate-ci",
            "typos",
            "80c8a4945eec0f6d464eaf9e65ed98ef085283d1", // v1.38.1
        )
        .with(("config", "./typos.toml"))
    }
    named::job(
        release_job(&[])
            .runs_on(runners::LINUX_MEDIUM)
            .add_step(steps::checkout_repo())
            .add_step(steps::setup_pnpm())
            .add_step(steps::script("./script/prettier"))
            .add_step(steps::script("./script/check-todos"))
            .add_step(steps::script("./script/check-keymaps"))
            .add_step(check_for_typos())
            .add_step(steps::cargo_fmt()),
    )
}

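/// Checks dependency hygiene: unused dependencies via `cargo-machete`, a
/// consistent `Cargo.lock` via `cargo update --locked`, and (on pull requests)
/// vulnerable dependencies via `dependency-review-action`.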
fn check_dependencies() -> NamedJob {
    fn install_cargo_machete() -> Step<Use> {
        named::uses(
            "clechasseur",
            "rs-cargo",
            "8435b10f6e71c2e3d4d3b7573003a8ce4bfc6386", // v2
        )
        .add_with(("command", "install"))
        .add_with(("args", "cargo-machete@0.7.0"))
    }

    fn run_cargo_machete() -> Step<Use> {
        named::uses(
            "clechasseur",
            "rs-cargo",
            "8435b10f6e71c2e3d4d3b7573003a8ce4bfc6386", // v2
        )
        .add_with(("command", "machete"))
    }

    fn check_cargo_lock() -> Step<Run> {
        named::bash("cargo update --locked --workspace")
    }

    fn check_vulnerable_dependencies() -> Step<Use> {
        named::uses(
            "actions",
            "dependency-review-action",
            "67d4f4bd7a9b17a0db54d2a7519187c65e339de8", // v4
        )
        .if_condition(Expression::new("github.event_name == 'pull_request'"))
        .with(("license-check", false))
    }

    named::job(
        release_job(&[])
            .runs_on(runners::LINUX_SMALL)
            .add_step(steps::checkout_repo())
            .add_step(install_cargo_machete())
            .add_step(run_cargo_machete())
            .add_step(check_cargo_lock())
            .add_step(check_vulnerable_dependencies()),
    )
}

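/// Builds `collab` plus every workspace binary and example on Linux to make
/// sure they still compile, without running any tests.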
fn check_workspace_binaries() -> NamedJob {
    named::job(
        release_job(&[])
            .runs_on(runners::LINUX_LARGE)
            .add_step(steps::checkout_repo())
            .add_step(steps::setup_cargo_config(Platform::Linux))
            .map(steps::install_linux_dependencies)
            .add_step(steps::script("cargo build -p collab"))
            .add_step(steps::script("cargo build --workspace --bins --examples"))
            .add_step(steps::cleanup_cargo_config(Platform::Linux)),
    )
}

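/// Runs clippy and the nextest suite for the given platform, installing the
/// Linux system dependencies first when targeting Linux.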
pub(crate) fn run_platform_tests(platform: Platform) -> NamedJob {
    let runner = match platform {
        Platform::Windows => runners::WINDOWS_DEFAULT,
        Platform::Linux => runners::LINUX_DEFAULT,
        Platform::Mac => runners::MAC_DEFAULT,
    };
    NamedJob {
        name: format!("run_tests_{platform}"),
        job: release_job(&[])
            .runs_on(runner)
            .add_step(steps::checkout_repo())
            .add_step(steps::setup_cargo_config(platform))
            .when(
                platform == Platform::Linux,
                steps::install_linux_dependencies,
            )
            .add_step(steps::setup_node())
            .add_step(steps::clippy(platform))
            .add_step(steps::cargo_install_nextest(platform))
            .add_step(steps::clear_target_dir_if_large(platform))
            .add_step(steps::cargo_nextest(platform))
            .add_step(steps::cleanup_cargo_config(platform)),
    }
}

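/// Checks protobuf definitions for breaking changes with `buf breaking`,
/// comparing `crates/proto/proto/` against the pull request base branch (or
/// the merge base with `origin/main` on push builds).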
pub(crate) fn check_postgres_and_protobuf_migrations() -> NamedJob {
    fn remove_untracked_files() -> Step<Run> {
        named::bash("git clean -df")
    }

    fn ensure_fresh_merge() -> Step<Run> {
        named::bash(indoc::indoc! {r#"
            if [ -z "$GITHUB_BASE_REF" ];
            then
              echo "BUF_BASE_BRANCH=$(git merge-base origin/main HEAD)" >> "$GITHUB_ENV"
            else
              git checkout -B temp
              git merge -q "origin/$GITHUB_BASE_REF" -m "merge main into temp"
              echo "BUF_BASE_BRANCH=$GITHUB_BASE_REF" >> "$GITHUB_ENV"
            fi
        "#})
    }

    fn bufbuild_setup_action() -> Step<Use> {
        named::uses("bufbuild", "buf-setup-action", "v1").add_with(("version", "v1.29.0"))
    }

    fn bufbuild_breaking_action() -> Step<Use> {
        named::uses("bufbuild", "buf-breaking-action", "v1")
            .add_with(("input", "crates/proto/proto/"))
            .add_with(("against", "https://github.com/${GITHUB_REPOSITORY}.git#branch=${BUF_BASE_BRANCH},subdir=crates/proto/proto/"))
    }

    named::job(
        release_job(&[])
            .runs_on(runners::MAC_DEFAULT)
            .add_step(steps::checkout_repo().with(("fetch-depth", 0))) // fetch full history
            .add_step(remove_untracked_files())
            .add_step(ensure_fresh_merge())
            .add_step(bufbuild_setup_action())
            .add_step(bufbuild_breaking_action()),
    )
}

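/// Runs the documentation tests (`cargo test --workspace --doc --no-fail-fast`)
/// on Linux.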
fn doctests() -> NamedJob {
    fn run_doctests() -> Step<Run> {
        named::bash(indoc::indoc! {r#"
            cargo test --workspace --doc --no-fail-fast
        "#})
        .id("run_doctests")
    }

    named::job(
        release_job(&[])
            .runs_on(runners::LINUX_DEFAULT)
            .add_step(steps::checkout_repo())
            .add_step(steps::cache_rust_dependencies())
            .map(steps::install_linux_dependencies)
            .add_step(steps::setup_cargo_config(Platform::Linux))
            .add_step(run_doctests())
            .add_step(steps::cleanup_cargo_config(Platform::Linux)),
    )
}

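/// Runs `./script/check-licenses` and `./script/generate-licenses` on a small
/// Linux runner.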
fn check_licenses() -> NamedJob {
    named::job(
        Job::default()
            .runs_on(runners::LINUX_SMALL)
            .add_step(steps::checkout_repo())
            .add_step(steps::script("./script/check-licenses"))
            .add_step(steps::script("./script/generate-licenses")),
    )
}

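/// Builds the docs with mdbook and checks both the markdown sources and the
/// generated HTML for broken links with lychee.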
fn check_docs() -> NamedJob {
    fn lychee_link_check(dir: &str) -> Step<Use> {
        named::uses(
            "lycheeverse",
            "lychee-action",
            "82202e5e9c2f4ef1a55a3d02563e1cb6041e5332", // v2.4.1
        )
        .add_with(("args", format!("--no-progress --exclude '^http' '{dir}'")))
        .add_with(("fail", true))
        .add_with(("jobSummary", false))
    }

    fn install_mdbook() -> Step<Use> {
        named::uses(
            "peaceiris",
            "actions-mdbook",
            "ee69d230fe19748b7abf22df32acaa93833fad08", // v2
        )
        .with(("mdbook-version", "0.4.37"))
    }

    fn build_docs() -> Step<Run> {
        named::bash(indoc::indoc! {r#"
            mkdir -p target/deploy
            mdbook build ./docs --dest-dir=../target/deploy/docs/
        "#})
    }

    named::job(
        release_job(&[])
            .runs_on(runners::LINUX_LARGE)
            .add_step(steps::checkout_repo())
            .add_step(steps::setup_cargo_config(Platform::Linux))
            // todo(ci): un-inline build_docs/action.yml here
            .add_step(steps::cache_rust_dependencies())
            .add_step(
                lychee_link_check("./docs/src/**/*"), // check markdown links
            )
            .map(steps::install_linux_dependencies)
            .add_step(install_mdbook())
            .add_step(build_docs())
            .add_step(
                lychee_link_check("target/deploy/docs"), // check links in generated html
            ),
    )
}

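/// Lints the repository's scripts and workflow definitions: shellcheck for
/// shell scripts, actionlint for GitHub Actions files, and a check that
/// `.github/` matches the output of `cargo xtask workflows`.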
pub(crate) fn check_scripts() -> NamedJob {
    fn download_actionlint() -> Step<Run> {
        named::bash(
            "bash <(curl https://raw.githubusercontent.com/rhysd/actionlint/main/scripts/download-actionlint.bash)",
        )
    }

    fn run_actionlint() -> Step<Run> {
        named::bash(indoc::indoc! {r#"
            ${{ steps.get_actionlint.outputs.executable }} -color
        "#})
    }

    fn run_shellcheck() -> Step<Run> {
        named::bash("./script/shellcheck-scripts error")
    }

    fn check_xtask_workflows() -> Step<Run> {
        named::bash(indoc::indoc! {r#"
            cargo xtask workflows
            if ! git diff --exit-code .github; then
              echo "Error: .github directory has uncommitted changes after running 'cargo xtask workflows'"
              echo "Please run 'cargo xtask workflows' locally and commit the changes"
              exit 1
            fi
        "#})
    }

    named::job(
        release_job(&[])
            .runs_on(runners::LINUX_SMALL)
            .add_step(steps::checkout_repo())
            .add_step(run_shellcheck())
            .add_step(download_actionlint().id("get_actionlint"))
            .add_step(run_actionlint())
            .add_step(check_xtask_workflows()),
    )
}