1use gh_workflow::{
2 Event, Expression, Job, Push, Run, Step, Use, Workflow, WorkflowCall, WorkflowCallSecret,
3 WorkflowDispatch,
4};
5
6use crate::tasks::workflows::{
7 runners,
8 steps::{self, FluentBuilder as _, NamedJob, named, release_job},
9 vars::{self, StepOutput, WorkflowInput},
10};
11
/// Directory (relative to the repo root) that the docs build writes into and
/// that the Cloudflare Pages deploy step publishes from.
const BUILD_OUTPUT_DIR: &str = "target/deploy";
13
/// The docs release channel being built or deployed. Each channel is served
/// from its own site path and published to its own Cloudflare Pages project.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub(crate) enum DocsChannel {
    Nightly,
    Preview,
    Stable,
}

impl DocsChannel {
    /// Base URL path the built book is served under; stable docs live at the
    /// root `/docs/` path, the other channels under a channel subpath.
    pub(crate) fn site_url(&self) -> &'static str {
        match self {
            Self::Nightly => "/docs/nightly/",
            Self::Preview => "/docs/preview/",
            Self::Stable => "/docs/",
        }
    }

    /// Cloudflare Pages project name that receives this channel's deploy.
    pub(crate) fn project_name(&self) -> &'static str {
        match self {
            Self::Nightly => "docs-nightly",
            Self::Preview => "docs-preview",
            Self::Stable => "docs",
        }
    }

    /// Channel identifier as exported via the `DOCS_CHANNEL` env var and
    /// matched against the workflow's `channel` input.
    pub(crate) fn channel_name(&self) -> &'static str {
        match self {
            Self::Nightly => "nightly",
            Self::Preview => "preview",
            Self::Stable => "stable",
        }
    }
}
45
46pub(crate) fn lychee_link_check(dir: &str) -> Step<Use> {
47 named::uses(
48 "lycheeverse",
49 "lychee-action",
50 "82202e5e9c2f4ef1a55a3d02563e1cb6041e5332",
51 ) // v2.4.1
52 .add_with(("args", format!("--no-progress --exclude '^http' '{dir}'")))
53 .add_with(("fail", true))
54 .add_with(("jobSummary", false))
55}
56
57pub(crate) fn install_mdbook() -> Step<Use> {
58 named::uses(
59 "peaceiris",
60 "actions-mdbook",
61 "ee69d230fe19748b7abf22df32acaa93833fad08", // v2
62 )
63 .with(("mdbook-version", "0.4.37"))
64}
65
66pub(crate) fn build_docs_book() -> Step<Run> {
67 named::bash(indoc::formatdoc! {r#"
68 mkdir -p {BUILD_OUTPUT_DIR}
69 mdbook build ./docs --dest-dir=../{BUILD_OUTPUT_DIR}/docs/
70 "#})
71}
72
73fn docs_build_steps(
74 job: Job,
75 checkout_ref: Option<String>,
76 docs_channel: impl Into<String>,
77 site_url: impl Into<String>,
78) -> Job {
79 job.add_env(("DOCS_AMPLITUDE_API_KEY", vars::DOCS_AMPLITUDE_API_KEY))
80 .add_step(
81 steps::checkout_repo().when_some(checkout_ref, |step, checkout_ref| {
82 step.with_ref(checkout_ref)
83 }),
84 )
85 .add_env(("MDBOOK_BOOK__SITE_URL", site_url.into()))
86 .add_env(("DOCS_CHANNEL", docs_channel.into()))
87 .runs_on(runners::LINUX_XL)
88 .add_step(steps::setup_cargo_config(runners::Platform::Linux))
89 .add_step(steps::cache_rust_dependencies_namespace())
90 .map(steps::install_linux_dependencies)
91 .add_step(steps::script("./script/generate-action-metadata"))
92 .add_step(lychee_link_check("./docs/src/**/*"))
93 .add_step(install_mdbook())
94 .add_step(build_docs_book())
95 .add_step(lychee_link_check(&format!("{BUILD_OUTPUT_DIR}/docs")))
96}
97
/// Appends the deploy steps to `job`: publish the built docs to Cloudflare
/// Pages, upload the install script to R2, deploy the docs proxy worker, and
/// (even on failure) upload wrangler logs for debugging.
fn docs_deploy_steps(job: Job, project_name: &StepOutput) -> Job {
    // Publishes BUILD_OUTPUT_DIR to the Cloudflare Pages project whose name
    // is resolved at workflow runtime from the `project_name` step output.
    fn deploy_to_cf_pages(project_name: &StepOutput) -> Step<Use> {
        named::uses(
            "cloudflare",
            "wrangler-action",
            "da0e0dfe58b7a431659754fdf3f186c529afbe65",
        ) // v3
        .add_with(("apiToken", vars::CLOUDFLARE_API_TOKEN))
        .add_with(("accountId", vars::CLOUDFLARE_ACCOUNT_ID))
        .add_with((
            "command",
            // `{{{{`/`}}}}` escape to literal `${{ … }}` in the generated
            // YAML, so the project name is expanded by GitHub Actions.
            format!(
                "pages deploy {BUILD_OUTPUT_DIR} --project-name=${{{{ {} }}}}",
                project_name.expr()
            ),
        ))
    }

    // Uploads the install script to the R2 bucket serving website assets.
    fn upload_install_script() -> Step<Use> {
        named::uses(
            "cloudflare",
            "wrangler-action",
            "da0e0dfe58b7a431659754fdf3f186c529afbe65",
        ) // v3
        .add_with(("apiToken", vars::CLOUDFLARE_API_TOKEN))
        .add_with(("accountId", vars::CLOUDFLARE_ACCOUNT_ID))
        .add_with((
            "command",
            "r2 object put -f script/install.sh zed-open-source-website-assets/install.sh",
        ))
    }

    // Deploys the Cloudflare Worker that fronts/proxies the docs site.
    fn deploy_docs_worker() -> Step<Use> {
        named::uses(
            "cloudflare",
            "wrangler-action",
            "da0e0dfe58b7a431659754fdf3f186c529afbe65",
        ) // v3
        .add_with(("apiToken", vars::CLOUDFLARE_API_TOKEN))
        .add_with(("accountId", vars::CLOUDFLARE_ACCOUNT_ID))
        .add_with(("command", "deploy .cloudflare/docs-proxy/src/worker.js"))
    }

    // Uploads wrangler's log directory as an artifact; `always()` keeps this
    // step running even when an earlier deploy step failed.
    fn upload_wrangler_logs() -> Step<Use> {
        named::uses(
            "actions",
            "upload-artifact",
            "ea165f8d65b6e75b540449e92b4886f43607fa02",
        ) // v4
        .if_condition(Expression::new("always()"))
        .add_with(("name", "wrangler_logs"))
        // NOTE(review): assumes the Linux hosted-runner home dir; verify if
        // this job ever moves off Linux runners.
        .add_with(("path", "/home/runner/.config/.wrangler/logs/"))
    }

    job.add_step(deploy_to_cf_pages(project_name))
        .add_step(upload_install_script())
        .add_step(deploy_docs_worker())
        .add_step(upload_wrangler_logs())
}
157
158pub(crate) fn check_docs() -> NamedJob {
159 NamedJob {
160 name: "check_docs".to_owned(),
161 job: docs_build_steps(
162 release_job(&[]),
163 None,
164 DocsChannel::Stable.channel_name(),
165 DocsChannel::Stable.site_url(),
166 ),
167 }
168}
169
/// Builds the `deploy_docs` job: resolves the target channel at runtime,
/// builds the docs for that channel, and deploys the result to Cloudflare.
pub(crate) fn deploy_docs_job(
    channel_input: &WorkflowInput,
    commit_sha_input: &WorkflowInput,
) -> NamedJob {
    // Emits a bash step that maps the `channel` workflow input (defaulting to
    // "nightly" on pushes to main) to the channel name, site URL, and
    // Cloudflare Pages project name, exposed as step outputs.
    fn resolve_channel_step(
        channel_input: &WorkflowInput,
    ) -> (Step<Run>, StepOutput, StepOutput, StepOutput) {
        // `indoc!` supplies the format string; `{{{{`/`}}}}` escape down to a
        // literal `${{ … }}` GitHub Actions expression in the rendered YAML.
        let step = named::bash(format!(
            indoc::indoc! {r#"
                CHANNEL="${{{{ {channel_input_expr} }}}}"

                if [ -z "$CHANNEL" ]; then
                    if [ "${{{{ github.ref }}}}" = "refs/heads/main" ]; then
                        CHANNEL="nightly"
                    else
                        echo "::error::channel input is required when ref is not main."
                        exit 1
                    fi
                fi

                case "$CHANNEL" in
                    "nightly")
                        SITE_URL="{nightly_site_url}"
                        PROJECT_NAME="{nightly_project_name}"
                        ;;
                    "preview")
                        SITE_URL="{preview_site_url}"
                        PROJECT_NAME="{preview_project_name}"
                        ;;
                    "stable")
                        SITE_URL="{stable_site_url}"
                        PROJECT_NAME="{stable_project_name}"
                        ;;
                    *)
                        echo "::error::Invalid docs channel '$CHANNEL'. Expected one of: nightly, preview, stable."
                        exit 1
                        ;;
                esac

                echo "channel=$CHANNEL" >> "$GITHUB_OUTPUT"
                echo "site_url=$SITE_URL" >> "$GITHUB_OUTPUT"
                echo "project_name=$PROJECT_NAME" >> "$GITHUB_OUTPUT"
            "#},
            channel_input_expr = channel_input.expr(),
            nightly_site_url = DocsChannel::Nightly.site_url(),
            preview_site_url = DocsChannel::Preview.site_url(),
            stable_site_url = DocsChannel::Stable.site_url(),
            nightly_project_name = DocsChannel::Nightly.project_name(),
            preview_project_name = DocsChannel::Preview.project_name(),
            stable_project_name = DocsChannel::Stable.project_name(),
        ))
        // Fixed step id so the outputs below have stable expression paths.
        .id("resolve-channel");

        let channel = StepOutput::new(&step, "channel");
        let site_url = StepOutput::new(&step, "site_url");
        let project_name = StepOutput::new(&step, "project_name");
        (step, channel, site_url, project_name)
    }
    let (resolve_step, channel, site_url, project_name) = resolve_channel_step(channel_input);

    NamedJob {
        name: "deploy_docs".to_owned(),
        job: docs_deploy_steps(
            docs_build_steps(
                release_job(&[])
                    .name("Build and Deploy Docs")
                    // Never deploy from forks.
                    .cond(Expression::new(
                        "github.repository_owner == 'zed-industries'",
                    ))
                    .add_step(resolve_step),
                // GitHub-expression ternary: check out `commit_sha_input` when
                // non-empty, otherwise fall back to the event's `github.sha`.
                Some(format!(
                    "${{{{ {} != '' && {} || github.sha }}}}",
                    commit_sha_input.expr(),
                    commit_sha_input.expr()
                )),
                channel.to_string(),
                site_url.to_string(),
            ),
            &project_name,
        ),
    }
}
252
253pub(crate) fn deploy_docs() -> Workflow {
254 let channel = WorkflowInput::string("channel", Some(String::new()))
255 .description("Docs channel to deploy: nightly, preview, or stable");
256 let commit_sha = WorkflowInput::string("commit_sha", Some(String::new())).description(
257 "Exact commit SHA to checkout and deploy. Defaults to event SHA when omitted.",
258 );
259 let deploy_docs = deploy_docs_job(&channel, &commit_sha);
260
261 named::workflow()
262 .add_event(
263 Event::default()
264 .push(Push::default().add_branch("main"))
265 .workflow_dispatch(
266 WorkflowDispatch::default()
267 .add_input(channel.name, channel.input())
268 .add_input(commit_sha.name, commit_sha.input()),
269 ),
270 )
271 .add_event(
272 Event::default().workflow_call(
273 WorkflowCall::default()
274 .add_input(channel.name, channel.call_input())
275 .add_input(commit_sha.name, commit_sha.call_input())
276 .secrets([
277 (
278 "DOCS_AMPLITUDE_API_KEY".to_owned(),
279 WorkflowCallSecret {
280 description: "DOCS_AMPLITUDE_API_KEY".to_owned(),
281 required: true,
282 },
283 ),
284 (
285 "CLOUDFLARE_API_TOKEN".to_owned(),
286 WorkflowCallSecret {
287 description: "CLOUDFLARE_API_TOKEN".to_owned(),
288 required: true,
289 },
290 ),
291 (
292 "CLOUDFLARE_ACCOUNT_ID".to_owned(),
293 WorkflowCallSecret {
294 description: "CLOUDFLARE_ACCOUNT_ID".to_owned(),
295 required: true,
296 },
297 ),
298 ]),
299 ),
300 )
301 .add_job(deploy_docs.name, deploy_docs.job)
302}