1use gh_workflow::{
2 Event, Expression, Job, Push, Run, Step, Use, Workflow, WorkflowCall, WorkflowCallSecret,
3 WorkflowDispatch,
4};
5
6use crate::tasks::workflows::{
7 runners,
8 steps::{self, FluentBuilder as _, NamedJob, named, release_job},
9 vars::{self, StepOutput, WorkflowInput},
10};
11
/// Directory (relative to the repo root) where the rendered docs site is
/// assembled before being deployed to Cloudflare Pages.
const BUILD_OUTPUT_DIR: &str = "target/deploy";
13
/// Release channel a docs build targets. Each channel maps to its own
/// site URL prefix, Cloudflare Pages project, and channel identifier.
pub(crate) enum DocsChannel {
    Nightly,
    Preview,
    Stable,
}

impl DocsChannel {
    /// Single source of truth for per-channel strings:
    /// `(site_url, project_name, channel_name)`.
    fn properties(&self) -> (&'static str, &'static str, &'static str) {
        match self {
            Self::Nightly => ("/docs/nightly/", "docs-nightly", "nightly"),
            Self::Preview => ("/docs/preview/", "docs-preview", "preview"),
            Self::Stable => ("/docs/", "docs", "stable"),
        }
    }

    /// URL path prefix the published book is served under.
    pub(crate) fn site_url(&self) -> &'static str {
        self.properties().0
    }

    /// Cloudflare Pages project this channel deploys to.
    pub(crate) fn project_name(&self) -> &'static str {
        self.properties().1
    }

    /// Lowercase channel identifier as used in workflow inputs.
    pub(crate) fn channel_name(&self) -> &'static str {
        self.properties().2
    }
}
45
46pub(crate) fn lychee_link_check(dir: &str) -> Step<Use> {
47 named::uses(
48 "lycheeverse",
49 "lychee-action",
50 "82202e5e9c2f4ef1a55a3d02563e1cb6041e5332",
51 ) // v2.4.1
52 .add_with(("args", format!("--no-progress --exclude '^http' '{dir}'")))
53 .add_with(("fail", true))
54 .add_with(("jobSummary", false))
55}
56
57pub(crate) fn install_mdbook() -> Step<Use> {
58 named::uses(
59 "peaceiris",
60 "actions-mdbook",
61 "ee69d230fe19748b7abf22df32acaa93833fad08", // v2
62 )
63 .with(("mdbook-version", "0.4.37"))
64}
65
66pub(crate) fn build_docs_book() -> Step<Run> {
67 named::bash(indoc::formatdoc! {r#"
68 mkdir -p {BUILD_OUTPUT_DIR}
69 mdbook build ./docs --dest-dir=../{BUILD_OUTPUT_DIR}/docs/
70 "#})
71}
72
/// Adds the shared docs-build steps to `job`: checkout (optionally pinned to
/// `checkout_ref`), cargo/cache setup, action-metadata generation, source
/// link checking, the mdBook build into `BUILD_OUTPUT_DIR`, and a second
/// link check over the rendered output.
///
/// `site_url` is exported as `MDBOOK_BOOK__SITE_URL` (mdBook reads
/// `MDBOOK_BOOK__*` env vars as book.toml overrides) and `docs_channel` as
/// `DOCS_CHANNEL` for the build to consume.
fn docs_build_steps(
    job: Job,
    checkout_ref: Option<String>,
    docs_channel: impl Into<String>,
    site_url: impl Into<String>,
) -> Job {
    job.add_env(("DOCS_AMPLITUDE_API_KEY", vars::DOCS_AMPLITUDE_API_KEY))
        .add_step(
            // Check out the event's default ref unless the caller pinned one.
            steps::checkout_repo().when_some(checkout_ref, |step, checkout_ref| {
                step.with_ref(checkout_ref)
            }),
        )
        .add_env(("MDBOOK_BOOK__SITE_URL", site_url.into()))
        .add_env(("DOCS_CHANNEL", docs_channel.into()))
        .runs_on(runners::LINUX_XL)
        .add_step(steps::setup_cargo_config(runners::Platform::Linux))
        .add_step(steps::cache_rust_dependencies_namespace())
        .map(steps::install_linux_dependencies)
        .add_step(steps::script("./script/generate-action-metadata"))
        // Link-check the markdown sources first (fail fast), then the
        // rendered HTML after the build.
        .add_step(lychee_link_check("./docs/src/**/*"))
        .add_step(install_mdbook())
        .add_step(build_docs_book())
        .add_step(lychee_link_check(&format!("{BUILD_OUTPUT_DIR}/docs")))
}
97
/// Appends deployment steps to a job whose earlier steps built the docs into
/// `BUILD_OUTPUT_DIR`: publish to Cloudflare Pages, upload the install
/// script to R2, deploy the docs proxy worker, and archive wrangler logs.
fn docs_deploy_steps(job: Job, project_name: &StepOutput) -> Job {
    // Publishes `BUILD_OUTPUT_DIR` to the Cloudflare Pages project named by
    // `project_name`, a step output resolved at workflow runtime.
    fn deploy_to_cf_pages(project_name: &StepOutput) -> Step<Use> {
        named::uses(
            "cloudflare",
            "wrangler-action",
            "da0e0dfe58b7a431659754fdf3f186c529afbe65",
        ) // v3
        .add_with(("apiToken", vars::CLOUDFLARE_API_TOKEN))
        .add_with(("accountId", vars::CLOUDFLARE_ACCOUNT_ID))
        .add_with((
            "command",
            // `${{{{`/`}}}}` escape to literal `${{ … }}` so the generated
            // YAML wraps the step-output expression for runtime expansion.
            format!(
                "pages deploy {BUILD_OUTPUT_DIR} --project-name=${{{{ {} }}}}",
                project_name.expr()
            ),
        ))
    }

    // Copies `script/install.sh` into the `zed-open-source-website-assets`
    // R2 bucket (`-f` forces overwrite of the existing object).
    fn upload_install_script() -> Step<Use> {
        named::uses(
            "cloudflare",
            "wrangler-action",
            "da0e0dfe58b7a431659754fdf3f186c529afbe65",
        ) // v3
        .add_with(("apiToken", vars::CLOUDFLARE_API_TOKEN))
        .add_with(("accountId", vars::CLOUDFLARE_ACCOUNT_ID))
        .add_with((
            "command",
            "r2 object put -f script/install.sh zed-open-source-website-assets/install.sh",
        ))
    }

    // Deploys the Cloudflare worker at `.cloudflare/docs-proxy/src/worker.js`.
    fn deploy_docs_worker() -> Step<Use> {
        named::uses(
            "cloudflare",
            "wrangler-action",
            "da0e0dfe58b7a431659754fdf3f186c529afbe65",
        ) // v3
        .add_with(("apiToken", vars::CLOUDFLARE_API_TOKEN))
        .add_with(("accountId", vars::CLOUDFLARE_ACCOUNT_ID))
        .add_with(("command", "deploy .cloudflare/docs-proxy/src/worker.js"))
    }

    // Uploads wrangler's log directory as a `wrangler_logs` artifact;
    // `always()` ensures logs are kept even when a deploy step failed.
    fn upload_wrangler_logs() -> Step<Use> {
        named::uses(
            "actions",
            "upload-artifact",
            "ea165f8d65b6e75b540449e92b4886f43607fa02",
        ) // v4
        .if_condition(Expression::new("always()"))
        .add_with(("name", "wrangler_logs"))
        .add_with(("path", "/home/runner/.config/.wrangler/logs/"))
    }

    job.add_step(deploy_to_cf_pages(project_name))
        .add_step(upload_install_script())
        .add_step(deploy_docs_worker())
        .add_step(upload_wrangler_logs())
}
157
158pub(crate) fn check_docs() -> NamedJob {
159 NamedJob {
160 name: "check_docs".to_owned(),
161 job: docs_build_steps(
162 release_job(&[]),
163 None,
164 DocsChannel::Stable.channel_name(),
165 DocsChannel::Stable.site_url(),
166 ),
167 }
168}
169
/// Job that builds and deploys the docs for the channel named by
/// `channel_input`, optionally checking out the exact SHA given by
/// `commit_sha_input`. Gated to the `zed-industries` repository owner so
/// forks never attempt a deploy.
pub(crate) fn deploy_docs_job(
    channel_input: &WorkflowInput,
    commit_sha_input: &WorkflowInput,
) -> NamedJob {
    // Builds a bash step that validates the channel input and resolves it
    // into three step outputs: `channel`, `site_url`, `project_name`.
    // An empty channel defaults to "nightly" on `main`, and is an error on
    // any other ref; an unrecognized channel fails the job.
    fn resolve_channel_step(
        channel_input: &WorkflowInput,
    ) -> (Step<Run>, StepOutput, StepOutput, StepOutput) {
        let step = named::bash(format!(
            indoc::indoc! {r#"
            if [ -z "$CHANNEL" ]; then
                if [ "$GITHUB_REF" = "refs/heads/main" ]; then
                    CHANNEL="nightly"
                else
                    echo "::error::channel input is required when ref is not main."
                    exit 1
                fi
            fi

            case "$CHANNEL" in
                "nightly")
                    SITE_URL="{nightly_site_url}"
                    PROJECT_NAME="{nightly_project_name}"
                    ;;
                "preview")
                    SITE_URL="{preview_site_url}"
                    PROJECT_NAME="{preview_project_name}"
                    ;;
                "stable")
                    SITE_URL="{stable_site_url}"
                    PROJECT_NAME="{stable_project_name}"
                    ;;
                *)
                    echo "::error::Invalid docs channel '$CHANNEL'. Expected one of: nightly, preview, stable."
                    exit 1
                    ;;
            esac

            echo "channel=$CHANNEL" >> "$GITHUB_OUTPUT"
            echo "site_url=$SITE_URL" >> "$GITHUB_OUTPUT"
            echo "project_name=$PROJECT_NAME" >> "$GITHUB_OUTPUT"
            "#},
            nightly_site_url = DocsChannel::Nightly.site_url(),
            preview_site_url = DocsChannel::Preview.site_url(),
            stable_site_url = DocsChannel::Stable.site_url(),
            nightly_project_name = DocsChannel::Nightly.project_name(),
            preview_project_name = DocsChannel::Preview.project_name(),
            stable_project_name = DocsChannel::Stable.project_name(),
        ))
        // Stable step id so the outputs below have a fixed address.
        .id("resolve-channel")
        .add_env(("CHANNEL", channel_input.expr()))
        ;

        let channel = StepOutput::new(&step, "channel");
        let site_url = StepOutput::new(&step, "site_url");
        let project_name = StepOutput::new(&step, "project_name");
        (step, channel, site_url, project_name)
    }
    let (resolve_step, channel, site_url, project_name) = resolve_channel_step(channel_input);

    NamedJob {
        name: "deploy_docs".to_owned(),
        job: docs_deploy_steps(
            docs_build_steps(
                release_job(&[])
                    .name("Build and Deploy Docs")
                    .cond(Expression::new(
                        "github.repository_owner == 'zed-industries'",
                    ))
                    .add_step(resolve_step),
                // GitHub-expression "ternary" (`a && b || c`): use the
                // commit_sha input when non-empty, else the event's SHA.
                Some(format!(
                    "${{{{ {} != '' && {} || github.sha }}}}",
                    commit_sha_input.expr(),
                    commit_sha_input.expr()
                )),
                channel.to_string(),
                site_url.to_string(),
            ),
            &project_name,
        ),
    }
}
251
/// The deploy-docs workflow. Triggers on pushes to `main` (nightly default),
/// manual `workflow_dispatch`, and `workflow_call` from other workflows.
/// Both inputs default to the empty string, which `deploy_docs_job` treats
/// as "not provided". Callers via `workflow_call` must pass the three
/// Cloudflare/Amplitude secrets declared below.
pub(crate) fn deploy_docs() -> Workflow {
    let channel = WorkflowInput::string("channel", Some(String::new()))
        .description("Docs channel to deploy: nightly, preview, or stable");
    let commit_sha = WorkflowInput::string("commit_sha", Some(String::new())).description(
        "Exact commit SHA to checkout and deploy. Defaults to event SHA when omitted.",
    );
    let deploy_docs = deploy_docs_job(&channel, &commit_sha);

    named::workflow()
        .add_event(
            Event::default()
                .push(Push::default().add_branch("main"))
                .workflow_dispatch(
                    WorkflowDispatch::default()
                        .add_input(channel.name, channel.input())
                        .add_input(commit_sha.name, commit_sha.input()),
                ),
        )
        .add_event(
            // The same inputs are re-declared for `workflow_call`, which
            // uses a distinct input schema from `workflow_dispatch`.
            Event::default().workflow_call(
                WorkflowCall::default()
                    .add_input(channel.name, channel.call_input())
                    .add_input(commit_sha.name, commit_sha.call_input())
                    .secrets([
                        (
                            "DOCS_AMPLITUDE_API_KEY".to_owned(),
                            WorkflowCallSecret {
                                description: "DOCS_AMPLITUDE_API_KEY".to_owned(),
                                required: true,
                            },
                        ),
                        (
                            "CLOUDFLARE_API_TOKEN".to_owned(),
                            WorkflowCallSecret {
                                description: "CLOUDFLARE_API_TOKEN".to_owned(),
                                required: true,
                            },
                        ),
                        (
                            "CLOUDFLARE_ACCOUNT_ID".to_owned(),
                            WorkflowCallSecret {
                                description: "CLOUDFLARE_ACCOUNT_ID".to_owned(),
                                required: true,
                            },
                        ),
                    ]),
            ),
        )
        .add_job(deploy_docs.name, deploy_docs.job)
}