use crate::{
    ExtensionLibraryKind, ExtensionManifest, GrammarManifestEntry, build_debug_adapter_schema_path,
    parse_wasm_extension_version,
};
use anyhow::{Context as _, Result, bail};
use futures::AsyncReadExt;
use heck::ToSnakeCase;
use http_client::{self, AsyncBody, HttpClient};
use serde::Deserialize;
use std::{
    env, fs, mem,
    path::{Path, PathBuf},
    process::Stdio,
    str::FromStr,
    sync::Arc,
};
use wasm_encoder::{ComponentSectionId, Encode as _, RawSection, Section as _};
use wasmparser::Parser;

/// Currently, we compile with Rust's `wasm32-wasip2` target, which works with WASI `preview2` and the component model.
const RUST_TARGET: &str = "wasm32-wasip2";

/// Compiling Tree-sitter parsers from C to WASM requires Clang 17, and a WASM build of libc
/// and clang's runtime library. The `wasi-sdk` provides these binaries.
///
/// Once Clang 17 and its wasm target are available via system package managers, we won't need
/// to download this.
const WASI_SDK_URL: &str = "https://github.com/WebAssembly/wasi-sdk/releases/download/wasi-sdk-25/";
const WASI_SDK_ASSET_NAME: Option<&str> = if cfg!(all(target_os = "macos", target_arch = "x86_64"))
{
    Some("wasi-sdk-25.0-x86_64-macos.tar.gz")
} else if cfg!(all(target_os = "macos", target_arch = "aarch64")) {
    Some("wasi-sdk-25.0-arm64-macos.tar.gz")
} else if cfg!(all(target_os = "linux", target_arch = "x86_64")) {
    Some("wasi-sdk-25.0-x86_64-linux.tar.gz")
} else if cfg!(all(target_os = "linux", target_arch = "aarch64")) {
    Some("wasi-sdk-25.0-arm64-linux.tar.gz")
} else if cfg!(all(target_os = "freebsd", target_arch = "x86_64")) {
    Some("wasi-sdk-25.0-x86_64-linux.tar.gz")
} else if cfg!(all(target_os = "freebsd", target_arch = "aarch64")) {
    Some("wasi-sdk-25.0-arm64-linux.tar.gz")
} else if cfg!(all(target_os = "windows", target_arch = "x86_64")) {
    Some("wasi-sdk-25.0-x86_64-windows.tar.gz")
} else {
    None
};

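/// Builds Zed extensions from source: compiles the extension's Rust crate to a
/// WASM component and compiles its Tree-sitter grammars to WASM, caching toolchain
/// downloads (such as the wasi-sdk) in `cache_dir`.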
pub struct ExtensionBuilder {
    cache_dir: PathBuf,
    pub http: Arc<dyn HttpClient>,
}

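/// Options controlling how an extension is compiled.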
pub struct CompileExtensionOptions {
    pub release: bool,
}

#[derive(Deserialize)]
struct CargoToml {
    package: CargoTomlPackage,
}

#[derive(Deserialize)]
struct CargoTomlPackage {
    name: String,
}

impl ExtensionBuilder {
    pub fn new(http_client: Arc<dyn HttpClient>, cache_dir: PathBuf) -> Self {
        Self {
            cache_dir,
            http: http_client,
        }
    }

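    /// Compiles the extension at `extension_dir`, filling in computed fields of
    /// `extension_manifest` as it goes.
    ///
    /// A minimal sketch of driving a build (the `http_client`, `cache_dir`,
    /// `extension_dir`, and `manifest` values are assumed to be set up by the caller):
    ///
    /// ```ignore
    /// let builder = ExtensionBuilder::new(http_client, cache_dir);
    /// builder
    ///     .compile_extension(
    ///         &extension_dir,
    ///         &mut manifest,
    ///         CompileExtensionOptions { release: true },
    ///     )
    ///     .await?;
    /// ```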
    pub async fn compile_extension(
        &self,
        extension_dir: &Path,
        extension_manifest: &mut ExtensionManifest,
        options: CompileExtensionOptions,
    ) -> Result<()> {
        populate_defaults(extension_manifest, extension_dir)?;

        if extension_dir.is_relative() {
            bail!(
                "extension dir {} is not an absolute path",
                extension_dir.display()
            );
        }

        fs::create_dir_all(&self.cache_dir).context("failed to create cache dir")?;

        if extension_manifest.lib.kind == Some(ExtensionLibraryKind::Rust) {
            log::info!("compiling Rust extension {}", extension_dir.display());
            self.compile_rust_extension(extension_dir, extension_manifest, options)
                .await
                .context("failed to compile Rust extension")?;
            log::info!("compiled Rust extension {}", extension_dir.display());
        }

        for (debug_adapter_name, meta) in &mut extension_manifest.debug_adapters {
            let debug_adapter_schema_path =
                extension_dir.join(build_debug_adapter_schema_path(debug_adapter_name, meta));

            let debug_adapter_schema = fs::read_to_string(&debug_adapter_schema_path)
                .with_context(|| {
                    format!("failed to read debug adapter schema for `{debug_adapter_name}` from `{debug_adapter_schema_path:?}`")
                })?;
            _ = serde_json::Value::from_str(&debug_adapter_schema).with_context(|| {
                format!("debug adapter schema for `{debug_adapter_name}` (path: `{debug_adapter_schema_path:?}`) is not valid JSON")
            })?;
        }
        for (grammar_name, grammar_metadata) in &extension_manifest.grammars {
            let snake_cased_grammar_name = grammar_name.to_snake_case();
            if grammar_name.as_ref() != snake_cased_grammar_name.as_str() {
                bail!(
                    "grammar name '{grammar_name}' must be written in snake_case: '{snake_cased_grammar_name}'"
                );
            }

            log::info!(
                "compiling grammar {grammar_name} for extension {}",
                extension_dir.display()
            );
            self.compile_grammar(extension_dir, grammar_name.as_ref(), grammar_metadata)
                .await
                .with_context(|| format!("failed to compile grammar '{grammar_name}'"))?;
            log::info!(
                "compiled grammar {grammar_name} for extension {}",
                extension_dir.display()
            );
        }

        log::info!("finished compiling extension {}", extension_dir.display());
        Ok(())
    }

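    /// Compiles the extension's Rust crate to a `wasm32-wasip2` component, strips
    /// unneeded custom sections from it, records the wasm extension API version in
    /// the manifest, and writes the result to `extension.wasm` in the extension directory.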
    async fn compile_rust_extension(
        &self,
        extension_dir: &Path,
        manifest: &mut ExtensionManifest,
        options: CompileExtensionOptions,
    ) -> anyhow::Result<()> {
        self.install_rust_wasm_target_if_needed().await?;

        let cargo_toml_content = fs::read_to_string(extension_dir.join("Cargo.toml"))?;
        let cargo_toml: CargoToml = toml::from_str(&cargo_toml_content)?;

        log::info!(
            "compiling Rust crate for extension {}",
            extension_dir.display()
        );
        let output = util::command::new_smol_command("cargo")
            .args(["build", "--target", RUST_TARGET])
            .args(options.release.then_some("--release"))
            .arg("--target-dir")
            .arg(extension_dir.join("target"))
            // WASI builds do not work with sccache and just hang, so disable it.
            .env("RUSTC_WRAPPER", "")
            .current_dir(extension_dir)
            .output()
            .await
            .context("failed to run `cargo`")?;
        if !output.status.success() {
            bail!(
                "failed to build extension: {}",
                String::from_utf8_lossy(&output.stderr)
            );
        }

        log::info!(
            "compiled Rust crate for extension {}",
            extension_dir.display()
        );

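        // The compiled module lands at
        // `<extension_dir>/target/<RUST_TARGET>/{release,debug}/<crate_name>.wasm`.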
        let mut wasm_path = PathBuf::from(extension_dir);
        wasm_path.extend([
            "target",
            RUST_TARGET,
            if options.release { "release" } else { "debug" },
            &cargo_toml
                .package
                .name
                // The wasm32-wasip2 target normalizes `-` in package names to `_` in the resulting `.wasm` file.
                .replace('-', "_"),
        ]);
        wasm_path.set_extension("wasm");

        log::info!(
            "encoding wasm component for extension {}",
            extension_dir.display()
        );

        let component_bytes = fs::read(&wasm_path)
            .with_context(|| format!("failed to read output module `{}`", wasm_path.display()))?;

        let component_bytes = self
            .strip_custom_sections(&component_bytes)
            .context("failed to strip debug sections from wasm component")?;

        let wasm_extension_api_version =
            parse_wasm_extension_version(&manifest.id, &component_bytes)
                .context("compiled wasm did not contain a valid zed extension api version")?;
        manifest.lib.version = Some(wasm_extension_api_version);

        let extension_file = extension_dir.join("extension.wasm");
        fs::write(extension_file.clone(), &component_bytes)
            .context("failed to write extension.wasm")?;

        log::info!(
            "extension {} written to {}",
            extension_dir.display(),
            extension_file.display()
        );

        Ok(())
    }

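    /// Compiles a single Tree-sitter grammar to `grammars/<grammar_name>.wasm`,
    /// checking out the grammar repository at the pinned revision and invoking the
    /// wasi-sdk's clang. The build is skipped when the existing output is newer than
    /// the grammar's C sources.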
    async fn compile_grammar(
        &self,
        extension_dir: &Path,
        grammar_name: &str,
        grammar_metadata: &GrammarManifestEntry,
    ) -> Result<()> {
        let clang_path = self.install_wasi_sdk_if_needed().await?;

        let mut grammar_repo_dir = extension_dir.to_path_buf();
        grammar_repo_dir.extend(["grammars", grammar_name]);

        let mut grammar_wasm_path = grammar_repo_dir.clone();
        grammar_wasm_path.set_extension("wasm");

        log::info!("checking out {grammar_name} parser");
        self.checkout_repo(
            &grammar_repo_dir,
            &grammar_metadata.repository,
            &grammar_metadata.rev,
        )
        .await?;

        let base_grammar_path = grammar_metadata
            .path
            .as_ref()
            .map(|path| grammar_repo_dir.join(path))
            .unwrap_or(grammar_repo_dir);

        let src_path = base_grammar_path.join("src");
        let parser_path = src_path.join("parser.c");
        let scanner_path = src_path.join("scanner.c");

        // Skip recompiling if the WASM object is already newer than the source files
        if file_newer_than_deps(&grammar_wasm_path, &[&parser_path, &scanner_path]).unwrap_or(false)
        {
            log::info!(
                "skipping compilation of {grammar_name} parser because the existing compiled grammar is up to date"
            );
        } else {
            log::info!("compiling {grammar_name} parser");
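            // Build a position-independent shared WASM object, optimized for size,
            // exporting the grammar's `tree_sitter_<grammar_name>` entry point.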
            let clang_output = util::command::new_smol_command(&clang_path)
                .args(["-fPIC", "-shared", "-Os"])
                .arg(format!("-Wl,--export=tree_sitter_{grammar_name}"))
                .arg("-o")
                .arg(&grammar_wasm_path)
                .arg("-I")
                .arg(&src_path)
                .arg(&parser_path)
                .args(scanner_path.exists().then_some(scanner_path))
                .output()
                .await
                .context("failed to run clang")?;

            if !clang_output.status.success() {
                bail!(
                    "failed to compile {} parser with clang: {}",
                    grammar_name,
                    String::from_utf8_lossy(&clang_output.stderr),
                );
            }
        }

        Ok(())
    }

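    /// Initializes (or reuses) a git clone of `url` in `directory`, then shallowly
    /// fetches and checks out `rev`. Fails if the directory already exists but does
    /// not have `url` configured as its `origin` remote.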
    async fn checkout_repo(&self, directory: &Path, url: &str, rev: &str) -> Result<()> {
        let git_dir = directory.join(".git");

        if directory.exists() {
            let remotes_output = util::command::new_smol_command("git")
                .arg("--git-dir")
                .arg(&git_dir)
                .args(["remote", "-v"])
                .output()
                .await?;
            let has_remote = remotes_output.status.success()
                && String::from_utf8_lossy(&remotes_output.stdout)
                    .lines()
                    .any(|line| {
                        let mut parts = line.split(|c: char| c.is_whitespace());
                        parts.next() == Some("origin") && parts.any(|part| part == url)
                    });
            if !has_remote {
                bail!(
                    "grammar directory '{}' already exists, but is not a git clone of '{}'",
                    directory.display(),
                    url
                );
            }
        } else {
            fs::create_dir_all(directory).with_context(|| {
                format!("failed to create grammar directory {}", directory.display())
            })?;
            let init_output = util::command::new_smol_command("git")
                .arg("init")
                .current_dir(directory)
                .output()
                .await?;
            if !init_output.status.success() {
                bail!(
                    "failed to run `git init` in directory '{}'",
                    directory.display()
                );
            }

            let remote_add_output = util::command::new_smol_command("git")
                .arg("--git-dir")
                .arg(&git_dir)
                .args(["remote", "add", "origin", url])
                .output()
                .await
                .context("failed to execute `git remote add`")?;
            if !remote_add_output.status.success() {
                bail!(
                    "failed to add remote {url} for git repository {}",
                    git_dir.display()
                );
            }
        }

        let fetch_output = util::command::new_smol_command("git")
            .arg("--git-dir")
            .arg(&git_dir)
            .args(["fetch", "--depth", "1", "origin", rev])
            .output()
            .await
            .context("failed to execute `git fetch`")?;

        let checkout_output = util::command::new_smol_command("git")
            .arg("--git-dir")
            .arg(&git_dir)
            .args(["checkout", rev])
            .current_dir(directory)
            .output()
            .await
            .context("failed to execute `git checkout`")?;
        if !checkout_output.status.success() {
            if !fetch_output.status.success() {
                bail!(
                    "failed to fetch revision {} in directory '{}'",
                    rev,
                    directory.display()
                );
            }
            bail!(
                "failed to checkout revision {} in directory '{}': {}",
                rev,
                directory.display(),
                String::from_utf8_lossy(&checkout_output.stderr)
            );
        }

        Ok(())
    }

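    /// Ensures the `wasm32-wasip2` Rust target is installed, adding it via
    /// `rustup target add` when the active toolchain's sysroot does not already contain it.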
    async fn install_rust_wasm_target_if_needed(&self) -> Result<()> {
        let rustc_output = util::command::new_smol_command("rustc")
            .arg("--print")
            .arg("sysroot")
            .output()
            .await
            .context("failed to run rustc")?;
        if !rustc_output.status.success() {
            bail!(
                "failed to retrieve rust sysroot: {}",
                String::from_utf8_lossy(&rustc_output.stderr)
            );
        }

        let sysroot = PathBuf::from(String::from_utf8(rustc_output.stdout)?.trim());
        if sysroot.join("lib/rustlib").join(RUST_TARGET).exists() {
            return Ok(());
        }

        let output = util::command::new_smol_command("rustup")
            .args(["target", "add", RUST_TARGET])
            .stderr(Stdio::piped())
            .stdout(Stdio::inherit())
            .output()
            .await
            .context("failed to run `rustup target add`")?;
        if !output.status.success() {
            bail!(
                "failed to install the `{RUST_TARGET}` target: {}",
                String::from_utf8_lossy(&output.stderr)
            );
        }

        Ok(())
    }

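    /// Downloads and unpacks the wasi-sdk into the cache directory if a cached
    /// `clang` binary is not already present, and returns the path to that binary.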
    async fn install_wasi_sdk_if_needed(&self) -> Result<PathBuf> {
        let url = if let Some(asset_name) = WASI_SDK_ASSET_NAME {
            format!("{WASI_SDK_URL}{asset_name}")
        } else {
            bail!("wasi-sdk is not available for platform {}", env::consts::OS);
        };

        let wasi_sdk_dir = self.cache_dir.join("wasi-sdk");
        let mut clang_path = wasi_sdk_dir.clone();
        clang_path.extend(["bin", &format!("clang{}", env::consts::EXE_SUFFIX)]);

        if fs::metadata(&clang_path).is_ok_and(|metadata| metadata.is_file()) {
            return Ok(clang_path);
        }

        log::info!("downloading wasi-sdk to {}", wasi_sdk_dir.display());

        let tar_out_dir = self.cache_dir.join("wasi-sdk-temp");

        fs::remove_dir_all(&wasi_sdk_dir).ok();
        fs::remove_dir_all(&tar_out_dir).ok();
        fs::create_dir_all(&tar_out_dir).context("failed to create extraction directory")?;

        let mut response = self.http.get(&url, AsyncBody::default(), true).await?;

        // Write the response to a temporary file
        let tar_gz_path = self.cache_dir.join("wasi-sdk.tar.gz");
        let mut tar_gz_file =
            fs::File::create(&tar_gz_path).context("failed to create temporary tar.gz file")?;
        let response_body = response.body_mut();
        let mut body_bytes = Vec::new();
        response_body.read_to_end(&mut body_bytes).await?;
        std::io::Write::write_all(&mut tar_gz_file, &body_bytes)?;
        drop(tar_gz_file);

        log::info!("un-tarring wasi-sdk to {}", tar_out_dir.display());

        // Shell out to tar to extract the archive
        let tar_output = util::command::new_smol_command("tar")
            .arg("-xzf")
            .arg(&tar_gz_path)
            .arg("-C")
            .arg(&tar_out_dir)
            .output()
            .await
            .context("failed to run tar")?;

        if !tar_output.status.success() {
            bail!(
                "failed to extract wasi-sdk archive: {}",
                String::from_utf8_lossy(&tar_output.stderr)
            );
        }

        log::info!("finished downloading wasi-sdk");

        // Clean up the temporary tar.gz file
        fs::remove_file(&tar_gz_path).ok();

        let inner_dir = fs::read_dir(&tar_out_dir)?
            .next()
            .context("extracted wasi-sdk archive was empty")?
            .context("failed to read contents of extracted wasi archive directory")?
            .path();
        fs::rename(&inner_dir, &wasi_sdk_dir).context("failed to move extracted wasi dir")?;
        fs::remove_dir_all(&tar_out_dir).ok();

        Ok(clang_path)
    }


    // This was adapted from:
    // https://github.com/bytecodealliance/wasm-tools/blob/e8809bb17fcf69aa8c85cd5e6db7cff5cf36b1de/src/bin/wasm-tools/strip.rs
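    //
    // Strips custom sections (debug info and the like) from the compiled component,
    // keeping only the custom sections the extension runtime relies on.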
    fn strip_custom_sections(&self, input: &[u8]) -> Result<Vec<u8>> {
        use wasmparser::Payload::*;

        let strip_custom_section = |name: &str| {
            // Strip everything except:
            // * the `name` section
            // * any `component-type` sections
            // * the `dylink.0` section
            // * our custom version section
            name != "name"
                && !name.starts_with("component-type:")
                && name != "dylink.0"
                && name != "zed:api-version"
        };

        let mut output = Vec::new();
        let mut stack = Vec::new();

        for payload in Parser::new(0).parse_all(input) {
            let payload = payload?;

            // Track nesting depth, so that we don't mess with inner producer sections:
            match payload {
                Version { encoding, .. } => {
                    output.extend_from_slice(match encoding {
                        wasmparser::Encoding::Component => &wasm_encoder::Component::HEADER,
                        wasmparser::Encoding::Module => &wasm_encoder::Module::HEADER,
                    });
                }
                ModuleSection { .. } | ComponentSection { .. } => {
                    stack.push(mem::take(&mut output));
                    continue;
                }
                End { .. } => {
                    let mut parent = match stack.pop() {
                        Some(c) => c,
                        None => break,
                    };
                    if output.starts_with(&wasm_encoder::Component::HEADER) {
                        parent.push(ComponentSectionId::Component as u8);
                        output.encode(&mut parent);
                    } else {
                        parent.push(ComponentSectionId::CoreModule as u8);
                        output.encode(&mut parent);
                    }
                    output = parent;
                }
                _ => {}
            }

            if let CustomSection(c) = &payload
                && strip_custom_section(c.name())
            {
                continue;
            }
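            // Copy any other payload that maps to a section (including the custom
            // sections we decided to keep) through to the output verbatim.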
            if let Some((id, range)) = payload.as_section() {
                RawSection {
                    id,
                    data: &input[range],
                }
                .append_to(&mut output);
            }
        }

        Ok(output)
    }
}

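/// Fills in the computed fields of the manifest from the extension directory's
/// layout: the Rust lib kind (when a `Cargo.toml` is present), languages, themes,
/// icon themes, snippets, and, for legacy v0 (`extension.json`) extensions, the
/// grammars described by the `grammars/*.toml` files.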
fn populate_defaults(manifest: &mut ExtensionManifest, extension_path: &Path) -> Result<()> {
    // For legacy extensions on the v0 schema (aka, using `extension.json`), clear out any existing
    // contents of the computed fields, since we don't care what the existing values are.
    if manifest.schema_version.is_v0() {
        manifest.languages.clear();
        manifest.grammars.clear();
        manifest.themes.clear();
    }

    let cargo_toml_path = extension_path.join("Cargo.toml");
    if cargo_toml_path.exists() {
        manifest.lib.kind = Some(ExtensionLibraryKind::Rust);
    }

    let languages_dir = extension_path.join("languages");
    if languages_dir.exists() {
        for entry in fs::read_dir(&languages_dir).context("failed to list languages dir")? {
            let entry = entry?;
            let language_dir = entry.path();
            let config_path = language_dir.join("config.toml");
            if config_path.exists() {
                let relative_language_dir =
                    language_dir.strip_prefix(extension_path)?.to_path_buf();
                if !manifest.languages.contains(&relative_language_dir) {
                    manifest.languages.push(relative_language_dir);
                }
            }
        }
    }

    let themes_dir = extension_path.join("themes");
    if themes_dir.exists() {
        for entry in fs::read_dir(&themes_dir).context("failed to list themes dir")? {
            let entry = entry?;
            let theme_path = entry.path();
            if theme_path.extension() == Some("json".as_ref()) {
                let relative_theme_path = theme_path.strip_prefix(extension_path)?.to_path_buf();
                if !manifest.themes.contains(&relative_theme_path) {
                    manifest.themes.push(relative_theme_path);
                }
            }
        }
    }

    let icon_themes_dir = extension_path.join("icon_themes");
    if icon_themes_dir.exists() {
        for entry in fs::read_dir(&icon_themes_dir).context("failed to list icon themes dir")? {
            let entry = entry?;
            let icon_theme_path = entry.path();
            if icon_theme_path.extension() == Some("json".as_ref()) {
                let relative_icon_theme_path =
                    icon_theme_path.strip_prefix(extension_path)?.to_path_buf();
                if !manifest.icon_themes.contains(&relative_icon_theme_path) {
                    manifest.icon_themes.push(relative_icon_theme_path);
                }
            }
        }
    }

    let snippets_json_path = extension_path.join("snippets.json");
    if snippets_json_path.exists() {
        manifest.snippets = Some(snippets_json_path);
    }

    // For legacy extensions on the v0 schema (aka, using `extension.json`), we want to populate the grammars in
    // the manifest using the contents of the `grammars` directory.
    if manifest.schema_version.is_v0() {
        let grammars_dir = extension_path.join("grammars");
        if grammars_dir.exists() {
            for entry in fs::read_dir(&grammars_dir).context("failed to list grammars dir")? {
                let entry = entry?;
                let grammar_path = entry.path();
                if grammar_path.extension() == Some("toml".as_ref()) {
                    #[derive(Deserialize)]
                    struct GrammarConfigToml {
                        pub repository: String,
                        pub commit: String,
                        #[serde(default)]
                        pub path: Option<String>,
                    }

                    let grammar_config = fs::read_to_string(&grammar_path)?;
                    let grammar_config: GrammarConfigToml = toml::from_str(&grammar_config)?;

                    let grammar_name = grammar_path
                        .file_stem()
                        .and_then(|stem| stem.to_str())
                        .context("no grammar name")?;
                    if !manifest.grammars.contains_key(grammar_name) {
                        manifest.grammars.insert(
                            grammar_name.into(),
                            GrammarManifestEntry {
                                repository: grammar_config.repository,
                                rev: grammar_config.commit,
                                path: grammar_config.path,
                            },
                        );
                    }
                }
            }
        }
    }

    Ok(())
}

/// Returns `true` if the target exists and its last modified time is greater than that
/// of each dependency which exists (i.e., dependency paths which do not exist are ignored).
///
/// # Errors
///
/// Returns `Err` if any of the underlying file I/O operations fail.
fn file_newer_than_deps(target: &Path, dependencies: &[&Path]) -> Result<bool, std::io::Error> {
    if !target.try_exists()? {
        return Ok(false);
    }
    let target_modified = target.metadata()?.modified()?;
    for dependency in dependencies {
        if !dependency.try_exists()? {
            continue;
        }
        let dep_modified = dependency.metadata()?.modified()?;
        if target_modified < dep_modified {
            return Ok(false);
        }
    }
    Ok(true)
}

#[cfg(test)]
mod tests {
    use super::*;

    use std::{fs, thread::sleep, time::Duration};

    #[test]
    fn test_file_newer_than_deps() {
        // Don't use TempTree because we need to guarantee the order
        let tmpdir = tempfile::tempdir().unwrap();
        let target = tmpdir.path().join("target.wasm");
        let dep1 = tmpdir.path().join("parser.c");
        let dep2 = tmpdir.path().join("scanner.c");

        assert!(
            !file_newer_than_deps(&target, &[&dep1, &dep2]).unwrap(),
            "target doesn't exist"
        );
        fs::write(&target, "foo").unwrap(); // Create target
        assert!(
            file_newer_than_deps(&target, &[&dep1, &dep2]).unwrap(),
            "dependencies don't exist; target is newer"
        );
        sleep(Duration::from_secs(1));
        fs::write(&dep1, "foo").unwrap(); // Create dep1 (newer than target)
        // Dependency is newer
        assert!(
            !file_newer_than_deps(&target, &[&dep1, &dep2]).unwrap(),
            "a dependency is newer (target {:?}, dep1 {:?})",
            target.metadata().unwrap().modified().unwrap(),
            dep1.metadata().unwrap().modified().unwrap(),
        );
        sleep(Duration::from_secs(1));
        fs::write(&dep2, "foo").unwrap(); // Create dep2
        sleep(Duration::from_secs(1));
        fs::write(&target, "foobar").unwrap(); // Update target
        assert!(
            file_newer_than_deps(&target, &[&dep1, &dep2]).unwrap(),
            "target is newer than dependencies (target {:?}, dep2 {:?})",
            target.metadata().unwrap().modified().unwrap(),
            dep2.metadata().unwrap().modified().unwrap(),
        );
    }
}