Detailed changes
@@ -1,5 +1,6 @@
use anyhow::{Context as _, Result};
use client::{Client, telemetry::MINIDUMP_ENDPOINT};
+use feature_flags::FeatureFlagAppExt;
use futures::{AsyncReadExt, TryStreamExt};
use gpui::{App, AppContext as _, SerializedThreadTaskTimings};
use http_client::{self, AsyncBody, HttpClient, Request};
@@ -10,8 +11,10 @@ use reqwest::{
Method,
multipart::{Form, Part},
};
+use serde::Deserialize;
use smol::stream::StreamExt;
use std::{ffi::OsStr, fs, sync::Arc, thread::ThreadId, time::Duration};
+use sysinfo::{MemoryRefreshKind, RefreshKind, System};
use util::ResultExt;
use crate::STARTUP_TIME;
@@ -21,6 +24,20 @@ const MAX_HANG_TRACES: usize = 3;
pub fn init(client: Arc<Client>, cx: &mut App) {
monitor_hangs(cx);
+ cx.on_flags_ready({
+ let client = client.clone();
+ move |flags_ready, cx| {
+ if flags_ready.is_staff {
+ let client = client.clone();
+ cx.background_spawn(async move {
+ upload_build_timings(client).await.warn_on_err();
+ })
+ .detach();
+ }
+ }
+ })
+ .detach();
+
if client.telemetry().diagnostics_enabled() {
let client = client.clone();
cx.background_spawn(async move {
@@ -374,6 +391,81 @@ async fn upload_minidump(
Ok(())
}
+#[derive(Debug, Deserialize)]
+struct BuildTiming {
+ started_at: chrono::DateTime<chrono::Utc>,
+ duration_ms: f32,
+ first_crate: String,
+ target: String,
+ lock_wait_ms: f32,
+ command: String,
+}
+
// NOTE: this is a bit of a hack. We want to be able to have internal
// metrics around build times, but we don't have an easy way to authenticate
// users - except - we know internal users use Zed.
// So, we have it upload the timings on their behalf, it'd be better to do
// this more directly in ./script/cargo-timing-info.js.
//
// Scans the `build_timings` directory under the Zed data dir for JSON files
// written by ./script/cargo-timing-info.js, emits one "Build Timing: Cargo
// Build" telemetry event per file (enriched with this machine's CPU count
// and total RAM), and deletes each file after reporting it. Files that fail
// to read or parse are logged and skipped — note they are left on disk, so
// they will be retried (and re-logged) on every subsequent launch.
//
// The `_client` parameter is currently unused: events go through the
// `telemetry::event!` macro's own reporting pipeline.
async fn upload_build_timings(_client: Arc<Client>) -> Result<()> {
    let build_timings_dir = paths::data_dir().join("build_timings");

    // Nothing to do on machines that never ran the wrapper script.
    if !build_timings_dir.exists() {
        return Ok(());
    }

    // Hardware context attached to every event so build times are comparable
    // across machines.
    let cpu_count = std::thread::available_parallelism()
        .map(|n| n.get())
        .unwrap_or(1);
    let system = System::new_with_specifics(
        RefreshKind::nothing().with_memory(MemoryRefreshKind::everything()),
    );
    let ram_size_gb = (system.total_memory() as f64) / (1024.0 * 1024.0 * 1024.0);

    let mut entries = smol::fs::read_dir(&build_timings_dir).await?;
    while let Some(entry) = entries.next().await {
        let entry = entry?;
        let path = entry.path();

        // The wrapper script only writes `.json`; ignore anything else.
        if path.extension() != Some(OsStr::new("json")) {
            continue;
        }

        let contents = match smol::fs::read_to_string(&path).await {
            Ok(contents) => contents,
            Err(err) => {
                log::warn!("Failed to read build timing file {:?}: {}", path, err);
                continue;
            }
        };

        let timing: BuildTiming = match serde_json::from_str(&contents) {
            Ok(timing) => timing,
            Err(err) => {
                log::warn!("Failed to parse build timing file {:?}: {}", path, err);
                continue;
            }
        };

        telemetry::event!(
            "Build Timing: Cargo Build",
            started_at = timing.started_at.to_rfc3339(),
            duration_ms = timing.duration_ms,
            first_crate = timing.first_crate,
            target = timing.target,
            lock_wait_ms = timing.lock_wait_ms,
            command = timing.command,
            cpu_count = cpu_count,
            ram_size_gb = ram_size_gb
        );

        // Delete only after the event was queued, so a failure earlier in the
        // loop leaves the file to be retried. Deletion failure is non-fatal.
        if let Err(err) = smol::fs::remove_file(&path).await {
            log::warn!("Failed to delete build timing file {:?}: {}", path, err);
        }
    }

    Ok(())
}
+
trait FormExt {
fn text_if_some(
self,
@@ -0,0 +1,281 @@
+#!/usr/bin/env node
+
+// ./script/cargo is a transparent wrapper around cargo that:
+// - When running in a clone of `./zed-industries/zed`
+// - outputs build timings to the ZED_DATA_DIR/build_timings
+// When Zed starts for staff-members it uploads the build timings to Snowflake
+// To use it:
+// ./script/cargo --init
+// This will add a wrapper to your shell configuration files.
+// (Otherwise set up an alias `cargo=PATH_TO_THIS_FILE`)
+
+// We need to ignore SIGINT in this process so that we can continue
+// processing timing files after the child cargo process exits.
+// The signal will still be delivered to the child process.
+process.on("SIGINT", () => {});
+
+const { spawn, spawnSync } = require("child_process");
+const fs = require("fs");
+const os = require("os");
+const path = require("path");
+const readline = require("readline");
+
// Cargo subcommands that actually compile code and therefore produce
// --timings reports.
const SUBCOMMANDS_WITH_TIMINGS = new Set(["build", "check", "run", "test"]);

// Cargo's built-in single-letter aliases, mapped to their full subcommand.
const CARGO_ALIASES = {
  b: "build",
  c: "check",
  t: "test",
  r: "run",
  d: "doc",
};

// Resolve a built-in cargo alias (e.g. "b" -> "build"); unknown names pass
// through unchanged.
function expandAlias(subcommand) {
  const expanded = CARGO_ALIASES[subcommand];
  return expanded !== undefined ? expanded : subcommand;
}
+
+function detectShell() {
+ // Check for PowerShell first (works when running from pwsh)
+ if (process.env.PSModulePath && !process.env.BASH_VERSION) {
+ return "powershell";
+ }
+
+ const shell = process.env.SHELL || "";
+ if (shell.endsWith("/zsh")) return "zsh";
+ if (shell.endsWith("/bash")) return "bash";
+ if (shell.endsWith("/fish")) return "fish";
+ if (shell.endsWith("/pwsh") || shell.endsWith("/powershell")) return "powershell";
+ return path.basename(shell) || "unknown";
+}
+
+function getShellConfigPath(shell) {
+ const home = os.homedir();
+ switch (shell) {
+ case "zsh":
+ return path.join(home, ".zshrc");
+ case "bash":
+ // Prefer .bashrc, fall back to .bash_profile
+ const bashrc = path.join(home, ".bashrc");
+ if (fs.existsSync(bashrc)) return bashrc;
+ return path.join(home, ".bash_profile");
+ case "fish":
+ return path.join(home, ".config", "fish", "config.fish");
+ case "powershell":
+ // PowerShell Core (pwsh) profile locations
+ if (process.platform === "win32") {
+ return path.join(home, "Documents", "PowerShell", "Microsoft.PowerShell_profile.ps1");
+ } else {
+ return path.join(home, ".config", "powershell", "Microsoft.PowerShell_profile.ps1");
+ }
+ default:
+ return null;
+ }
+}
+
+function generateAlias(shell, scriptDir) {
+ const cargoWrapper = path.join(scriptDir, "cargo");
+
+ switch (shell) {
+ case "zsh":
+ case "bash":
+ return `\n# Zed cargo timing wrapper\ncargo() { local w="${cargoWrapper}"; [[ -x "$w" ]] && "$w" "$@" || command cargo "$@"; }\n`;
+ case "fish":
+ return `\n# Zed cargo timing wrapper\nfunction cargo\n set -l w "${cargoWrapper}"\n if test -x "$w"\n "$w" $argv\n else\n command cargo $argv\n end\nend\n`;
+ case "powershell":
+ return `\n# Zed cargo timing wrapper\nfunction cargo {\n \$wrapper = "${cargoWrapper}"\n if (Test-Path \$wrapper) {\n & \$wrapper @args\n } else {\n & (Get-Command -Name cargo -CommandType Application | Select-Object -First 1).Source @args\n }\n}\n`;
+ default:
+ return `cargo() { local w="${cargoWrapper}"; [[ -x "$w" ]] && "$w" "$@" || command cargo "$@"; }`;
+ }
+}
+
// Install the `cargo` wrapper function into the detected shell's config
// file. Prints manual instructions for unsupported shells. Idempotent:
// detects a previous install via the "Zed cargo timing wrapper" marker
// comment and bails out rather than appending a duplicate.
function initShellAlias() {
  const scriptDir = __dirname;
  const shell = detectShell();
  const configPath = getShellConfigPath(shell);
  const alias = generateAlias(shell, scriptDir);

  // No known config file for this shell — let the user paste it themselves.
  if (!configPath) {
    console.log(`Unsupported shell: ${shell}`);
    console.log("\nAdd the following to your shell configuration:\n");
    console.log(alias);
    return;
  }

  // Check if alias already exists
  if (fs.existsSync(configPath)) {
    const content = fs.readFileSync(configPath, "utf-8");
    if (content.includes("Zed cargo timing wrapper")) {
      console.log(`Alias already exists in ${configPath}`);
      console.log("To update, remove the existing alias and run --init again.");
      return;
    }
  }

  // Create parent directory if needed (for PowerShell on Linux/macOS)
  const configDir = path.dirname(configPath);
  if (!fs.existsSync(configDir)) {
    fs.mkdirSync(configDir, { recursive: true });
  }

  // Append alias to config file
  fs.appendFileSync(configPath, alias);
  console.log(`Added cargo timing alias to ${configPath}`);

  if (shell === "powershell") {
    console.log(`\nRestart PowerShell or run: . "${configPath}"`);
  } else {
    console.log(`\nRestart your shell or run: source ${configPath}`);
  }
}
+
// True when the current working directory is inside a checkout whose git
// remotes mention zed-industries/zed. Any git failure (not a repo, git
// missing, timeout) is treated as "not the Zed repo".
function isZedRepo() {
  try {
    const git = spawnSync("git", ["remote", "-v"], {
      encoding: "utf-8",
      stdio: ["pipe", "pipe", "pipe"],
      timeout: 5000,
    });
    return git.status === 0 && !!git.stdout && git.stdout.includes("zed-industries/zed");
  } catch {
    return false;
  }
}
+
// Locate the cargo subcommand among the arguments, skipping global flags and
// `+toolchain` selectors. Returns { subcommand, index } or null when no
// subcommand is present.
//
// Only flags known to take their value as a *separate* argument consume the
// following token. The previous heuristic consumed the next argument after
// ANY flag, so boolean globals swallowed the subcommand ("cargo --verbose
// build" found no subcommand and never got --timings).
function findSubcommand(args) {
  // Cargo global (pre-subcommand) flags that take a separate value when not
  // written as `--flag=value`.
  const valueTakingFlags = new Set(["--color", "--config", "-C", "-Z"]);
  for (let i = 0; i < args.length; i++) {
    const arg = args[i];
    // Rustup toolchain selector (e.g. `+nightly`) is not the subcommand.
    if (arg.startsWith("+")) {
      continue;
    }
    if (arg.startsWith("-")) {
      // `--flag=value` carries its value inline; only known value-taking
      // flags consume the next argument.
      if (!arg.includes("=") && valueTakingFlags.has(arg) && i + 1 < args.length) {
        i++;
      }
      continue;
    }
    // First remaining non-flag argument is the subcommand
    return { subcommand: arg, index: i };
  }
  return null;
}
+
+function findLatestTimingFile(targetDir) {
+ const timingsDir = path.join(targetDir, "cargo-timings");
+ if (!fs.existsSync(timingsDir)) {
+ return null;
+ }
+
+ const files = fs
+ .readdirSync(timingsDir)
+ .filter((f) => f.startsWith("cargo-timing-") && f.endsWith(".html") && f !== "cargo-timing.html")
+ .map((f) => ({
+ name: f,
+ path: path.join(timingsDir, f),
+ mtime: fs.statSync(path.join(timingsDir, f)).mtime.getTime(),
+ }))
+ .sort((a, b) => b.mtime - a.mtime);
+
+ return files.length > 0 ? files[0].path : null;
+}
+
// Determine the cargo target directory for this invocation: an explicit
// --target-dir flag wins, then the CARGO_TARGET_DIR environment variable
// (cargo's standard override — previously ignored, which made the wrapper
// look for timing reports in the wrong place), then cargo's default
// "target".
function getTargetDir(args) {
  // Check for --target-dir flag
  for (let i = 0; i < args.length; i++) {
    if (args[i] === "--target-dir" && i + 1 < args.length) {
      return args[i + 1];
    }
    if (args[i].startsWith("--target-dir=")) {
      return args[i].substring("--target-dir=".length);
    }
  }
  // Respect cargo's environment override before falling back to the default.
  if (process.env.CARGO_TARGET_DIR) {
    return process.env.CARGO_TARGET_DIR;
  }
  // Default target directory
  return "target";
}
+
// Run the real cargo with the given args, mirroring its stdio, then exit
// this process with cargo's status (1 when cargo left no status, e.g. it was
// killed by a signal).
function runCargoPassthrough(args) {
  const cargoCmd = process.env.CARGO || "cargo";
  const { status } = spawnSync(cargoCmd, args, { stdio: "inherit", shell: false });
  process.exit(status ?? 1);
}
+
// Entry point: handles --init, passes through unchanged outside the Zed
// repo, otherwise injects --timings into compiling subcommands, runs cargo,
// and hands the resulting timing report to cargo-timing-info.js in the
// background before propagating cargo's exit status (or signal).
async function main() {
  const args = process.argv.slice(2);

  // Handle --init flag
  if (args[0] === "--init") {
    initShellAlias();
    return;
  }

  // If not in zed repo, just pass through to cargo
  if (!isZedRepo()) {
    runCargoPassthrough(args);
    return;
  }

  const cargoCmd = process.env.CARGO || "cargo";
  const subcommandInfo = findSubcommand(args);
  const expandedSubcommand = subcommandInfo ? expandAlias(subcommandInfo.subcommand) : null;
  const shouldAddTimings = expandedSubcommand && SUBCOMMANDS_WITH_TIMINGS.has(expandedSubcommand);

  // Build the final args array
  let finalArgs = [...args];
  if (shouldAddTimings) {
    // Check if --timings is already present
    const hasTimings = args.some((arg) => arg === "--timings" || arg.startsWith("--timings="));
    if (!hasTimings) {
      // Insert --timings after the subcommand
      finalArgs.splice(subcommandInfo.index + 1, 0, "--timings");
    }
  }

  // Run cargo asynchronously so we can handle signals properly
  // (this process ignores SIGINT — see the handler at the top of the file —
  // so Ctrl-C stops cargo but lets us finish post-processing).
  const child = spawn(cargoCmd, finalArgs, {
    stdio: "inherit",
    shell: false,
  });

  // Wait for the child to exit
  const result = await new Promise((resolve) => {
    child.on("exit", (code, signal) => {
      resolve({ status: code, signal });
    });
  });

  // If we added timings, try to process the timing file (regardless of cargo's exit status)
  // NOTE(review): picks the newest report by mtime — assumes no concurrent
  // cargo run wrote a newer report into the same target dir.
  if (shouldAddTimings) {
    const targetDir = getTargetDir(args);
    const timingFile = findLatestTimingFile(targetDir);

    if (timingFile) {
      // Run cargo-timing-info.js in the background
      const scriptDir = __dirname;
      const timingScript = path.join(scriptDir, "cargo-timing-info.js");

      if (fs.existsSync(timingScript)) {
        // Detached + unref so this wrapper can exit without waiting for the
        // analysis to finish.
        const timingChild = spawn(process.execPath, [timingScript, timingFile, `cargo ${expandedSubcommand}`], {
          detached: true,
          stdio: "ignore",
        });
        timingChild.unref();
      }
    }
  }

  // Exit with cargo's exit code, or re-raise the signal if it was killed
  if (result.signal) {
    // Reset signal handler and re-raise so parent sees the signal
    process.removeAllListeners(result.signal);
    process.kill(process.pid, result.signal);
  } else {
    process.exit(result.status ?? 1);
  }
}

main();
@@ -0,0 +1,185 @@
+#!/usr/bin/env node
+
+const fs = require("fs");
+const os = require("os");
+const path = require("path");
+
+function getZedDataDir() {
+ const platform = process.platform;
+
+ if (platform === "darwin") {
+ // macOS: ~/Library/Application Support/Zed
+ return path.join(os.homedir(), "Library", "Application Support", "Zed");
+ } else if (platform === "linux" || platform === "freebsd") {
+ // Linux/FreeBSD: $FLATPAK_XDG_DATA_HOME or XDG_DATA_HOME/zed
+ if (process.env.FLATPAK_XDG_DATA_HOME) {
+ return path.join(process.env.FLATPAK_XDG_DATA_HOME, "zed");
+ }
+ const xdgDataHome = process.env.XDG_DATA_HOME || path.join(os.homedir(), ".local", "share");
+ return path.join(xdgDataHome, "zed");
+ } else if (platform === "win32") {
+ // Windows: LocalAppData/Zed
+ const localAppData = process.env.LOCALAPPDATA || path.join(os.homedir(), "AppData", "Local");
+ return path.join(localAppData, "Zed");
+ } else {
+ // Fallback to XDG config dir
+ const xdgConfigHome = process.env.XDG_CONFIG_HOME || path.join(os.homedir(), ".config");
+ return path.join(xdgConfigHome, "zed");
+ }
+}
+
// Pull the `const UNIT_DATA = [...]` array out of a cargo --timings HTML
// report. Throws when the marker is missing or the payload is not valid
// JSON.
function extractUnitData(htmlContent) {
  const unitDataMatch = htmlContent.match(/const\s+UNIT_DATA\s*=\s*(\[[\s\S]*?\]);/);
  if (!unitDataMatch) {
    throw new Error("Could not find UNIT_DATA in the file");
  }

  try {
    return JSON.parse(unitDataMatch[1]);
  } catch (e) {
    throw new Error(`Failed to parse UNIT_DATA as JSON: ${e.message}`);
  }
}
+
// Human-readable duration: "12.34s" under a minute, "2m 3.45s" otherwise.
function formatTime(seconds) {
  if (seconds >= 60) {
    const minutes = Math.floor(seconds / 60);
    return `${minutes}m ${(seconds % 60).toFixed(2)}s`;
  }
  return `${seconds.toFixed(2)}s`;
}
+
// "name vVERSION", plus the trimmed target kind in parens when present and
// non-blank.
function formatUnit(unit) {
  const target = unit.target ? unit.target.trim() : "";
  const suffix = target ? ` (${target})` : "";
  return `${unit.name} v${unit.version}${suffix}`;
}
+
+function parseTimestampFromFilename(filePath) {
+ const basename = path.basename(filePath);
+ // Format: cargo-timing-20260219T161555.879263Z.html
+ const match = basename.match(/cargo-timing-(\d{4})(\d{2})(\d{2})T(\d{2})(\d{2})(\d{2})\.(\d+)Z\.html/);
+ if (!match) {
+ return null;
+ }
+ const [, year, month, day, hour, minute, second, microseconds] = match;
+ // Convert to ISO 8601 format
+ const milliseconds = Math.floor(parseInt(microseconds) / 1000);
+ return `${year}-${month}-${day}T${hour}:${minute}:${second}.${milliseconds.toString().padStart(3, "0")}Z`;
+}
+
// Persist one build's timing summary as JSON into Zed's data dir, where the
// editor's `upload_build_timings` picks it up on the next launch.
//
// Field names must match the Rust `BuildTiming` struct: the lock-wait
// measurement is deserialized there as `lock_wait_ms`, so it is written
// under that key (it was previously written as `blocked_ms`, which the Rust
// side could not deserialize).
function writeBuildTimingJson(filePath, durationMs, firstCrate, target, blockedMs, command) {
  const buildTimingsDir = path.join(getZedDataDir(), "build_timings");

  // Create directory if it doesn't exist
  if (!fs.existsSync(buildTimingsDir)) {
    fs.mkdirSync(buildTimingsDir, { recursive: true });
  }

  // Parse timestamp from filename, or use file modification time as fallback
  let startedAt = parseTimestampFromFilename(filePath);
  if (!startedAt) {
    const stats = fs.statSync(filePath);
    startedAt = stats.mtime.toISOString();
  }

  const buildTiming = {
    started_at: startedAt,
    duration_ms: durationMs,
    first_crate: firstCrate,
    target: target,
    lock_wait_ms: blockedMs,
    command: command,
  };

  // ISO timestamps contain ":" which is invalid in Windows filenames (this
  // script explicitly supports win32), so sanitize the name. The Rust reader
  // matches on the .json extension only and never parses the filename.
  const safeStamp = startedAt.replace(/[:.]/g, "-");
  const jsonPath = path.join(buildTimingsDir, `build-timing-${safeStamp}.json`);
  fs.writeFileSync(jsonPath, JSON.stringify(buildTiming, null, 2) + "\n");
  console.log(`\nWrote build timing JSON to: ${jsonPath}`);
}
+
// Parse a cargo --timings HTML report, print a human-readable summary, and
// write the machine-readable JSON consumed by Zed's telemetry uploader.
// `command` is the cargo invocation that produced the report (may be null).
function analyzeTimings(filePath, command) {
  // Read the file
  const htmlContent = fs.readFileSync(filePath, "utf-8");

  // Extract UNIT_DATA
  const unitData = extractUnitData(htmlContent);

  if (unitData.length === 0) {
    console.log("No units found in UNIT_DATA");
    return;
  }

  // Find the unit that finishes last (start + duration)
  let lastFinishingUnit = unitData[0];
  let maxEndTime = unitData[0].start + unitData[0].duration;

  for (const unit of unitData) {
    const endTime = unit.start + unit.duration;
    if (endTime > maxEndTime) {
      maxEndTime = endTime;
      lastFinishingUnit = unit;
    }
  }

  // Find the first crate that had to be rebuilt (earliest start time)
  // Sort by start time to find the first one
  const sortedByStart = [...unitData].sort((a, b) => a.start - b.start);
  const firstRebuilt = sortedByStart[0];

  // The minimum start time indicates time spent blocked (e.g. waiting for cargo lock)
  // NOTE(review): assumes unit `start` values are measured from cargo's own
  // start, so the earliest start approximates lock-wait — confirm against
  // cargo's --timings output format if this looks off.
  const blockedTime = firstRebuilt.start;

  // Find the last item being built (the one that was still building when the build finished)
  // This is the unit with the latest end time (which we already found)
  const lastBuilding = lastFinishingUnit;

  console.log("=== Cargo Timing Analysis ===\n");
  console.log(`File: ${path.basename(filePath)}\n`);
  console.log(`Total build time: ${formatTime(maxEndTime)}`);
  console.log(`Time blocked: ${formatTime(blockedTime)}`);
  console.log(`Total crates compiled: ${unitData.length}\n`);
  console.log(`First crate rebuilt: ${formatUnit(firstRebuilt)}`);
  console.log(`  Started at: ${formatTime(firstRebuilt.start)}`);
  console.log(`  Duration: ${formatTime(firstRebuilt.duration)}\n`);
  console.log(`Last item being built: ${formatUnit(lastBuilding)}`);
  console.log(`  Started at: ${formatTime(lastBuilding.start)}`);
  console.log(`  Duration: ${formatTime(lastBuilding.duration)}`);
  console.log(`  Finished at: ${formatTime(lastBuilding.start + lastBuilding.duration)}`);

  // Write JSON file for BuildTiming struct
  // The reported "target" is the name of the last-finishing unit —
  // presumably the final bin/lib being linked.
  const durationMs = maxEndTime * 1000;
  const blockedMs = blockedTime * 1000;
  const firstCrateName = firstRebuilt.name;
  const targetName = lastBuilding.name;
  writeBuildTimingJson(filePath, durationMs, firstCrateName, targetName, blockedMs, command);
}
+
// Main execution
// CLI: first arg is the cargo-timing HTML report path (required); second is
// the cargo command string to record alongside the timing (optional).
const args = process.argv.slice(2);

if (args.length === 0) {
  console.error("Usage: cargo-timing-info.js <path-to-cargo-timing.html> [command]");
  console.error("");
  console.error("Example:");
  console.error("  cargo-timing-info.js target/cargo-timings/cargo-timing-20260219T161555.879263Z.html");
  process.exit(1);
}

const filePath = args[0];
const command = args[1] || null;

if (!fs.existsSync(filePath)) {
  console.error(`Error: File not found: ${filePath}`);
  process.exit(1);
}

// Any analysis or I/O failure is reported as a single-line error, exit 1.
try {
  analyzeTimings(filePath, command);
} catch (e) {
  console.error(`Error: ${e.message}`);
  process.exit(1);
}