diff --git a/Cargo.lock b/Cargo.lock
index 7de383f18105bb13d1978fb7d8aa8a2c5c676398..72ae0c8fbbd4e57bd063444eb531338e0b6c9031 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -7559,6 +7559,9 @@ name = "http_client"
 version = "0.1.0"
 dependencies = [
  "anyhow",
+ "async-compression",
+ "async-fs",
+ "async-tar",
  "bytes 1.10.1",
  "derive_more",
  "futures 0.3.31",
@@ -7569,7 +7572,10 @@ dependencies = [
  "reqwest 0.12.15 (git+https://github.com/zed-industries/reqwest.git?rev=951c770a32f1998d6e999cef3e59e0013e6c4415)",
  "serde",
  "serde_json",
+ "sha2",
+ "tempfile",
  "url",
+ "util",
  "workspace-hack",
 ]

@@ -8840,11 +8846,9 @@ dependencies = [
  "serde_json",
  "serde_json_lenient",
  "settings",
- "sha2",
  "shlex",
  "smol",
  "task",
- "tempfile",
  "text",
  "theme",
  "toml 0.8.20",
@@ -12087,8 +12091,6 @@ dependencies = [
  "aho-corasick",
  "anyhow",
  "askpass",
- "async-compression",
- "async-tar",
  "async-trait",
  "base64 0.22.1",
  "buffer_diff",
diff --git a/crates/http_client/Cargo.toml b/crates/http_client/Cargo.toml
index f63bff295e22c36512dbc6285e68d4686714f411..bdcfe0fc06d4a195776fe1a4e745eb9aad55125d 100644
--- a/crates/http_client/Cargo.toml
+++ b/crates/http_client/Cargo.toml
@@ -16,16 +16,22 @@ path = "src/http_client.rs"
 doctest = true

 [dependencies]
-bytes.workspace = true
 anyhow.workspace = true
+async-compression.workspace = true
+async-fs.workspace = true
+async-tar.workspace = true
+bytes.workspace = true
 derive_more.workspace = true
 futures.workspace = true
-http.workspace = true
 http-body.workspace = true
+http.workspace = true
 log.workspace = true
 parking_lot.workspace = true
 reqwest.workspace = true
 serde.workspace = true
 serde_json.workspace = true
+sha2.workspace = true
+tempfile.workspace = true
 url.workspace = true
+util.workspace = true
 workspace-hack.workspace = true
diff --git a/crates/languages/src/github_download.rs b/crates/http_client/src/github_download.rs
similarity index 91%
rename from crates/languages/src/github_download.rs
rename to crates/http_client/src/github_download.rs
index 766c894fbb2b660778f09933b4facd2114ebb5bf..02dee08b215e547d632caaf5f94b0872aa6aa20d 100644
--- a/crates/languages/src/github_download.rs
+++ b/crates/http_client/src/github_download.rs
@@ -3,18 +3,18 @@ use std::{path::Path, pin::Pin, task::Poll};
 use anyhow::{Context, Result};
 use async_compression::futures::bufread::GzipDecoder;
 use futures::{AsyncRead, AsyncSeek, AsyncSeekExt, AsyncWrite, io::BufReader};
-use http_client::github::AssetKind;
-use language::LspAdapterDelegate;
 use sha2::{Digest, Sha256};

+use crate::{HttpClient, github::AssetKind};
+
 #[derive(serde::Deserialize, serde::Serialize, Debug)]
-pub(crate) struct GithubBinaryMetadata {
-    pub(crate) metadata_version: u64,
-    pub(crate) digest: Option<String>,
+pub struct GithubBinaryMetadata {
+    pub metadata_version: u64,
+    pub digest: Option<String>,
 }

 impl GithubBinaryMetadata {
-    pub(crate) async fn read_from_file(metadata_path: &Path) -> Result<Self> {
+    pub async fn read_from_file(metadata_path: &Path) -> Result<Self> {
         let metadata_content = async_fs::read_to_string(metadata_path)
             .await
             .with_context(|| format!("reading metadata file at {metadata_path:?}"))?;
@@ -22,7 +22,7 @@ impl GithubBinaryMetadata {
             .with_context(|| format!("parsing metadata file at {metadata_path:?}"))
     }

-    pub(crate) async fn write_to_file(&self, metadata_path: &Path) -> Result<()> {
+    pub async fn write_to_file(&self, metadata_path: &Path) -> Result<()> {
         let metadata_content = serde_json::to_string(self)
             .with_context(|| format!("serializing metadata for {metadata_path:?}"))?;
         async_fs::write(metadata_path, metadata_content.as_bytes())
@@ -32,16 +32,15 @@ impl GithubBinaryMetadata {
     }
 }

-pub(crate) async fn download_server_binary(
-    delegate: &dyn LspAdapterDelegate,
+pub async fn download_server_binary(
+    http_client: &dyn HttpClient,
     url: &str,
     digest: Option<&str>,
     destination_path: &Path,
     asset_kind: AssetKind,
 ) -> Result<(), anyhow::Error> {
     log::info!("downloading github artifact from {url}");
-    let mut response = delegate
-        .http_client()
+    let mut response = http_client
         .get(url, Default::default(), true)
         .await
         .with_context(|| format!("downloading release from {url}"))?;
@@ -143,7 +142,7 @@ async fn extract_gz(
     from: impl AsyncRead + Unpin,
 ) -> Result<(), anyhow::Error> {
     let mut decompressed_bytes = GzipDecoder::new(BufReader::new(from));
-    let mut file = smol::fs::File::create(&destination_path)
+    let mut file = async_fs::File::create(&destination_path)
         .await
         .with_context(|| {
             format!("creating a file {destination_path:?} for a download from {url}")
diff --git a/crates/http_client/src/http_client.rs b/crates/http_client/src/http_client.rs
index 0bbb7ce037fcda014b346556202256b99e832529..76bf0b905dbdc827f38aa37a95edc0e3b9e834eb 100644
--- a/crates/http_client/src/http_client.rs
+++ b/crates/http_client/src/http_client.rs
@@ -1,5 +1,6 @@
 mod async_body;
 pub mod github;
+pub mod github_download;

 pub use anyhow::{Result, anyhow};
 pub use async_body::{AsyncBody, Inner};
diff --git a/crates/languages/Cargo.toml b/crates/languages/Cargo.toml
index d707dd6977a8d15364b52ca4315823ba1a12facf..073f3636baba8029411e1833c57846b2233999e2 100644
--- a/crates/languages/Cargo.toml
+++ b/crates/languages/Cargo.toml
@@ -62,7 +62,6 @@ project.workspace = true
 regex.workspace = true
 rope.workspace = true
 rust-embed.workspace = true
-sha2.workspace = true
 serde.workspace = true
 serde_json.workspace = true
 serde_json_lenient.workspace = true
@@ -70,7 +69,6 @@ settings.workspace = true
 smol.workspace = true
 url.workspace = true
 task.workspace = true
-tempfile.workspace = true
 theme.workspace = true
 toml.workspace = true
 tree-sitter = { workspace = true, optional = true }
diff --git a/crates/languages/src/c.rs b/crates/languages/src/c.rs
index 0af467bcaa8f9b4e6b1094d937358a55db402c60..f30120a635655af6c11889d3af110e6c2dca81fc 100644
--- a/crates/languages/src/c.rs
+++ b/crates/languages/src/c.rs
@@ -3,6 +3,7 @@ use async_trait::async_trait;
 use futures::StreamExt;
 use gpui::{App, AsyncApp};
 use http_client::github::{AssetKind, GitHubLspBinaryVersion, latest_github_release};
+use http_client::github_download::{GithubBinaryMetadata, download_server_binary};
 pub use language::*;
 use lsp::{InitializeParams, LanguageServerBinary, LanguageServerName};
 use project::lsp_store::clangd_ext;
@@ -11,8 +12,6 @@ use smol::fs;
 use std::{env::consts, path::PathBuf, sync::Arc};
 use util::{ResultExt, fs::remove_matching, maybe, merge_json_value_into};

-use crate::github_download::{GithubBinaryMetadata, download_server_binary};
-
 pub struct CLspAdapter;

 impl CLspAdapter {
@@ -119,7 +118,7 @@ impl LspInstaller for CLspAdapter {
             }
         }
         download_server_binary(
-            delegate,
+            &*delegate.http_client(),
             &url,
             expected_digest.as_deref(),
             &container_dir,
diff --git a/crates/languages/src/lib.rs b/crates/languages/src/lib.rs
index 9f8b4f45ecd6ad9c98090a81f4aa75c93f6a290a..76e1ae5edd2593907bd374d398946a1f6083a82e 100644
--- a/crates/languages/src/lib.rs
+++ b/crates/languages/src/lib.rs
@@ -20,7 +20,6 @@ use crate::{
 mod bash;
 mod c;
 mod css;
-mod github_download;
 mod go;
 mod json;
 mod package_json;
diff --git a/crates/languages/src/python.rs b/crates/languages/src/python.rs
index 24f11945a92786b523f4b3f82613646552ac7ad9..70cbb26db4eead8cdcf144c04173007bea6afcc8 100644
--- a/crates/languages/src/python.rs
+++ b/crates/languages/src/python.rs
@@ -26,6 +26,7 @@ use std::env::consts;
 use util::fs::{make_file_executable, remove_matching};
 use util::rel_path::RelPath;

+use http_client::github_download::{GithubBinaryMetadata, download_server_binary};
 use parking_lot::Mutex;
 use std::str::FromStr;
 use std::{
@@ -37,8 +38,6 @@ use std::{
 use task::{ShellKind, TaskTemplate, TaskTemplates, VariableName};
 use util::{ResultExt, maybe};

-use crate::github_download::{GithubBinaryMetadata, download_server_binary};
-
 pub(crate) struct PyprojectTomlManifestProvider;

 impl ManifestProvider for PyprojectTomlManifestProvider {
@@ -272,7 +271,7 @@ impl LspInstaller for TyLspAdapter {
         }

         download_server_binary(
-            delegate,
+            &*delegate.http_client(),
             &url,
             expected_digest.as_deref(),
             &destination_path,
@@ -2116,7 +2115,7 @@ impl LspInstaller for RuffLspAdapter {
         }

         download_server_binary(
-            delegate,
+            &*delegate.http_client(),
             &url,
             expected_digest.as_deref(),
             &destination_path,
diff --git a/crates/languages/src/rust.rs b/crates/languages/src/rust.rs
index c566d43c2313ebf53688c827956c547caaac0f8f..b315e945a73d1792dcf2b6aeebfe29c0db6cdf7f 100644
--- a/crates/languages/src/rust.rs
+++ b/crates/languages/src/rust.rs
@@ -5,6 +5,7 @@ use futures::StreamExt;
 use gpui::{App, AppContext, AsyncApp, SharedString, Task};
 use http_client::github::AssetKind;
 use http_client::github::{GitHubLspBinaryVersion, latest_github_release};
+use http_client::github_download::{GithubBinaryMetadata, download_server_binary};
 pub use language::*;
 use lsp::{InitializeParams, LanguageServerBinary};
 use project::lsp_store::rust_analyzer_ext::CARGO_DIAGNOSTICS_SOURCE_NAME;
@@ -26,7 +27,6 @@ use util::merge_json_value_into;
 use util::rel_path::RelPath;
 use util::{ResultExt, maybe};

-use crate::github_download::{GithubBinaryMetadata, download_server_binary};
 use crate::language_settings::language_settings;

 pub struct RustLspAdapter;
@@ -484,7 +484,7 @@ impl LspInstaller for RustLspAdapter {
         }

         download_server_binary(
-            delegate,
+            &*delegate.http_client(),
             &url,
             expected_digest.as_deref(),
             &destination_path,
diff --git a/crates/languages/src/typescript.rs b/crates/languages/src/typescript.rs
index 4d1282e28a2bf32fb6fd4925fa6c76056a59c6a9..68fb11bf3526e6e4301d118e6be33dfcc3b3ee2c 100644
--- a/crates/languages/src/typescript.rs
+++ b/crates/languages/src/typescript.rs
@@ -5,6 +5,7 @@ use collections::HashMap;
 use futures::future::join_all;
 use gpui::{App, AppContext, AsyncApp, Task};
 use http_client::github::{AssetKind, GitHubLspBinaryVersion, build_asset_url};
+use http_client::github_download::download_server_binary;
 use itertools::Itertools as _;
 use language::{
     ContextLocation, ContextProvider, File, LanguageName, LanguageToolchainStore, LspAdapter,
@@ -25,7 +26,7 @@ use task::{TaskTemplate, TaskTemplates, VariableName};
 use util::{ResultExt, fs::remove_matching, maybe};
 use util::{merge_json_value_into, rel_path::RelPath};

-use crate::{PackageJson, PackageJsonData, github_download::download_server_binary};
+use crate::{PackageJson, PackageJsonData};

 pub(crate) struct TypeScriptContextProvider {
     fs: Arc<dyn Fs>,
@@ -853,7 +854,7 @@ impl LspInstaller for EsLintLspAdapter {
         remove_matching(&container_dir, |_| true).await;

         download_server_binary(
-            delegate,
+            &*delegate.http_client(),
             &version.url,
             None,
             &destination_path,
diff --git a/crates/project/Cargo.toml b/crates/project/Cargo.toml
index f7a037bf1ccc9a86aab827d07ccad6be4592a348..6a5223f33d0864263b36e2eadb4765780ad2128d 100644
--- a/crates/project/Cargo.toml
+++ b/crates/project/Cargo.toml
@@ -30,8 +30,6 @@ test-support = [
 aho-corasick.workspace = true
 anyhow.workspace = true
 askpass.workspace = true
-async-compression.workspace = true
-async-tar.workspace = true
 async-trait.workspace = true
 base64.workspace = true
 buffer_diff.workspace = true
diff --git a/crates/project/src/agent_server_store.rs b/crates/project/src/agent_server_store.rs
index 53e4fa06fa93df95d271dd119a1b217cf21b0a48..47a62a3cb87cbb8243f50ecede1634b0bf601a57 100644
--- a/crates/project/src/agent_server_store.rs
+++ b/crates/project/src/agent_server_store.rs
@@ -16,6 +16,7 @@ use futures::StreamExt as _;
 use gpui::{
     App, AppContext as _, AsyncApp, Context, Entity, EventEmitter, SharedString, Subscription, Task,
 };
+use http_client::github::AssetKind;
 use node_runtime::NodeRuntime;
 use remote::RemoteClient;
 use rpc::{AnyProtoClient, TypedEnvelope, proto};
@@ -1022,7 +1023,7 @@ impl ExternalAgentServer for LocalCodex {
             // Find or install the latest Codex release (no update checks for now).
             let http = cx.update(|cx| Client::global(cx).http_client())?;
             let release = ::http_client::github::latest_github_release(
-                "zed-industries/codex-acp",
+                CODEX_ACP_REPO,
                 true,
                 false,
                 http.clone(),
@@ -1032,42 +1033,27 @@ impl ExternalAgentServer for LocalCodex {

             let version_dir = dir.join(&release.tag_name);
             if !fs.is_dir(&version_dir).await {
-                // Assemble release download URL from prefix, tag, and filename based on target triple.
-                // If unsupported, silently skip download.
-                let tag = release.tag_name.clone(); // e.g. "v0.1.0"
+                let tag = release.tag_name.clone();
                 let version_number = tag.trim_start_matches('v');
-                if let Some(asset_url) = codex_release_url(version_number) {
-                    let http = http.clone();
-                    let mut response = http
-                        .get(&asset_url, Default::default(), true)
-                        .await
-                        .with_context(|| {
-                            format!("downloading Codex binary from {}", asset_url)
-                        })?;
-                    anyhow::ensure!(
-                        response.status().is_success(),
-                        "failed to download Codex release: {}",
-                        response.status()
-                    );
-
-                    // Extract archive into the version directory.
-                    if asset_url.ends_with(".zip") {
-                        let reader = futures::io::BufReader::new(response.body_mut());
-                        util::archive::extract_zip(&version_dir, reader)
-                            .await
-                            .context("extracting Codex binary from zip")?;
+                let asset_name = asset_name(version_number)
+                    .context("codex acp is not supported for this architecture")?;
+                let asset = release
+                    .assets
+                    .into_iter()
+                    .find(|asset| asset.name == asset_name)
+                    .with_context(|| format!("no asset found matching `{asset_name:?}`"))?;
+                ::http_client::github_download::download_server_binary(
+                    &*http,
+                    &asset.browser_download_url,
+                    asset.digest.as_deref(),
+                    &version_dir,
+                    if cfg!(target_os = "windows") && cfg!(target_arch = "x86_64") {
+                        AssetKind::Zip
                     } else {
-                        // Decompress and extract the tar.gz into the version directory.
-                        let reader = futures::io::BufReader::new(response.body_mut());
-                        let decoder =
-                            async_compression::futures::bufread::GzipDecoder::new(reader);
-                        let archive = async_tar::Archive::new(decoder);
-                        archive
-                            .unpack(&version_dir)
-                            .await
-                            .context("extracting Codex binary from tar.gz")?;
-                    }
-                }
+                        AssetKind::TarGz
+                    },
+                )
+                .await?;
             }

             let bin_name = if cfg!(windows) {
@@ -1101,11 +1087,13 @@ impl ExternalAgentServer for LocalCodex {
     }
 }

+pub const CODEX_ACP_REPO: &str = "zed-industries/codex-acp";
+
 /// Assemble Codex release URL for the current OS/arch and the given version number.
 /// Returns None if the current target is unsupported.
 /// Example output:
 /// https://github.com/zed-industries/codex-acp/releases/download/v{version}/codex-acp-{version}-{arch}-{platform}.{ext}
-fn codex_release_url(version: &str) -> Option<String> {
+fn asset_name(version: &str) -> Option<String> {
     let arch = if cfg!(target_arch = "x86_64") {
         "x86_64"
     } else if cfg!(target_arch = "aarch64") {
@@ -1131,11 +1119,7 @@ fn codex_release_url(version: &str) -> Option<String> {
         "tar.gz"
     };

-    let prefix = "https://github.com/zed-industries/codex-acp/releases/download";
-
-    Some(format!(
-        "{prefix}/v{version}/codex-acp-{version}-{arch}-{platform}.{ext}"
-    ))
+    Some(format!("codex-acp-{version}-{arch}-{platform}.{ext}"))
 }

 struct LocalCustomAgent {
@@ -1191,18 +1175,18 @@ mod tests {
         // Additionally, it verifies that our logic for assembling URLs
         // correctly resolves to a known-good URL on each of our targets.
         let allowed = [
-            "https://github.com/zed-industries/codex-acp/releases/download/v0.1.0/codex-acp-0.1.0-aarch64-apple-darwin.tar.gz",
-            "https://github.com/zed-industries/codex-acp/releases/download/v0.1.0/codex-acp-0.1.0-aarch64-pc-windows-msvc.tar.gz",
-            "https://github.com/zed-industries/codex-acp/releases/download/v0.1.0/codex-acp-0.1.0-aarch64-unknown-linux-gnu.tar.gz",
-            "https://github.com/zed-industries/codex-acp/releases/download/v0.1.0/codex-acp-0.1.0-x86_64-apple-darwin.tar.gz",
-            "https://github.com/zed-industries/codex-acp/releases/download/v0.1.0/codex-acp-0.1.0-x86_64-pc-windows-msvc.zip",
-            "https://github.com/zed-industries/codex-acp/releases/download/v0.1.0/codex-acp-0.1.0-x86_64-unknown-linux-gnu.tar.gz",
+            "codex-acp-0.1.0-aarch64-apple-darwin.tar.gz",
+            "codex-acp-0.1.0-aarch64-pc-windows-msvc.tar.gz",
+            "codex-acp-0.1.0-aarch64-unknown-linux-gnu.tar.gz",
+            "codex-acp-0.1.0-x86_64-apple-darwin.tar.gz",
+            "codex-acp-0.1.0-x86_64-pc-windows-msvc.zip",
+            "codex-acp-0.1.0-x86_64-unknown-linux-gnu.tar.gz",
        ];

-        if let Some(url) = super::codex_release_url(version_number) {
+        if let Some(url) = super::asset_name(version_number) {
             assert!(
                 allowed.contains(&url.as_str()),
-                "Assembled URL {} not in allowed list",
+                "Assembled asset name {} not in allowed list",
                 url
             );
         } else {
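
Usage sketch (not part of the diff): a minimal caller of the relocated helper, now that it lives in http_client rather than languages. The crate paths, function signatures, and AssetKind variants come from the hunks above; the install_from_github function, the URL, the digest literal, the "metadata.json" file name, and metadata_version value 1 are illustrative assumptions, not code from this patch.

// Hypothetical caller, assuming an Arc<dyn HttpClient> is already available
// (the adapters above obtain theirs via `delegate.http_client()` or `Client::global`).
use std::path::Path;
use std::sync::Arc;

use anyhow::Result;
use http_client::HttpClient;
use http_client::github::AssetKind;
use http_client::github_download::{GithubBinaryMetadata, download_server_binary};

async fn install_from_github(http_client: Arc<dyn HttpClient>, version_dir: &Path) -> Result<()> {
    // Placeholder release asset URL and expected sha256 digest, e.g. taken from a
    // GitHub release's assets as LocalCodex does above.
    let url = "https://github.com/OWNER/REPO/releases/download/v0.1.0/tool-x86_64-unknown-linux-gnu.tar.gz";
    let digest: Option<&str> = Some("sha256-hex-placeholder");

    // Download and unpack the archive into `version_dir`; the optional digest is
    // forwarded so the helper can compare it against the downloaded bytes
    // (the crate newly depends on sha2 and tempfile for this).
    download_server_binary(
        http_client.as_ref(),
        url,
        digest,
        version_dir,
        AssetKind::TarGz,
    )
    .await?;

    // Record what was installed so a later run can compare digests and skip
    // re-downloading an unchanged binary, mirroring how the LSP adapters use
    // GithubBinaryMetadata.
    GithubBinaryMetadata {
        metadata_version: 1,
        digest: digest.map(str::to_owned),
    }
    .write_to_file(&version_dir.join("metadata.json"))
    .await?;

    Ok(())
}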