Detailed changes
@@ -7559,6 +7559,9 @@ name = "http_client"
version = "0.1.0"
dependencies = [
"anyhow",
+ "async-compression",
+ "async-fs",
+ "async-tar",
"bytes 1.10.1",
"derive_more",
"futures 0.3.31",
@@ -7569,7 +7572,10 @@ dependencies = [
"reqwest 0.12.15 (git+https://github.com/zed-industries/reqwest.git?rev=951c770a32f1998d6e999cef3e59e0013e6c4415)",
"serde",
"serde_json",
+ "sha2",
+ "tempfile",
"url",
+ "util",
"workspace-hack",
]
@@ -8840,11 +8846,9 @@ dependencies = [
"serde_json",
"serde_json_lenient",
"settings",
- "sha2",
"shlex",
"smol",
"task",
- "tempfile",
"text",
"theme",
"toml 0.8.20",
@@ -12087,8 +12091,6 @@ dependencies = [
"aho-corasick",
"anyhow",
"askpass",
- "async-compression",
- "async-tar",
"async-trait",
"base64 0.22.1",
"buffer_diff",
@@ -16,16 +16,22 @@ path = "src/http_client.rs"
doctest = true
[dependencies]
-bytes.workspace = true
anyhow.workspace = true
+async-compression.workspace = true
+async-fs.workspace = true
+async-tar.workspace = true
+bytes.workspace = true
derive_more.workspace = true
futures.workspace = true
-http.workspace = true
http-body.workspace = true
+http.workspace = true
log.workspace = true
parking_lot.workspace = true
reqwest.workspace = true
serde.workspace = true
serde_json.workspace = true
+sha2.workspace = true
+tempfile.workspace = true
url.workspace = true
+util.workspace = true
workspace-hack.workspace = true
@@ -3,18 +3,18 @@ use std::{path::Path, pin::Pin, task::Poll};
use anyhow::{Context, Result};
use async_compression::futures::bufread::GzipDecoder;
use futures::{AsyncRead, AsyncSeek, AsyncSeekExt, AsyncWrite, io::BufReader};
-use http_client::github::AssetKind;
-use language::LspAdapterDelegate;
use sha2::{Digest, Sha256};
+use crate::{HttpClient, github::AssetKind};
+
#[derive(serde::Deserialize, serde::Serialize, Debug)]
-pub(crate) struct GithubBinaryMetadata {
- pub(crate) metadata_version: u64,
- pub(crate) digest: Option<String>,
+pub struct GithubBinaryMetadata {
+ pub metadata_version: u64,
+ pub digest: Option<String>,
}
impl GithubBinaryMetadata {
- pub(crate) async fn read_from_file(metadata_path: &Path) -> Result<GithubBinaryMetadata> {
+ pub async fn read_from_file(metadata_path: &Path) -> Result<GithubBinaryMetadata> {
let metadata_content = async_fs::read_to_string(metadata_path)
.await
.with_context(|| format!("reading metadata file at {metadata_path:?}"))?;
@@ -22,7 +22,7 @@ impl GithubBinaryMetadata {
.with_context(|| format!("parsing metadata file at {metadata_path:?}"))
}
- pub(crate) async fn write_to_file(&self, metadata_path: &Path) -> Result<()> {
+ pub async fn write_to_file(&self, metadata_path: &Path) -> Result<()> {
let metadata_content = serde_json::to_string(self)
.with_context(|| format!("serializing metadata for {metadata_path:?}"))?;
async_fs::write(metadata_path, metadata_content.as_bytes())
@@ -32,16 +32,15 @@ impl GithubBinaryMetadata {
}
}
-pub(crate) async fn download_server_binary(
- delegate: &dyn LspAdapterDelegate,
+pub async fn download_server_binary(
+ http_client: &dyn HttpClient,
url: &str,
digest: Option<&str>,
destination_path: &Path,
asset_kind: AssetKind,
) -> Result<(), anyhow::Error> {
log::info!("downloading github artifact from {url}");
- let mut response = delegate
- .http_client()
+ let mut response = http_client
.get(url, Default::default(), true)
.await
.with_context(|| format!("downloading release from {url}"))?;
@@ -143,7 +142,7 @@ async fn extract_gz(
from: impl AsyncRead + Unpin,
) -> Result<(), anyhow::Error> {
let mut decompressed_bytes = GzipDecoder::new(BufReader::new(from));
- let mut file = smol::fs::File::create(&destination_path)
+ let mut file = async_fs::File::create(&destination_path)
.await
.with_context(|| {
format!("creating a file {destination_path:?} for a download from {url}")
@@ -1,5 +1,6 @@
mod async_body;
pub mod github;
+pub mod github_download;
pub use anyhow::{Result, anyhow};
pub use async_body::{AsyncBody, Inner};
@@ -62,7 +62,6 @@ project.workspace = true
regex.workspace = true
rope.workspace = true
rust-embed.workspace = true
-sha2.workspace = true
serde.workspace = true
serde_json.workspace = true
serde_json_lenient.workspace = true
@@ -70,7 +69,6 @@ settings.workspace = true
smol.workspace = true
url.workspace = true
task.workspace = true
-tempfile.workspace = true
theme.workspace = true
toml.workspace = true
tree-sitter = { workspace = true, optional = true }
@@ -3,6 +3,7 @@ use async_trait::async_trait;
use futures::StreamExt;
use gpui::{App, AsyncApp};
use http_client::github::{AssetKind, GitHubLspBinaryVersion, latest_github_release};
+use http_client::github_download::{GithubBinaryMetadata, download_server_binary};
pub use language::*;
use lsp::{InitializeParams, LanguageServerBinary, LanguageServerName};
use project::lsp_store::clangd_ext;
@@ -11,8 +12,6 @@ use smol::fs;
use std::{env::consts, path::PathBuf, sync::Arc};
use util::{ResultExt, fs::remove_matching, maybe, merge_json_value_into};
-use crate::github_download::{GithubBinaryMetadata, download_server_binary};
-
pub struct CLspAdapter;
impl CLspAdapter {
@@ -119,7 +118,7 @@ impl LspInstaller for CLspAdapter {
}
}
download_server_binary(
- delegate,
+ &*delegate.http_client(),
&url,
expected_digest.as_deref(),
&container_dir,
@@ -20,7 +20,6 @@ use crate::{
mod bash;
mod c;
mod css;
-mod github_download;
mod go;
mod json;
mod package_json;
@@ -26,6 +26,7 @@ use std::env::consts;
use util::fs::{make_file_executable, remove_matching};
use util::rel_path::RelPath;
+use http_client::github_download::{GithubBinaryMetadata, download_server_binary};
use parking_lot::Mutex;
use std::str::FromStr;
use std::{
@@ -37,8 +38,6 @@ use std::{
use task::{ShellKind, TaskTemplate, TaskTemplates, VariableName};
use util::{ResultExt, maybe};
-use crate::github_download::{GithubBinaryMetadata, download_server_binary};
-
pub(crate) struct PyprojectTomlManifestProvider;
impl ManifestProvider for PyprojectTomlManifestProvider {
@@ -272,7 +271,7 @@ impl LspInstaller for TyLspAdapter {
}
download_server_binary(
- delegate,
+ &*delegate.http_client(),
&url,
expected_digest.as_deref(),
&destination_path,
@@ -2116,7 +2115,7 @@ impl LspInstaller for RuffLspAdapter {
}
download_server_binary(
- delegate,
+ &*delegate.http_client(),
&url,
expected_digest.as_deref(),
&destination_path,
@@ -5,6 +5,7 @@ use futures::StreamExt;
use gpui::{App, AppContext, AsyncApp, SharedString, Task};
use http_client::github::AssetKind;
use http_client::github::{GitHubLspBinaryVersion, latest_github_release};
+use http_client::github_download::{GithubBinaryMetadata, download_server_binary};
pub use language::*;
use lsp::{InitializeParams, LanguageServerBinary};
use project::lsp_store::rust_analyzer_ext::CARGO_DIAGNOSTICS_SOURCE_NAME;
@@ -26,7 +27,6 @@ use util::merge_json_value_into;
use util::rel_path::RelPath;
use util::{ResultExt, maybe};
-use crate::github_download::{GithubBinaryMetadata, download_server_binary};
use crate::language_settings::language_settings;
pub struct RustLspAdapter;
@@ -484,7 +484,7 @@ impl LspInstaller for RustLspAdapter {
}
download_server_binary(
- delegate,
+ &*delegate.http_client(),
&url,
expected_digest.as_deref(),
&destination_path,
@@ -5,6 +5,7 @@ use collections::HashMap;
use futures::future::join_all;
use gpui::{App, AppContext, AsyncApp, Task};
use http_client::github::{AssetKind, GitHubLspBinaryVersion, build_asset_url};
+use http_client::github_download::download_server_binary;
use itertools::Itertools as _;
use language::{
ContextLocation, ContextProvider, File, LanguageName, LanguageToolchainStore, LspAdapter,
@@ -25,7 +26,7 @@ use task::{TaskTemplate, TaskTemplates, VariableName};
use util::{ResultExt, fs::remove_matching, maybe};
use util::{merge_json_value_into, rel_path::RelPath};
-use crate::{PackageJson, PackageJsonData, github_download::download_server_binary};
+use crate::{PackageJson, PackageJsonData};
pub(crate) struct TypeScriptContextProvider {
fs: Arc<dyn Fs>,
@@ -853,7 +854,7 @@ impl LspInstaller for EsLintLspAdapter {
remove_matching(&container_dir, |_| true).await;
download_server_binary(
- delegate,
+ &*delegate.http_client(),
&version.url,
None,
&destination_path,
@@ -30,8 +30,6 @@ test-support = [
aho-corasick.workspace = true
anyhow.workspace = true
askpass.workspace = true
-async-compression.workspace = true
-async-tar.workspace = true
async-trait.workspace = true
base64.workspace = true
buffer_diff.workspace = true
@@ -16,6 +16,7 @@ use futures::StreamExt as _;
use gpui::{
App, AppContext as _, AsyncApp, Context, Entity, EventEmitter, SharedString, Subscription, Task,
};
+use http_client::github::AssetKind;
use node_runtime::NodeRuntime;
use remote::RemoteClient;
use rpc::{AnyProtoClient, TypedEnvelope, proto};
@@ -1022,7 +1023,7 @@ impl ExternalAgentServer for LocalCodex {
// Find or install the latest Codex release (no update checks for now).
let http = cx.update(|cx| Client::global(cx).http_client())?;
let release = ::http_client::github::latest_github_release(
- "zed-industries/codex-acp",
+ CODEX_ACP_REPO,
true,
false,
http.clone(),
@@ -1032,42 +1033,27 @@ impl ExternalAgentServer for LocalCodex {
let version_dir = dir.join(&release.tag_name);
if !fs.is_dir(&version_dir).await {
- // Assemble release download URL from prefix, tag, and filename based on target triple.
- // If unsupported, silently skip download.
- let tag = release.tag_name.clone(); // e.g. "v0.1.0"
+ let tag = release.tag_name.clone();
let version_number = tag.trim_start_matches('v');
- if let Some(asset_url) = codex_release_url(version_number) {
- let http = http.clone();
- let mut response = http
- .get(&asset_url, Default::default(), true)
- .await
- .with_context(|| {
- format!("downloading Codex binary from {}", asset_url)
- })?;
- anyhow::ensure!(
- response.status().is_success(),
- "failed to download Codex release: {}",
- response.status()
- );
-
- // Extract archive into the version directory.
- if asset_url.ends_with(".zip") {
- let reader = futures::io::BufReader::new(response.body_mut());
- util::archive::extract_zip(&version_dir, reader)
- .await
- .context("extracting Codex binary from zip")?;
+ let asset_name = asset_name(version_number)
+ .context("codex acp is not supported for this architecture")?;
+ let asset = release
+ .assets
+ .into_iter()
+ .find(|asset| asset.name == asset_name)
+ .with_context(|| format!("no asset found matching `{asset_name:?}`"))?;
+ ::http_client::github_download::download_server_binary(
+ &*http,
+ &asset.browser_download_url,
+ asset.digest.as_deref(),
+ &version_dir,
+ if cfg!(target_os = "windows") && cfg!(target_arch = "x86_64") {
+ AssetKind::Zip
} else {
- // Decompress and extract the tar.gz into the version directory.
- let reader = futures::io::BufReader::new(response.body_mut());
- let decoder =
- async_compression::futures::bufread::GzipDecoder::new(reader);
- let archive = async_tar::Archive::new(decoder);
- archive
- .unpack(&version_dir)
- .await
- .context("extracting Codex binary from tar.gz")?;
- }
- }
+ AssetKind::TarGz
+ },
+ )
+ .await?;
}
let bin_name = if cfg!(windows) {
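With this rewrite the Codex path drops its hand-rolled URL assembly and archive extraction and reuses the shared helper, which also verifies the digest GitHub publishes for the asset. A hedged sketch of the reusable shape, using only the call signatures visible in this diff; parameter names are illustrative, and the boolean flags to latest_github_release are simply carried over from the hunk above:

    use std::path::Path;
    use std::sync::Arc;

    use anyhow::{Context, Result};
    use http_client::HttpClient;
    use http_client::github::{AssetKind, latest_github_release};
    use http_client::github_download::download_server_binary;

    async fn fetch_release_asset(
        repo: &str,
        asset_name: &str,
        version_dir: &Path,
        http: Arc<dyn HttpClient>,
    ) -> Result<()> {
        // Fetch the latest release, pick the asset by name, then let the
        // shared helper download, verify the digest, and extract it.
        let release = latest_github_release(repo, true, false, http.clone()).await?;
        let asset = release
            .assets
            .into_iter()
            .find(|asset| asset.name == asset_name)
            .with_context(|| format!("no asset found matching `{asset_name:?}`"))?;
        download_server_binary(
            &*http,
            &asset.browser_download_url,
            asset.digest.as_deref(),
            version_dir,
            AssetKind::TarGz, // AssetKind::Zip for .zip assets
        )
        .await
    }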
@@ -1101,11 +1087,13 @@ impl ExternalAgentServer for LocalCodex {
}
}
+pub const CODEX_ACP_REPO: &str = "zed-industries/codex-acp";
+
/// Assemble Codex release URL for the current OS/arch and the given version number.
/// Returns None if the current target is unsupported.
/// Example output:
/// https://github.com/zed-industries/codex-acp/releases/download/v{version}/codex-acp-{version}-{arch}-{platform}.{ext}
-fn codex_release_url(version: &str) -> Option<String> {
+fn asset_name(version: &str) -> Option<String> {
let arch = if cfg!(target_arch = "x86_64") {
"x86_64"
} else if cfg!(target_arch = "aarch64") {
@@ -1131,11 +1119,7 @@ fn codex_release_url(version: &str) -> Option<String> {
"tar.gz"
};
- let prefix = "https://github.com/zed-industries/codex-acp/releases/download";
-
- Some(format!(
- "{prefix}/v{version}/codex-acp-{version}-{arch}-{platform}.{ext}"
- ))
+ Some(format!("codex-acp-{version}-{arch}-{platform}.{ext}"))
}
struct LocalCustomAgent {
@@ -1191,18 +1175,18 @@ mod tests {
// Additionally, it verifies that our logic for assembling URLs
// correctly resolves to a known-good URL on each of our targets.
let allowed = [
- "https://github.com/zed-industries/codex-acp/releases/download/v0.1.0/codex-acp-0.1.0-aarch64-apple-darwin.tar.gz",
- "https://github.com/zed-industries/codex-acp/releases/download/v0.1.0/codex-acp-0.1.0-aarch64-pc-windows-msvc.tar.gz",
- "https://github.com/zed-industries/codex-acp/releases/download/v0.1.0/codex-acp-0.1.0-aarch64-unknown-linux-gnu.tar.gz",
- "https://github.com/zed-industries/codex-acp/releases/download/v0.1.0/codex-acp-0.1.0-x86_64-apple-darwin.tar.gz",
- "https://github.com/zed-industries/codex-acp/releases/download/v0.1.0/codex-acp-0.1.0-x86_64-pc-windows-msvc.zip",
- "https://github.com/zed-industries/codex-acp/releases/download/v0.1.0/codex-acp-0.1.0-x86_64-unknown-linux-gnu.tar.gz",
+ "codex-acp-0.1.0-aarch64-apple-darwin.tar.gz",
+ "codex-acp-0.1.0-aarch64-pc-windows-msvc.tar.gz",
+ "codex-acp-0.1.0-aarch64-unknown-linux-gnu.tar.gz",
+ "codex-acp-0.1.0-x86_64-apple-darwin.tar.gz",
+ "codex-acp-0.1.0-x86_64-pc-windows-msvc.zip",
+ "codex-acp-0.1.0-x86_64-unknown-linux-gnu.tar.gz",
];
- if let Some(url) = super::codex_release_url(version_number) {
+ if let Some(url) = super::asset_name(version_number) {
assert!(
allowed.contains(&url.as_str()),
- "Assembled URL {} not in allowed list",
+ "Assembled asset name {} not in allowed list",
url
);
} else {