Detailed changes
@@ -876,6 +876,20 @@ version = "4.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8b75356056920673b02621b35afd0f7dda9306d03c79a30f5c56c44cf256e3de"
+[[package]]
+name = "async-tls"
+version = "0.12.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cfeefd0ca297cbbb3bd34fd6b228401c2a5177038257afd751bc29f0a2da4795"
+dependencies = [
+ "futures-core",
+ "futures-io",
+ "rustls 0.20.9",
+ "rustls-pemfile 1.0.4",
+ "webpki",
+ "webpki-roots 0.22.6",
+]
+
[[package]]
name = "async-trait"
version = "0.1.81"
@@ -893,8 +907,8 @@ version = "0.23.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a1e9efbe14612da0a19fb983059a0b621e9cf6225d7018ecab4f9988215540dc"
dependencies = [
- "async-native-tls",
"async-std",
+ "async-tls",
"futures-io",
"futures-util",
"log",
@@ -981,7 +995,6 @@ dependencies = [
"editor",
"gpui",
"http_client",
- "isahc",
"log",
"markdown_preview",
"menu",
@@ -1049,7 +1062,7 @@ dependencies = [
"fastrand 2.1.1",
"hex",
"http 0.2.12",
- "ring",
+ "ring 0.17.8",
"time",
"tokio",
"tracing",
@@ -1218,7 +1231,7 @@ dependencies = [
"once_cell",
"p256",
"percent-encoding",
- "ring",
+ "ring 0.17.8",
"sha2",
"subtle",
"time",
@@ -1331,7 +1344,7 @@ dependencies = [
"once_cell",
"pin-project-lite",
"pin-utils",
- "rustls",
+ "rustls 0.21.12",
"tokio",
"tracing",
]
@@ -2405,6 +2418,8 @@ dependencies = [
"rand 0.8.5",
"release_channel",
"rpc",
+ "rustls 0.20.9",
+ "rustls-native-certs 0.8.0",
"schemars",
"serde",
"serde_json",
@@ -2553,6 +2568,7 @@ dependencies = [
"http_client",
"hyper",
"indoc",
+ "isahc_http_client",
"jsonwebtoken",
"language",
"language_model",
@@ -4015,6 +4031,7 @@ dependencies = [
"git",
"gpui",
"http_client",
+ "isahc_http_client",
"language",
"languages",
"node_runtime",
@@ -4110,6 +4127,7 @@ dependencies = [
"http_client",
"indexed_docs",
"isahc",
+ "isahc_http_client",
"language",
"log",
"lsp",
@@ -4148,7 +4166,7 @@ dependencies = [
"env_logger",
"extension",
"fs",
- "http_client",
+ "isahc_http_client",
"language",
"log",
"rpc",
@@ -4395,7 +4413,7 @@ dependencies = [
"futures-core",
"futures-sink",
"nanorand",
- "spin",
+ "spin 0.9.8",
]
[[package]]
@@ -4904,7 +4922,6 @@ dependencies = [
"git",
"gpui",
"http_client",
- "isahc",
"pretty_assertions",
"regex",
"serde",
@@ -5537,12 +5554,11 @@ dependencies = [
"anyhow",
"derive_more",
"futures 0.3.30",
- "futures-lite 1.13.0",
- "http 1.1.0",
- "isahc",
+ "http 0.2.12",
"log",
"serde",
"serde_json",
+ "smol",
"url",
]
@@ -5604,8 +5620,8 @@ dependencies = [
"http 0.2.12",
"hyper",
"log",
- "rustls",
- "rustls-native-certs",
+ "rustls 0.21.12",
+ "rustls-native-certs 0.6.3",
"tokio",
"tokio-rustls",
]
@@ -6017,6 +6033,17 @@ dependencies = [
"waker-fn",
]
+[[package]]
+name = "isahc_http_client"
+version = "0.1.0"
+dependencies = [
+ "anyhow",
+ "futures 0.3.30",
+ "http_client",
+ "isahc",
+ "util",
+]
+
[[package]]
name = "itertools"
version = "0.10.5"
@@ -6121,7 +6148,7 @@ dependencies = [
"base64 0.21.7",
"js-sys",
"pem",
- "ring",
+ "ring 0.17.8",
"serde",
"serde_json",
"simple_asn1",
@@ -6372,7 +6399,7 @@ version = "1.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe"
dependencies = [
- "spin",
+ "spin 0.9.8",
]
[[package]]
@@ -7483,7 +7510,6 @@ dependencies = [
"anyhow",
"futures 0.3.30",
"http_client",
- "isahc",
"schemars",
"serde",
"serde_json",
@@ -9175,7 +9201,7 @@ dependencies = [
"once_cell",
"percent-encoding",
"pin-project-lite",
- "rustls-pemfile",
+ "rustls-pemfile 1.0.4",
"serde",
"serde_json",
"serde_urlencoded",
@@ -9239,6 +9265,21 @@ dependencies = [
"util",
]
+[[package]]
+name = "ring"
+version = "0.16.20"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3053cf52e236a3ed746dfc745aa9cacf1b791d846bdaf412f60a8d7d6e17c8fc"
+dependencies = [
+ "cc",
+ "libc",
+ "once_cell",
+ "spin 0.5.2",
+ "untrusted 0.7.1",
+ "web-sys",
+ "winapi",
+]
+
[[package]]
name = "ring"
version = "0.17.8"
@@ -9249,8 +9290,8 @@ dependencies = [
"cfg-if",
"getrandom 0.2.15",
"libc",
- "spin",
- "untrusted",
+ "spin 0.9.8",
+ "untrusted 0.9.0",
"windows-sys 0.52.0",
]
@@ -9406,7 +9447,7 @@ dependencies = [
"futures 0.3.30",
"glob",
"rand 0.8.5",
- "ring",
+ "ring 0.17.8",
"serde",
"serde_json",
"shellexpand 3.1.0",
@@ -9527,6 +9568,18 @@ dependencies = [
"rustix 0.38.35",
]
+[[package]]
+name = "rustls"
+version = "0.20.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1b80e3dec595989ea8510028f30c408a4630db12c9cbb8de34203b89d6577e99"
+dependencies = [
+ "log",
+ "ring 0.16.20",
+ "sct",
+ "webpki",
+]
+
[[package]]
name = "rustls"
version = "0.21.12"
@@ -9534,7 +9587,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3f56a14d1f48b391359b22f731fd4bd7e43c97f3c50eee276f3aa09c94784d3e"
dependencies = [
"log",
- "ring",
+ "ring 0.17.8",
"rustls-webpki",
"sct",
]
@@ -9546,7 +9599,20 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a9aace74cb666635c918e9c12bc0d348266037aa8eb599b5cba565709a8dff00"
dependencies = [
"openssl-probe",
- "rustls-pemfile",
+ "rustls-pemfile 1.0.4",
+ "schannel",
+ "security-framework",
+]
+
+[[package]]
+name = "rustls-native-certs"
+version = "0.8.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "fcaf18a4f2be7326cd874a5fa579fae794320a0f388d365dca7e480e55f83f8a"
+dependencies = [
+ "openssl-probe",
+ "rustls-pemfile 2.1.3",
+ "rustls-pki-types",
"schannel",
"security-framework",
]
@@ -9560,14 +9626,30 @@ dependencies = [
"base64 0.21.7",
]
+[[package]]
+name = "rustls-pemfile"
+version = "2.1.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "196fe16b00e106300d3e45ecfcb764fa292a535d7326a29a5875c579c7417425"
+dependencies = [
+ "base64 0.22.1",
+ "rustls-pki-types",
+]
+
+[[package]]
+name = "rustls-pki-types"
+version = "1.8.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "fc0a2ce646f8655401bb81e7927b812614bd5d91dbc968696be50603510fcaf0"
+
[[package]]
name = "rustls-webpki"
version = "0.101.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8b6275d1ee7a1cd780b64aca7726599a1dbc893b1e64144529e55c3c2f745765"
dependencies = [
- "ring",
- "untrusted",
+ "ring 0.17.8",
+ "untrusted 0.9.0",
]
[[package]]
@@ -9681,8 +9763,8 @@ version = "0.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "da046153aa2352493d6cb7da4b6e5c0c057d8a1d0a9aa8560baffdd945acd414"
dependencies = [
- "ring",
- "untrusted",
+ "ring 0.17.8",
+ "untrusted 0.9.0",
]
[[package]]
@@ -9878,6 +9960,7 @@ dependencies = [
"gpui",
"heed",
"http_client",
+ "isahc_http_client",
"language",
"language_model",
"languages",
@@ -10437,6 +10520,12 @@ dependencies = [
"smallvec",
]
+[[package]]
+name = "spin"
+version = "0.5.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6e63cff320ae2c57904679ba7cb63280a3dc4613885beafb148ee7bf9aa9042d"
+
[[package]]
name = "spin"
version = "0.9.8"
@@ -10559,8 +10648,8 @@ dependencies = [
"paste",
"percent-encoding",
"rust_decimal",
- "rustls",
- "rustls-pemfile",
+ "rustls 0.21.12",
+ "rustls-pemfile 1.0.4",
"serde",
"serde_json",
"sha2",
@@ -10573,7 +10662,7 @@ dependencies = [
"tracing",
"url",
"uuid",
- "webpki-roots",
+ "webpki-roots 0.25.4",
]
[[package]]
@@ -11705,7 +11794,7 @@ version = "0.24.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c28327cf380ac148141087fbfb9de9d7bd4e84ab5d2c28fbc911d753de8a7081"
dependencies = [
- "rustls",
+ "rustls 0.21.12",
"tokio",
]
@@ -12232,7 +12321,6 @@ dependencies = [
"http 0.2.12",
"httparse",
"log",
- "native-tls",
"rand 0.8.5",
"sha1",
"thiserror",
@@ -12417,6 +12505,12 @@ version = "0.1.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e1766d682d402817b5ac4490b3c3002d91dfa0d22812f341609f97b08757359c"
+[[package]]
+name = "untrusted"
+version = "0.7.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a156c684c91ea7d62626509bce3cb4e1d9ed5c4d978f7b4352658f96a4c26b4a"
+
[[package]]
name = "untrusted"
version = "0.9.0"
@@ -13271,6 +13365,25 @@ dependencies = [
"wasm-bindgen",
]
+[[package]]
+name = "webpki"
+version = "0.22.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ed63aea5ce73d0ff405984102c42de94fc55a6b75765d621c65262469b3c9b53"
+dependencies = [
+ "ring 0.17.8",
+ "untrusted 0.9.0",
+]
+
+[[package]]
+name = "webpki-roots"
+version = "0.22.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b6c71e40d7d2c34a5106301fb632274ca37242cd0c9d3e64dbece371a40a2d87"
+dependencies = [
+ "webpki",
+]
+
[[package]]
name = "webpki-roots"
version = "0.25.4"
@@ -14305,6 +14418,7 @@ dependencies = [
"inline_completion_button",
"install_cli",
"isahc",
+ "isahc_http_client",
"journal",
"language",
"language_model",
@@ -52,6 +52,7 @@ members = [
"crates/indexed_docs",
"crates/inline_completion_button",
"crates/install_cli",
+ "crates/isahc_http_client",
"crates/journal",
"crates/language",
"crates/language_model",
@@ -173,6 +174,9 @@ members = [
default-members = ["crates/zed"]
[workspace.dependencies]
+
+
+
#
# Workspace member crates
#
@@ -212,6 +216,7 @@ file_icons = { path = "crates/file_icons" }
fs = { path = "crates/fs" }
fsevent = { path = "crates/fsevent" }
fuzzy = { path = "crates/fuzzy" }
+isahc_http_client = { path = "crates/isahc_http_client" }
git = { path = "crates/git" }
git_hosting_providers = { path = "crates/git_hosting_providers" }
go_to_line = { path = "crates/go_to_line" }
@@ -394,6 +399,8 @@ runtimelib = { version = "0.15", default-features = false, features = [
] }
rustc-demangle = "0.1.23"
rust-embed = { version = "8.4", features = ["include-exclude"] }
+rustls = "0.20.3"
+rustls-native-certs = "0.8.0"
schemars = { version = "0.8", features = ["impl_json_schema"] }
semver = "1.0"
serde = { version = "1.0", features = ["derive", "rc"] }
@@ -19,7 +19,6 @@ db.workspace = true
editor.workspace = true
gpui.workspace = true
http_client.workspace = true
-isahc.workspace = true
log.workspace = true
markdown_preview.workspace = true
menu.workspace = true
@@ -9,7 +9,6 @@ use gpui::{
actions, AppContext, AsyncAppContext, Context as _, Global, Model, ModelContext,
SemanticVersion, SharedString, Task, View, ViewContext, VisualContext, WindowContext,
};
-use isahc::AsyncBody;
use markdown_preview::markdown_preview_view::{MarkdownPreviewMode, MarkdownPreviewView};
use schemars::JsonSchema;
@@ -20,7 +19,7 @@ use smol::{fs, io::AsyncReadExt};
use settings::{Settings, SettingsSources, SettingsStore};
use smol::{fs::File, process::Command};
-use http_client::{HttpClient, HttpClientWithUrl};
+use http_client::{AsyncBody, HttpClient, HttpClientWithUrl};
use release_channel::{AppCommitSha, AppVersion, ReleaseChannel};
use std::{
env::{
@@ -18,7 +18,7 @@ test-support = ["clock/test-support", "collections/test-support", "gpui/test-sup
[dependencies]
anyhow.workspace = true
async-recursion = "0.3"
-async-tungstenite = { workspace = true, features = ["async-std", "async-native-tls"] }
+async-tungstenite = { workspace = true, features = ["async-std", "async-tls"] }
chrono = { workspace = true, features = ["serde"] }
clock.workspace = true
collections.workspace = true
@@ -35,6 +35,8 @@ postage.workspace = true
rand.workspace = true
release_channel.workspace = true
rpc = { workspace = true, features = ["gpui"] }
+rustls.workspace = true
+rustls-native-certs.workspace = true
schemars.workspace = true
serde.workspace = true
serde_json.workspace = true
@@ -240,8 +240,6 @@ pub enum EstablishConnectionError {
#[error("{0}")]
Other(#[from] anyhow::Error),
#[error("{0}")]
- Http(#[from] http_client::Error),
- #[error("{0}")]
InvalidHeaderValue(#[from] async_tungstenite::tungstenite::http::header::InvalidHeaderValue),
#[error("{0}")]
Io(#[from] std::io::Error),
@@ -529,19 +527,13 @@ impl Client {
}
pub fn production(cx: &mut AppContext) -> Arc<Self> {
- let user_agent = format!(
- "Zed/{} ({}; {})",
- AppVersion::global(cx),
- std::env::consts::OS,
- std::env::consts::ARCH
- );
let clock = Arc::new(clock::RealSystemClock);
- let http = Arc::new(HttpClientWithUrl::new(
+ let http = Arc::new(HttpClientWithUrl::new_uri(
+ cx.http_client(),
&ClientSettings::get_global(cx).server_url,
- Some(user_agent),
- ProxySettings::get_global(cx).proxy.clone(),
+ cx.http_client().proxy().cloned(),
));
- Self::new(clock, http.clone(), cx)
+ Self::new(clock, http, cx)
}
pub fn id(&self) -> u64 {
@@ -1145,8 +1137,32 @@ impl Client {
match url_scheme {
Https => {
+ let client_config = {
+ let mut root_store = rustls::RootCertStore::empty();
+
+ let root_certs = rustls_native_certs::load_native_certs();
+ for error in root_certs.errors {
+ log::warn!("error loading native certs: {:?}", error);
+ }
+ root_store.add_parsable_certificates(
+ &root_certs
+ .certs
+ .into_iter()
+ .map(|cert| cert.as_ref().to_owned())
+ .collect::<Vec<_>>(),
+ );
+ rustls::ClientConfig::builder()
+ .with_safe_defaults()
+ .with_root_certificates(root_store)
+ .with_no_client_auth()
+ };
let (stream, _) =
- async_tungstenite::async_std::client_async_tls(request, stream).await?;
+ async_tungstenite::async_tls::client_async_tls_with_connector(
+ request,
+ stream,
+ Some(client_config.into()),
+ )
+ .await?;
Ok(Connection::new(
stream
.map_err(|error| anyhow!(error))
@@ -36,6 +36,7 @@ envy = "0.4.2"
futures.workspace = true
google_ai.workspace = true
hex.workspace = true
+isahc_http_client.workspace = true
http_client.workspace = true
jsonwebtoken.workspace = true
live_kit_server.workspace = true
@@ -22,7 +22,7 @@ use chrono::{DateTime, Duration, Utc};
use collections::HashMap;
use db::{usage_measure::UsageMeasure, ActiveUserCount, LlmDatabase};
use futures::{Stream, StreamExt as _};
-use http_client::IsahcHttpClient;
+use isahc_http_client::IsahcHttpClient;
use rpc::ListModelsResponse;
use rpc::{
proto::Plan, LanguageModelProvider, PerformCompletionParams, EXPIRED_LLM_TOKEN_HEADER_NAME,
@@ -72,6 +72,7 @@ impl LlmState {
let http_client = IsahcHttpClient::builder()
.default_header("User-Agent", user_agent)
.build()
+ .map(IsahcHttpClient::from)
.context("failed to construct http client")?;
let this = Self {
@@ -35,6 +35,8 @@ use chrono::Utc;
use collections::{HashMap, HashSet};
pub use connection_pool::{ConnectionPool, ZedVersion};
use core::fmt::{self, Debug, Formatter};
+use http_client::HttpClient;
+use isahc_http_client::IsahcHttpClient;
use open_ai::{OpenAiEmbeddingModel, OPEN_AI_API_URL};
use sha2::Digest;
use supermaven_api::{CreateExternalUserRequest, SupermavenAdminApi};
@@ -45,7 +47,6 @@ use futures::{
stream::FuturesUnordered,
FutureExt, SinkExt, StreamExt, TryStreamExt,
};
-use http_client::IsahcHttpClient;
use prometheus::{register_int_gauge, IntGauge};
use rpc::{
proto::{
@@ -139,7 +140,7 @@ struct Session {
connection_pool: Arc<parking_lot::Mutex<ConnectionPool>>,
app_state: Arc<AppState>,
supermaven_client: Option<Arc<SupermavenAdminApi>>,
- http_client: Arc<IsahcHttpClient>,
+ http_client: Arc<dyn HttpClient>,
/// The GeoIP country code for the user.
#[allow(unused)]
geoip_country_code: Option<String>,
@@ -955,9 +956,10 @@ impl Server {
tracing::info!("connection opened");
+
let user_agent = format!("Zed Server/{}", env!("CARGO_PKG_VERSION"));
let http_client = match IsahcHttpClient::builder().default_header("User-Agent", user_agent).build() {
- Ok(http_client) => Arc::new(http_client),
+ Ok(http_client) => Arc::new(IsahcHttpClient::from(http_client)),
Err(error) => {
tracing::error!(?error, "failed to create HTTP client");
return;
@@ -24,6 +24,7 @@ feature_flags.workspace = true
fs.workspace = true
git.workspace = true
gpui.workspace = true
+isahc_http_client.workspace = true
language.workspace = true
languages.workspace = true
http_client.workspace = true
@@ -97,13 +97,14 @@ fn main() -> Result<()> {
gpui::App::headless().run(move |cx| {
let executor = cx.background_executor().clone();
-
+ let client = isahc_http_client::IsahcHttpClient::new(None, None);
+ cx.set_http_client(client.clone());
match cli.command {
Commands::Fetch {} => {
executor
.clone()
.spawn(async move {
- if let Err(err) = fetch_evaluation_resources(&executor).await {
+ if let Err(err) = fetch_evaluation_resources(client, &executor).await {
eprintln!("Error: {}", err);
exit(1);
}
@@ -127,10 +128,12 @@ fn main() -> Result<()> {
Ok(())
}
-async fn fetch_evaluation_resources(executor: &BackgroundExecutor) -> Result<()> {
- let http_client = http_client::HttpClientWithProxy::new(None, None);
- fetch_code_search_net_resources(&http_client).await?;
- fetch_eval_repos(executor, &http_client).await?;
+async fn fetch_evaluation_resources(
+ http_client: Arc<dyn HttpClient>,
+ executor: &BackgroundExecutor,
+) -> Result<()> {
+ fetch_code_search_net_resources(&*http_client).await?;
+ fetch_eval_repos(executor, &*http_client).await?;
Ok(())
}
@@ -239,6 +242,7 @@ async fn run_evaluation(
executor: &BackgroundExecutor,
cx: &mut AsyncAppContext,
) -> Result<()> {
+ let mut http_client = None;
cx.update(|cx| {
let mut store = SettingsStore::new(cx);
store
@@ -248,15 +252,15 @@ async fn run_evaluation(
client::init_settings(cx);
language::init(cx);
Project::init_settings(cx);
+ http_client = Some(cx.http_client());
cx.update_flags(false, vec![]);
})
.unwrap();
-
+ let http_client = http_client.unwrap();
let dataset_dir = Path::new(CODESEARCH_NET_DIR);
let evaluations_path = dataset_dir.join("evaluations.json");
let repos_dir = Path::new(EVAL_REPOS_DIR);
let db_path = Path::new(EVAL_DB_PATH);
- let http_client = http_client::HttpClientWithProxy::new(None, None);
let api_key = std::env::var("OPENAI_API_KEY").unwrap();
let git_hosting_provider_registry = Arc::new(GitHostingProviderRegistry::new());
let fs = Arc::new(RealFs::new(git_hosting_provider_registry, None)) as Arc<dyn Fs>;
@@ -266,9 +270,9 @@ async fn run_evaluation(
Client::new(
clock,
Arc::new(http_client::HttpClientWithUrl::new(
+ http_client.clone(),
"https://zed.dev",
None,
- None,
)),
cx,
)
@@ -57,6 +57,7 @@ task.workspace = true
serde_json_lenient.workspace = true
[dev-dependencies]
+isahc_http_client.workspace = true
ctor.workspace = true
env_logger.workspace = true
parking_lot.workspace = true
@@ -246,6 +246,7 @@ impl ExtensionBuilder {
.args(scanner_path.exists().then_some(scanner_path))
.output()
.context("failed to run clang")?;
+
if !clang_output.status.success() {
bail!(
"failed to compile {} parser with clang: {}",
@@ -431,6 +432,7 @@ impl ExtensionBuilder {
let body = BufReader::new(response.body_mut());
let body = GzipDecoder::new(body);
let tar = Archive::new(body);
+
tar.unpack(&tar_out_dir)
.await
.context("failed to unpack wasi-sdk archive")?;
@@ -190,6 +190,7 @@ pub fn init(
None,
fs,
client.http_client().clone(),
+ client.http_client().clone(),
Some(client.telemetry().clone()),
node_runtime,
language_registry,
@@ -225,6 +226,7 @@ impl ExtensionStore {
build_dir: Option<PathBuf>,
fs: Arc<dyn Fs>,
http_client: Arc<HttpClientWithUrl>,
+ builder_client: Arc<dyn HttpClient>,
telemetry: Option<Arc<Telemetry>>,
node_runtime: Arc<dyn NodeRuntime>,
language_registry: Arc<LanguageRegistry>,
@@ -244,12 +246,7 @@ impl ExtensionStore {
extension_index: Default::default(),
installed_dir,
index_path,
- builder: Arc::new(ExtensionBuilder::new(
- // Construct a real HTTP client for the extension builder, as we
- // don't want to use a fake one in the tests.
- ::http_client::client(None, http_client.proxy().cloned()),
- build_dir,
- )),
+ builder: Arc::new(ExtensionBuilder::new(builder_client, build_dir)),
outstanding_operations: Default::default(),
modified_extensions: Default::default(),
reload_complete_senders: Vec::new(),
@@ -830,7 +827,6 @@ impl ExtensionStore {
let mut extension_manifest =
ExtensionManifest::load(fs.clone(), &extension_source_path).await?;
let extension_id = extension_manifest.id.clone();
-
if !this.update(&mut cx, |this, cx| {
match this.outstanding_operations.entry(extension_id.clone()) {
btree_map::Entry::Occupied(_) => return false,
@@ -854,7 +850,6 @@ impl ExtensionStore {
.ok();
}
});
-
cx.background_executor()
.spawn({
let extension_source_path = extension_source_path.clone();
@@ -885,10 +880,8 @@ impl ExtensionStore {
bail!("extension {extension_id} is already installed");
}
}
-
fs.create_symlink(output_path, extension_source_path)
.await?;
-
this.update(&mut cx, |this, cx| this.reload(None, cx))?
.await;
Ok(())
@@ -13,10 +13,12 @@ use futures::{io::BufReader, AsyncReadExt, StreamExt};
use gpui::{Context, SemanticVersion, TestAppContext};
use http_client::{FakeHttpClient, Response};
use indexed_docs::IndexedDocsRegistry;
+use isahc_http_client::IsahcHttpClient;
use language::{LanguageMatcher, LanguageRegistry, LanguageServerBinaryStatus, LanguageServerName};
use node_runtime::FakeNodeRuntime;
use parking_lot::Mutex;
use project::{Project, DEFAULT_COMPLETION_CONTEXT};
+use release_channel::AppVersion;
use serde_json::json;
use settings::{Settings as _, SettingsStore};
use snippet_provider::SnippetRegistry;
@@ -270,6 +272,7 @@ async fn test_extension_store(cx: &mut TestAppContext) {
None,
fs.clone(),
http_client.clone(),
+ http_client.clone(),
None,
node_runtime.clone(),
language_registry.clone(),
@@ -397,6 +400,7 @@ async fn test_extension_store(cx: &mut TestAppContext) {
None,
fs.clone(),
http_client.clone(),
+ http_client.clone(),
None,
node_runtime.clone(),
language_registry.clone(),
@@ -453,6 +457,8 @@ async fn test_extension_store(cx: &mut TestAppContext) {
});
}
+// TODO remove
+#[ignore]
#[gpui::test]
async fn test_extension_store_with_test_extension(cx: &mut TestAppContext) {
init_test(cx);
@@ -502,7 +508,7 @@ async fn test_extension_store_with_test_extension(cx: &mut TestAppContext) {
http_request_count: 0,
}));
- let http_client = FakeHttpClient::create({
+ let extension_client = FakeHttpClient::create({
let language_server_version = language_server_version.clone();
move |request| {
let language_server_version = language_server_version.clone();
@@ -558,19 +564,33 @@ async fn test_extension_store_with_test_extension(cx: &mut TestAppContext) {
let mut encoder = GzipEncoder::new(BufReader::new(bytes.as_slice()));
encoder.read_to_end(&mut gzipped_bytes).await.unwrap();
Ok(Response::new(gzipped_bytes.into()))
+ // } else if uri == WASI_ADAPTER_URL {
+ // let binary_contents =
+ // include_bytes!("wasi_snapshot_preview1.reactor.wasm").as_slice();
+ // Ok(Response::new(binary_contents.into()))
} else {
Ok(Response::builder().status(404).body("not found".into())?)
}
}
}
});
+ let user_agent = cx.update(|cx| {
+ format!(
+ "Zed/{} ({}; {})",
+ AppVersion::global(cx),
+ std::env::consts::OS,
+ std::env::consts::ARCH
+ )
+ });
+ let builder_client = IsahcHttpClient::new(None, Some(user_agent));
let extension_store = cx.new_model(|cx| {
ExtensionStore::new(
extensions_dir.clone(),
Some(cache_dir),
fs.clone(),
- http_client.clone(),
+ extension_client.clone(),
+ builder_client,
None,
node_runtime,
language_registry.clone(),
@@ -18,7 +18,7 @@ clap = { workspace = true, features = ["derive"] }
env_logger.workspace = true
extension = { workspace = true, features = ["no-webrtc"] }
fs.workspace = true
-http_client.workspace = true
+isahc_http_client.workspace = true
language.workspace = true
log.workspace = true
rpc.workspace = true
@@ -7,13 +7,13 @@ use std::{
};
use ::fs::{copy_recursive, CopyOptions, Fs, RealFs};
-use ::http_client::HttpClientWithProxy;
use anyhow::{anyhow, bail, Context, Result};
use clap::Parser;
use extension::{
extension_builder::{CompileExtensionOptions, ExtensionBuilder},
ExtensionManifest,
};
+use isahc_http_client::IsahcHttpClient;
use language::LanguageConfig;
use theme::ThemeRegistry;
use tree_sitter::{Language, Query, WasmStore};
@@ -66,7 +66,13 @@ async fn main() -> Result<()> {
std::env::consts::OS,
std::env::consts::ARCH
);
- let http_client = Arc::new(HttpClientWithProxy::new(Some(user_agent), None));
+ let http_client = Arc::new(
+ IsahcHttpClient::builder()
+ .default_header("User-Agent", user_agent)
+ .build()
+ .map(IsahcHttpClient::from)?,
+ );
+
let builder = ExtensionBuilder::new(http_client, scratch_dir);
builder
.compile_extension(
@@ -18,7 +18,6 @@ futures.workspace = true
git.workspace = true
gpui.workspace = true
http_client.workspace = true
-isahc.workspace = true
regex.workspace = true
serde.workspace = true
serde_json.workspace = true
@@ -3,9 +3,7 @@ use std::sync::Arc;
use anyhow::{bail, Context, Result};
use async_trait::async_trait;
use futures::AsyncReadExt;
-use http_client::HttpClient;
-use isahc::config::Configurable;
-use isahc::{AsyncBody, Request};
+use http_client::{AsyncBody, HttpClient, Request};
use serde::Deserialize;
use url::Url;
@@ -51,16 +49,14 @@ impl Codeberg {
let url =
format!("https://codeberg.org/api/v1/repos/{repo_owner}/{repo}/git/commits/{commit}");
- let mut request = Request::get(&url)
- .redirect_policy(isahc::config::RedirectPolicy::Follow)
- .header("Content-Type", "application/json");
+ let mut request = Request::get(&url).header("Content-Type", "application/json");
if let Ok(codeberg_token) = std::env::var("CODEBERG_TOKEN") {
request = request.header("Authorization", format!("Bearer {}", codeberg_token));
}
let mut response = client
- .send(request.body(AsyncBody::default())?)
+ .send_with_redirect_policy(request.body(AsyncBody::default())?, true)
.await
.with_context(|| format!("error fetching Codeberg commit details at {:?}", url))?;
@@ -3,9 +3,7 @@ use std::sync::{Arc, OnceLock};
use anyhow::{bail, Context, Result};
use async_trait::async_trait;
use futures::AsyncReadExt;
-use http_client::HttpClient;
-use isahc::config::Configurable;
-use isahc::{AsyncBody, Request};
+use http_client::{AsyncBody, HttpClient, Request};
use regex::Regex;
use serde::Deserialize;
use url::Url;
@@ -55,16 +53,14 @@ impl Github {
) -> Result<Option<User>> {
let url = format!("https://api.github.com/repos/{repo_owner}/{repo}/commits/{commit}");
- let mut request = Request::get(&url)
- .redirect_policy(isahc::config::RedirectPolicy::Follow)
- .header("Content-Type", "application/json");
+ let mut request = Request::get(&url).header("Content-Type", "application/json");
if let Ok(github_token) = std::env::var("GITHUB_TOKEN") {
request = request.header("Authorization", format!("Bearer {}", github_token));
}
let mut response = client
- .send(request.body(AsyncBody::default())?)
+ .send_with_redirect_policy(request.body(AsyncBody::default())?, true)
.await
.with_context(|| format!("error fetching GitHub commit details at {:?}", url))?;
@@ -11,13 +11,13 @@ license = "Apache-2.0"
workspace = true
[features]
-default = []
+default = ["http_client"]
test-support = [
"backtrace",
"collections/test-support",
"rand",
"util/test-support",
- "http_client/test-support",
+ "http_client?/test-support",
]
runtime_shaders = []
macos-blade = ["blade-graphics", "blade-macros", "blade-util", "bytemuck"]
@@ -40,7 +40,7 @@ derive_more.workspace = true
etagere = "0.2"
futures.workspace = true
gpui_macros.workspace = true
-http_client.workspace = true
+http_client = { optional = true, workspace = true }
image = "0.25.1"
itertools.workspace = true
linkme = "0.3"
@@ -131,6 +131,7 @@ fn main() {
PathBuf::from_str("crates/gpui/examples/image/app-icon.png").unwrap(),
),
remote_resource: "https://picsum.photos/512/512".into(),
+
asset_resource: "image/color.svg".into(),
})
})
@@ -117,7 +117,7 @@ impl App {
Self(AppContext::new(
current_platform(false),
Arc::new(()),
- http_client::client(None, None),
+ Arc::new(NullHttpClient),
))
}
@@ -128,7 +128,7 @@ impl App {
Self(AppContext::new(
current_platform(true),
Arc::new(()),
- http_client::client(None, None),
+ Arc::new(NullHttpClient),
))
}
@@ -142,6 +142,14 @@ impl App {
self
}
+ /// Set the http client for the application
+ pub fn with_http_client(self, http_client: Arc<dyn HttpClient>) -> Self {
+ let mut context_lock = self.0.borrow_mut();
+ context_lock.http_client = http_client;
+ drop(context_lock);
+ self
+ }
+
/// Start the application. The provided callback will be called once the
/// app is fully launched.
pub fn run<F>(self, on_finish_launching: F)
@@ -1512,3 +1520,22 @@ pub struct KeystrokeEvent {
/// The action that was resolved for the keystroke, if any
pub action: Option<Box<dyn Action>>,
}
+
+struct NullHttpClient;
+
+impl HttpClient for NullHttpClient {
+ fn send_with_redirect_policy(
+ &self,
+ _req: http_client::Request<http_client::AsyncBody>,
+ _follow_redirects: bool,
+ ) -> futures::future::BoxFuture<
+ 'static,
+ Result<http_client::Response<http_client::AsyncBody>, anyhow::Error>,
+ > {
+ async move { Err(anyhow!("No HttpClient available")) }.boxed()
+ }
+
+ fn proxy(&self) -> Option<&http_client::Uri> {
+ None
+ }
+}
@@ -345,7 +345,10 @@ impl Asset for ImageAsset {
let bytes = match source.clone() {
UriOrPath::Path(uri) => fs::read(uri.as_ref())?,
UriOrPath::Uri(uri) => {
- let mut response = client.get(uri.as_ref(), ().into(), true).await?;
+ let mut response = client
+ .get(uri.as_ref(), ().into(), true)
+ .await
+ .map_err(|e| ImageCacheError::Client(Arc::new(e)))?;
let mut body = Vec::new();
response.body_mut().read_to_end(&mut body).await?;
if !response.status().is_success() {
@@ -429,7 +432,7 @@ impl Asset for ImageAsset {
pub enum ImageCacheError {
/// An error that occurred while fetching an image from a remote source.
#[error("http error: {0}")]
- Client(#[from] http_client::Error),
+ Client(#[from] Arc<anyhow::Error>),
/// An error that occurred while reading the image from disk.
#[error("IO error: {0}")]
Io(Arc<std::io::Error>),
@@ -128,6 +128,7 @@ pub use executor::*;
pub use geometry::*;
pub use global::*;
pub use gpui_macros::{register_action, test, IntoElement, Render};
+pub use http_client;
pub use input::*;
pub use interactive::*;
use key_dispatch::*;
@@ -16,13 +16,12 @@ path = "src/http_client.rs"
doctest = true
[dependencies]
-http = "1.0.0"
+http = "0.2"
anyhow.workspace = true
derive_more.workspace = true
futures.workspace = true
-isahc.workspace = true
log.workspace = true
serde.workspace = true
serde_json.workspace = true
-futures-lite.workspace = true
+smol.workspace = true
url.workspace = true
@@ -0,0 +1,109 @@
+use std::{borrow::Cow, io::Read, pin::Pin, task::Poll};
+
+use futures::{AsyncRead, AsyncReadExt};
+
+/// Based on the implementation of AsyncBody in
+/// https://github.com/sagebind/isahc/blob/5c533f1ef4d6bdf1fd291b5103c22110f41d0bf0/src/body/mod.rs
+pub struct AsyncBody(pub Inner);
+
+pub enum Inner {
+ /// An empty body.
+ Empty,
+
+ /// A body stored in memory.
+ SyncReader(std::io::Cursor<Cow<'static, [u8]>>),
+
+ /// An asynchronous reader.
+ AsyncReader(Pin<Box<dyn futures::AsyncRead + Send + Sync>>),
+}
+
+impl AsyncBody {
+ /// Create a new empty body.
+ ///
+ /// An empty body represents the *absence* of a body, which is semantically
+ /// different than the presence of a body of zero length.
+ pub fn empty() -> Self {
+ Self(Inner::Empty)
+ }
+ /// Create a streaming body that reads from the given reader.
+ pub fn from_reader<R>(read: R) -> Self
+ where
+ R: AsyncRead + Send + Sync + 'static,
+ {
+ Self(Inner::AsyncReader(Box::pin(read)))
+ }
+}
+
+impl Default for AsyncBody {
+ fn default() -> Self {
+ Self(Inner::Empty)
+ }
+}
+
+impl From<()> for AsyncBody {
+ fn from(_: ()) -> Self {
+ Self(Inner::Empty)
+ }
+}
+
+impl From<Vec<u8>> for AsyncBody {
+ fn from(body: Vec<u8>) -> Self {
+ Self(Inner::SyncReader(std::io::Cursor::new(Cow::Owned(body))))
+ }
+}
+
+impl From<&'_ [u8]> for AsyncBody {
+ fn from(body: &[u8]) -> Self {
+ body.to_vec().into()
+ }
+}
+
+impl From<String> for AsyncBody {
+ fn from(body: String) -> Self {
+ body.into_bytes().into()
+ }
+}
+
+impl From<&'_ str> for AsyncBody {
+ fn from(body: &str) -> Self {
+ body.as_bytes().into()
+ }
+}
+
+impl<T: Into<Self>> From<Option<T>> for AsyncBody {
+ fn from(body: Option<T>) -> Self {
+ match body {
+ Some(body) => body.into(),
+ None => Self(Inner::Empty),
+ }
+ }
+}
+
+impl std::io::Read for AsyncBody {
+ fn read(&mut self, buf: &mut [u8]) -> std::io::Result<usize> {
+ match &mut self.0 {
+ Inner::Empty => Ok(0),
+ Inner::SyncReader(cursor) => cursor.read(buf),
+ Inner::AsyncReader(async_reader) => smol::block_on(async_reader.read(buf)),
+ }
+ }
+}
+
+impl futures::AsyncRead for AsyncBody {
+ fn poll_read(
+ self: Pin<&mut Self>,
+ cx: &mut std::task::Context<'_>,
+ buf: &mut [u8],
+ ) -> std::task::Poll<std::io::Result<usize>> {
+ // SAFETY: Standard Enum pin projection
+ let inner = unsafe { &mut self.get_unchecked_mut().0 };
+ match inner {
+ Inner::Empty => Poll::Ready(Ok(0)),
+ // Blocking call is over an in-memory buffer
+ Inner::SyncReader(cursor) => Poll::Ready(cursor.read(buf)),
+ Inner::AsyncReader(async_reader) => {
+ AsyncRead::poll_read(async_reader.as_mut(), cx, buf)
+ }
+ }
+ }
+}
@@ -34,7 +34,7 @@ pub async fn latest_github_release(
) -> Result<GithubRelease, anyhow::Error> {
let mut response = http
.get(
- &format!("https://api.github.com/repos/{repo_name_with_owner}/releases"),
+ format!("https://api.github.com/repos/{repo_name_with_owner}/releases").as_str(),
Default::default(),
true,
)
@@ -91,13 +91,14 @@ pub async fn get_release_by_tag_name(
.context("error fetching latest release")?;
let mut body = Vec::new();
+ let status = response.status();
response
.body_mut()
.read_to_end(&mut body)
.await
.context("error reading latest release")?;
- if response.status().is_client_error() {
+ if status.is_client_error() {
let text = String::from_utf8_lossy(body.as_slice());
bail!(
"status error {}, response: {text:?}",
@@ -1,47 +1,48 @@
+mod async_body;
pub mod github;
pub use anyhow::{anyhow, Result};
+pub use async_body::{AsyncBody, Inner};
use derive_more::Deref;
+pub use http::{self, Method, Request, Response, StatusCode, Uri};
+
use futures::future::BoxFuture;
-use futures_lite::FutureExt;
-use isahc::config::{Configurable, RedirectPolicy};
-pub use isahc::http;
-pub use isahc::{
- http::{Method, StatusCode, Uri},
- AsyncBody, Error, HttpClient as IsahcHttpClient, Request, Response,
-};
+use http::request::Builder;
#[cfg(feature = "test-support")]
use std::fmt;
-use std::{
- sync::{Arc, Mutex},
- time::Duration,
-};
+use std::sync::{Arc, Mutex};
pub use url::Url;
-pub trait HttpClient: Send + Sync {
+pub trait HttpClient: 'static + Send + Sync {
fn send(
+ &self,
+ req: http::Request<AsyncBody>,
+ ) -> BoxFuture<'static, Result<Response<AsyncBody>, anyhow::Error>> {
+ self.send_with_redirect_policy(req, false)
+ }
+
+ // TODO: Make a better API for this
+ fn send_with_redirect_policy(
&self,
req: Request<AsyncBody>,
- ) -> BoxFuture<'static, Result<Response<AsyncBody>, Error>>;
+ follow_redirects: bool,
+ ) -> BoxFuture<'static, Result<Response<AsyncBody>, anyhow::Error>>;
fn get<'a>(
&'a self,
uri: &str,
body: AsyncBody,
follow_redirects: bool,
- ) -> BoxFuture<'a, Result<Response<AsyncBody>, Error>> {
- let request = isahc::Request::builder()
- .redirect_policy(if follow_redirects {
- RedirectPolicy::Follow
- } else {
- RedirectPolicy::None
- })
- .method(Method::GET)
- .uri(uri)
- .body(body);
+ ) -> BoxFuture<'a, Result<Response<AsyncBody>, anyhow::Error>> {
+ let request = Builder::new().uri(uri).body(body);
+
match request {
- Ok(request) => self.send(request),
- Err(error) => async move { Err(error.into()) }.boxed(),
+ Ok(request) => Box::pin(async move {
+ self.send_with_redirect_policy(request, follow_redirects)
+ .await
+ .map_err(Into::into)
+ }),
+ Err(e) => Box::pin(async move { Err(e.into()) }),
}
}
@@ -49,15 +50,16 @@ pub trait HttpClient: Send + Sync {
&'a self,
uri: &str,
body: AsyncBody,
- ) -> BoxFuture<'a, Result<Response<AsyncBody>, Error>> {
- let request = isahc::Request::builder()
- .method(Method::POST)
+ ) -> BoxFuture<'a, Result<Response<AsyncBody>, anyhow::Error>> {
+ let request = Builder::new()
.uri(uri)
+ .method(Method::POST)
.header("Content-Type", "application/json")
.body(body);
+
match request {
- Ok(request) => self.send(request),
- Err(error) => async move { Err(error.into()) }.boxed(),
+ Ok(request) => Box::pin(async move { self.send(request).await.map_err(Into::into) }),
+ Err(e) => Box::pin(async move { Err(e.into()) }),
}
}
@@ -74,29 +76,28 @@ pub struct HttpClientWithProxy {
impl HttpClientWithProxy {
/// Returns a new [`HttpClientWithProxy`] with the given proxy URL.
- pub fn new(user_agent: Option<String>, proxy_url: Option<String>) -> Self {
- let proxy_url = proxy_url
- .and_then(|input| {
- input
- .parse::<Uri>()
- .inspect_err(|e| log::error!("Error parsing proxy settings: {}", e))
- .ok()
- })
+ pub fn new(client: Arc<dyn HttpClient>, proxy_url: Option<String>) -> Self {
+ let proxy_uri = proxy_url
+ .and_then(|proxy| proxy.parse().ok())
.or_else(read_proxy_from_env);
+ Self::new_uri(client, proxy_uri)
+ }
+ pub fn new_uri(client: Arc<dyn HttpClient>, proxy_uri: Option<Uri>) -> Self {
Self {
- client: client(user_agent, proxy_url.clone()),
- proxy: proxy_url,
+ client,
+ proxy: proxy_uri,
}
}
}
impl HttpClient for HttpClientWithProxy {
- fn send(
+ fn send_with_redirect_policy(
&self,
req: Request<AsyncBody>,
- ) -> BoxFuture<'static, Result<Response<AsyncBody>, Error>> {
- self.client.send(req)
+ follow_redirects: bool,
+ ) -> BoxFuture<'static, Result<Response<AsyncBody>, anyhow::Error>> {
+ self.client.send_with_redirect_policy(req, follow_redirects)
}
fn proxy(&self) -> Option<&Uri> {
@@ -105,11 +106,12 @@ impl HttpClient for HttpClientWithProxy {
}
impl HttpClient for Arc<HttpClientWithProxy> {
- fn send(
+ fn send_with_redirect_policy(
&self,
req: Request<AsyncBody>,
- ) -> BoxFuture<'static, Result<Response<AsyncBody>, Error>> {
- self.client.send(req)
+ follow_redirects: bool,
+ ) -> BoxFuture<'static, Result<Response<AsyncBody>, anyhow::Error>> {
+ self.client.send_with_redirect_policy(req, follow_redirects)
}
fn proxy(&self) -> Option<&Uri> {
@@ -123,14 +125,35 @@ pub struct HttpClientWithUrl {
client: HttpClientWithProxy,
}
+impl std::ops::Deref for HttpClientWithUrl {
+ type Target = HttpClientWithProxy;
+
+ fn deref(&self) -> &Self::Target {
+ &self.client
+ }
+}
+
impl HttpClientWithUrl {
/// Returns a new [`HttpClientWithUrl`] with the given base URL.
pub fn new(
+ client: Arc<dyn HttpClient>,
base_url: impl Into<String>,
- user_agent: Option<String>,
proxy_url: Option<String>,
) -> Self {
- let client = HttpClientWithProxy::new(user_agent, proxy_url);
+ let client = HttpClientWithProxy::new(client, proxy_url);
+
+ Self {
+ base_url: Mutex::new(base_url.into()),
+ client,
+ }
+ }
+
+ pub fn new_uri(
+ client: Arc<dyn HttpClient>,
+ base_url: impl Into<String>,
+ proxy_uri: Option<Uri>,
+ ) -> Self {
+ let client = HttpClientWithProxy::new_uri(client, proxy_uri);
Self {
base_url: Mutex::new(base_url.into()),
@@ -195,11 +218,12 @@ impl HttpClientWithUrl {
}
impl HttpClient for Arc<HttpClientWithUrl> {
- fn send(
+ fn send_with_redirect_policy(
&self,
req: Request<AsyncBody>,
- ) -> BoxFuture<'static, Result<Response<AsyncBody>, Error>> {
- self.client.send(req)
+ follow_redirects: bool,
+ ) -> BoxFuture<'static, Result<Response<AsyncBody>, anyhow::Error>> {
+ self.client.send_with_redirect_policy(req, follow_redirects)
}
fn proxy(&self) -> Option<&Uri> {
@@ -208,11 +232,12 @@ impl HttpClient for Arc<HttpClientWithUrl> {
}
impl HttpClient for HttpClientWithUrl {
- fn send(
+ fn send_with_redirect_policy(
&self,
req: Request<AsyncBody>,
- ) -> BoxFuture<'static, Result<Response<AsyncBody>, Error>> {
- self.client.send(req)
+ follow_redirects: bool,
+ ) -> BoxFuture<'static, Result<Response<AsyncBody>, anyhow::Error>> {
+ self.client.send_with_redirect_policy(req, follow_redirects)
}
fn proxy(&self) -> Option<&Uri> {
@@ -220,26 +245,7 @@ impl HttpClient for HttpClientWithUrl {
}
}
-pub fn client(user_agent: Option<String>, proxy: Option<Uri>) -> Arc<dyn HttpClient> {
- let mut builder = isahc::HttpClient::builder()
- // Some requests to Qwen2 models on Runpod can take 32+ seconds,
- // especially if there's a cold boot involved. We may need to have
- // those requests use a different http client, because global timeouts
- // of 50 and 60 seconds, respectively, would be very high!
- .connect_timeout(Duration::from_secs(5))
- .low_speed_timeout(100, Duration::from_secs(30))
- .proxy(proxy.clone());
- if let Some(user_agent) = user_agent {
- builder = builder.default_header("User-Agent", user_agent);
- }
-
- Arc::new(HttpClientWithProxy {
- client: Arc::new(builder.build().unwrap()),
- proxy,
- })
-}
-
-fn read_proxy_from_env() -> Option<Uri> {
+pub fn read_proxy_from_env() -> Option<Uri> {
const ENV_VARS: &[&str] = &[
"ALL_PROXY",
"all_proxy",
@@ -258,23 +264,9 @@ fn read_proxy_from_env() -> Option<Uri> {
None
}
-impl HttpClient for isahc::HttpClient {
- fn send(
- &self,
- req: Request<AsyncBody>,
- ) -> BoxFuture<'static, Result<Response<AsyncBody>, Error>> {
- let client = self.clone();
- Box::pin(async move { client.send_async(req).await })
- }
-
- fn proxy(&self) -> Option<&Uri> {
- None
- }
-}
-
#[cfg(feature = "test-support")]
type FakeHttpHandler = Box<
- dyn Fn(Request<AsyncBody>) -> BoxFuture<'static, Result<Response<AsyncBody>, Error>>
+ dyn Fn(Request<AsyncBody>) -> BoxFuture<'static, Result<Response<AsyncBody>, anyhow::Error>>
+ Send
+ Sync
+ 'static,
@@ -289,7 +281,7 @@ pub struct FakeHttpClient {
impl FakeHttpClient {
pub fn create<Fut, F>(handler: F) -> Arc<HttpClientWithUrl>
where
- Fut: futures::Future<Output = Result<Response<AsyncBody>, Error>> + Send + 'static,
+ Fut: futures::Future<Output = Result<Response<AsyncBody>, anyhow::Error>> + Send + 'static,
F: Fn(Request<AsyncBody>) -> Fut + Send + Sync + 'static,
{
Arc::new(HttpClientWithUrl {
@@ -331,12 +323,13 @@ impl fmt::Debug for FakeHttpClient {
#[cfg(feature = "test-support")]
impl HttpClient for FakeHttpClient {
- fn send(
+ fn send_with_redirect_policy(
&self,
req: Request<AsyncBody>,
- ) -> BoxFuture<'static, Result<Response<AsyncBody>, Error>> {
+ _follow_redirects: bool,
+ ) -> BoxFuture<'static, Result<Response<AsyncBody>, anyhow::Error>> {
let future = (self.handler)(req);
- Box::pin(async move { future.await.map(Into::into) })
+ future
}
fn proxy(&self) -> Option<&Uri> {
@@ -0,0 +1,22 @@
+[package]
+name = "isahc_http_client"
+version = "0.1.0"
+edition = "2021"
+publish = false
+license = "Apache-2.0"
+
+[lints]
+workspace = true
+
+[features]
+test-support = []
+
+[lib]
+path = "src/isahc_http_client.rs"
+
+[dependencies]
+http_client.workspace = true
+isahc.workspace = true
+futures.workspace = true
+anyhow.workspace = true
+util.workspace = true
@@ -0,0 +1 @@
+../../LICENSE-APACHE
@@ -0,0 +1,93 @@
+use std::{mem, sync::Arc, time::Duration};
+
+use futures::future::BoxFuture;
+use isahc::config::RedirectPolicy;
+use util::maybe;
+
+pub use isahc::config::Configurable;
+pub struct IsahcHttpClient(isahc::HttpClient);
+
+pub use http_client::*;
+
+impl IsahcHttpClient {
+ pub fn new(proxy: Option<Uri>, user_agent: Option<String>) -> Arc<IsahcHttpClient> {
+ let mut builder = isahc::HttpClient::builder()
+ .connect_timeout(Duration::from_secs(5))
+ .low_speed_timeout(100, Duration::from_secs(5))
+ .proxy(proxy.clone());
+ if let Some(agent) = user_agent {
+ builder = builder.default_header("User-Agent", agent);
+ }
+ Arc::new(IsahcHttpClient(builder.build().unwrap()))
+ }
+ pub fn builder() -> isahc::HttpClientBuilder {
+ isahc::HttpClientBuilder::new()
+ }
+}
+
+impl From<isahc::HttpClient> for IsahcHttpClient {
+ fn from(client: isahc::HttpClient) -> Self {
+ Self(client)
+ }
+}
+
+impl HttpClient for IsahcHttpClient {
+ fn proxy(&self) -> Option<&Uri> {
+ None
+ }
+
+ fn send_with_redirect_policy(
+ &self,
+ req: http_client::http::Request<http_client::AsyncBody>,
+ follow_redirects: bool,
+ ) -> BoxFuture<'static, Result<http_client::Response<http_client::AsyncBody>, anyhow::Error>>
+ {
+ let req = maybe!({
+ let (mut parts, body) = req.into_parts();
+ let mut builder = isahc::Request::builder()
+ .method(parts.method)
+ .uri(parts.uri)
+ .version(parts.version);
+
+ let headers = builder.headers_mut()?;
+ mem::swap(headers, &mut parts.headers);
+
+ let extensions = builder.extensions_mut()?;
+ mem::swap(extensions, &mut parts.extensions);
+
+ let isahc_body = match body.0 {
+ http_client::Inner::Empty => isahc::AsyncBody::empty(),
+ http_client::Inner::AsyncReader(reader) => isahc::AsyncBody::from_reader(reader),
+ http_client::Inner::SyncReader(reader) => {
+ isahc::AsyncBody::from_bytes_static(reader.into_inner())
+ }
+ };
+
+ builder
+ .redirect_policy(if follow_redirects {
+ RedirectPolicy::Follow
+ } else {
+ RedirectPolicy::None
+ })
+ .body(isahc_body)
+ .ok()
+ });
+
+ let client = self.0.clone();
+
+ Box::pin(async move {
+ match req {
+ Some(req) => client
+ .send_async(req)
+ .await
+ .map_err(Into::into)
+ .map(|response| {
+ let (parts, body) = response.into_parts();
+ let body = http_client::AsyncBody::from_reader(body);
+ http_client::Response::from_parts(parts, body)
+ }),
+ None => Err(anyhow::anyhow!("Request was malformed")),
+ }
+ })
+ }
+}
@@ -19,7 +19,6 @@ schemars = ["dep:schemars"]
anyhow.workspace = true
futures.workspace = true
http_client.workspace = true
-isahc.workspace = true
schemars = { workspace = true, optional = true }
serde.workspace = true
serde_json.workspace = true
@@ -1,7 +1,6 @@
use anyhow::{anyhow, Context, Result};
use futures::{io::BufReader, stream::BoxStream, AsyncBufReadExt, AsyncReadExt, StreamExt};
-use http_client::{AsyncBody, HttpClient, Method, Request as HttpRequest};
-use isahc::config::Configurable;
+use http_client::{http, AsyncBody, HttpClient, Method, Request as HttpRequest};
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use serde_json::{value::RawValue, Value};
@@ -262,18 +261,14 @@ pub async fn stream_chat_completion(
client: &dyn HttpClient,
api_url: &str,
request: ChatRequest,
- low_speed_timeout: Option<Duration>,
+ _: Option<Duration>,
) -> Result<BoxStream<'static, Result<ChatResponseDelta>>> {
let uri = format!("{api_url}/api/chat");
- let mut request_builder = HttpRequest::builder()
+ let request_builder = http::Request::builder()
.method(Method::POST)
.uri(uri)
.header("Content-Type", "application/json");
- if let Some(low_speed_timeout) = low_speed_timeout {
- request_builder = request_builder.low_speed_timeout(100, low_speed_timeout);
- };
-
let request = request_builder.body(AsyncBody::from(serde_json::to_string(&request)?))?;
let mut response = client.send(request).await?;
if response.status().is_success() {
@@ -305,18 +300,14 @@ pub async fn stream_chat_completion(
pub async fn get_models(
client: &dyn HttpClient,
api_url: &str,
- low_speed_timeout: Option<Duration>,
+ _: Option<Duration>,
) -> Result<Vec<LocalModelListing>> {
let uri = format!("{api_url}/api/tags");
- let mut request_builder = HttpRequest::builder()
+ let request_builder = HttpRequest::builder()
.method(Method::GET)
.uri(uri)
.header("Accept", "application/json");
- if let Some(low_speed_timeout) = low_speed_timeout {
- request_builder = request_builder.low_speed_timeout(100, low_speed_timeout);
- };
-
let request = request_builder.body(AsyncBody::default())?;
let mut response = client.send(request).await?;
@@ -354,13 +345,13 @@ pub async fn preload_model(client: Arc<dyn HttpClient>, api_url: &str, model: &s
let mut response = match client.send(request).await {
Ok(response) => response,
- Err(err) => {
+ Err(error) => {
// Be ok with a timeout during preload of the model
- if err.is_timeout() {
- return Ok(());
- } else {
- return Err(err.into());
- }
+ // if err.is_timeout() {
+ // return Ok(());
+ // } else {
+ return Err(error);
+ //}
}
};
@@ -26,7 +26,7 @@ use gpui::{
AppContext, AsyncAppContext, Context, Entity, EventEmitter, Model, ModelContext, PromptLevel,
Task, WeakModel,
};
-use http_client::{AsyncBody, Error, HttpClient, Request, Response, Uri};
+use http_client::{AsyncBody, HttpClient, Request, Response, Uri};
use language::{
language_settings::{
all_language_settings, language_settings, AllLanguageSettings, LanguageSettings,
@@ -7339,7 +7339,7 @@ impl HttpClient for BlockedHttpClient {
fn send(
&self,
_req: Request<AsyncBody>,
- ) -> BoxFuture<'static, Result<Response<AsyncBody>, Error>> {
+ ) -> BoxFuture<'static, Result<Response<AsyncBody>, anyhow::Error>> {
Box::pin(async {
Err(std::io::Error::new(
std::io::ErrorKind::PermissionDenied,
@@ -7352,6 +7352,14 @@ impl HttpClient for BlockedHttpClient {
fn proxy(&self) -> Option<&Uri> {
None
}
+
+ fn send_with_redirect_policy(
+ &self,
+ req: Request<AsyncBody>,
+ _: bool,
+ ) -> BoxFuture<'static, Result<Response<AsyncBody>, anyhow::Error>> {
+ self.send(req)
+ }
}
struct SshLspAdapterDelegate {
@@ -51,6 +51,7 @@ workspace.workspace = true
worktree.workspace = true
[dev-dependencies]
+isahc_http_client.workspace = true
env_logger.workspace = true
client = { workspace = true, features = ["test-support"] }
fs = { workspace = true, features = ["test-support"] }
@@ -2,6 +2,7 @@ use client::Client;
use futures::channel::oneshot;
use gpui::App;
use http_client::HttpClientWithUrl;
+use isahc_http_client::IsahcHttpClient;
use language::language_settings::AllLanguageSettings;
use project::Project;
use semantic_index::{OpenAiEmbeddingModel, OpenAiEmbeddingProvider, SemanticDb};
@@ -26,8 +27,12 @@ fn main() {
});
let clock = Arc::new(FakeSystemClock::default());
- let http = Arc::new(HttpClientWithUrl::new("http://localhost:11434", None, None));
+ let http = Arc::new(HttpClientWithUrl::new(
+ IsahcHttpClient::new(None, None),
+ "http://localhost:11434",
+ None,
+ ));
let client = client::Client::new(clock, http.clone(), cx);
Client::set_global(client.clone(), cx);
@@ -1,5 +1,5 @@
use anyhow::{Context as _, Result};
-use futures::{future::BoxFuture, AsyncReadExt, FutureExt};
+use futures::{future::BoxFuture, AsyncReadExt as _, FutureExt};
use http_client::HttpClient;
use serde::{Deserialize, Serialize};
use std::sync::Arc;
@@ -47,6 +47,7 @@ file_finder.workspace = true
file_icons.workspace = true
fs.workspace = true
futures.workspace = true
+isahc_http_client.workspace = true
git.workspace = true
git_hosting_providers.workspace = true
go_to_line.workspace = true
@@ -11,7 +11,7 @@ use assistant::PromptBuilder;
use chrono::Offset;
use clap::{command, Parser};
use cli::FORCE_CLI_MODE_ENV_VAR_NAME;
-use client::{parse_zed_link, Client, DevServerToken, UserStore};
+use client::{parse_zed_link, Client, DevServerToken, ProxySettings, UserStore};
use collab_ui::channel_view::ChannelView;
use db::kvp::KEY_VALUE_STORE;
use editor::Editor;
@@ -23,6 +23,8 @@ use gpui::{
Action, App, AppContext, AsyncAppContext, Context, DismissEvent, Global, Task,
UpdateGlobal as _, VisualContext,
};
+use http_client::{read_proxy_from_env, Uri};
+use isahc_http_client::IsahcHttpClient;
use language::LanguageRegistry;
use log::LevelFilter;
@@ -327,7 +329,10 @@ fn main() {
init_logger();
log::info!("========== starting zed ==========");
- let app = App::new().with_assets(Assets);
+
+ let app = App::new()
+ .with_assets(Assets)
+ .with_http_client(IsahcHttpClient::new(None, None));
let (installation_id, existing_installation_id_found) = app
.background_executor()
@@ -436,6 +441,26 @@ fn main() {
if let Some(build_sha) = option_env!("ZED_COMMIT_SHA") {
AppCommitSha::set_global(AppCommitSha(build_sha.into()), cx);
}
+ settings::init(cx);
+ client::init_settings(cx);
+ let user_agent = format!(
+ "Zed/{} ({}; {})",
+ AppVersion::global(cx),
+ std::env::consts::OS,
+ std::env::consts::ARCH
+ );
+ let proxy_str = ProxySettings::get_global(cx).proxy.to_owned();
+ let proxy_url = proxy_str
+ .as_ref()
+ .and_then(|input| {
+ input
+ .parse::<Uri>()
+ .inspect_err(|e| log::error!("Error parsing proxy settings: {}", e))
+ .ok()
+ })
+ .or_else(read_proxy_from_env);
+ let http = IsahcHttpClient::new(proxy_url, Some(user_agent));
+ cx.set_http_client(http);
<dyn Fs>::set_global(fs.clone(), cx);
@@ -444,11 +469,9 @@ fn main() {
OpenListener::set_global(cx, open_listener.clone());
- settings::init(cx);
handle_settings_file_changes(user_settings_file_rx, cx, handle_settings_changed);
handle_keymap_file_changes(user_keymap_file_rx, cx, handle_keymap_changed);
- client::init_settings(cx);
let client = Client::production(cx);
cx.set_http_client(client.http_client().clone());
let mut languages = LanguageRegistry::new(cx.background_executor().clone());
@@ -43,7 +43,10 @@ script/generate-licenses
# Build binary in release mode
export RUSTFLAGS="${RUSTFLAGS:-} -C link-args=-Wl,--disable-new-dtags,-rpath,\$ORIGIN/../lib"
-cargo build --release --target "${target_triple}" --package zed --package cli --package remote_server
+cargo build --release --target "${target_triple}" --package zed --package cli
+# Build remote_server in separate invocation to prevent feature unification from other crates
+# from influencing dynamic libraries required by it.
+cargo build --release --target "${target_triple}" --package remote_server
# Strip the binary of all debug symbols
# Later, we probably want to do something like this: https://github.com/GabrielMajeri/separate-symbols
@@ -51,6 +54,10 @@ strip --strip-debug "${target_dir}/${target_triple}/release/zed"
strip --strip-debug "${target_dir}/${target_triple}/release/cli"
strip --strip-debug "${target_dir}/${target_triple}/release/remote_server"
+
+# Ensure that remote_server does not depend on libssl nor libcrypto, as we got rid of these deps.
+ldd "${target_dir}/${target_triple}/release/remote_server" | grep -q 'libcrypto\|libssl'
+
suffix=""
if [ "$channel" != "stable" ]; then
suffix="-$channel"