Detailed changes
@@ -9,6 +9,7 @@ dependencies = [
"action_log",
"agent-client-protocol",
"anyhow",
+ "async-channel 2.5.0",
"base64 0.22.1",
"buffer_diff",
"chrono",
@@ -33,7 +34,6 @@ dependencies = [
"serde",
"serde_json",
"settings",
- "smol",
"task",
"telemetry",
"terminal",
@@ -148,6 +148,8 @@ dependencies = [
"agent_servers",
"agent_settings",
"anyhow",
+ "async-channel 2.5.0",
+ "async-io",
"chrono",
"client",
"clock",
@@ -194,7 +196,6 @@ dependencies = [
"settings",
"shell_command_parser",
"smallvec",
- "smol",
"sqlez",
"streaming_diff",
"strsim",
@@ -256,6 +257,7 @@ dependencies = [
"action_log",
"agent-client-protocol",
"anyhow",
+ "async-channel 2.5.0",
"async-trait",
"chrono",
"client",
@@ -280,7 +282,6 @@ dependencies = [
"serde",
"serde_json",
"settings",
- "smol",
"task",
"tempfile",
"terminal",
@@ -327,6 +328,7 @@ dependencies = [
"agent_settings",
"ai_onboarding",
"anyhow",
+ "async-channel 2.5.0",
"audio",
"base64 0.22.1",
"buffer_diff",
@@ -347,6 +349,7 @@ dependencies = [
"file_icons",
"fs",
"futures 0.3.32",
+ "futures-lite 1.13.0",
"fuzzy",
"git",
"gpui",
@@ -392,7 +395,6 @@ dependencies = [
"serde_json",
"serde_json_lenient",
"settings",
- "smol",
"streaming_diff",
"task",
"telemetry",
@@ -890,7 +892,7 @@ version = "2.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8034a681df4aed8b8edbd7fbe472401ecf009251c8b40556b304567052e294c5"
dependencies = [
- "async-lock 3.4.2",
+ "async-lock",
"blocking",
"futures-lite 2.6.1",
]
@@ -904,7 +906,7 @@ dependencies = [
"async-channel 2.5.0",
"async-executor",
"async-io",
- "async-lock 3.4.2",
+ "async-lock",
"blocking",
"futures-lite 2.6.1",
"once_cell",
@@ -928,15 +930,6 @@ dependencies = [
"windows-sys 0.61.2",
]
-[[package]]
-name = "async-lock"
-version = "2.8.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "287272293e9d8c41773cec55e365490fe034813a2f172f502d6ddcf75b2f582b"
-dependencies = [
- "event-listener 2.5.3",
-]
-
[[package]]
name = "async-lock"
version = "3.4.2"
@@ -976,7 +969,7 @@ checksum = "fc50921ec0055cdd8a16de48773bfeec5c972598674347252c0399676be7da75"
dependencies = [
"async-channel 2.5.0",
"async-io",
- "async-lock 3.4.2",
+ "async-lock",
"async-signal",
"async-task",
"blocking",
@@ -1004,7 +997,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "43c070bbf59cd3570b6b2dd54cd772527c7c3620fce8be898406dd3ed6adc64c"
dependencies = [
"async-io",
- "async-lock 3.4.2",
+ "async-lock",
"atomic-waker",
"cfg-if",
"futures-core",
@@ -1025,7 +1018,7 @@ dependencies = [
"async-channel 1.9.0",
"async-global-executor",
"async-io",
- "async-lock 3.4.2",
+ "async-lock",
"async-process",
"crossbeam-utils",
"futures-channel",
@@ -2899,6 +2892,7 @@ name = "client"
version = "0.1.0"
dependencies = [
"anyhow",
+ "async-channel 2.5.0",
"async-tungstenite",
"base64 0.22.1",
"chrono",
@@ -2966,6 +2960,7 @@ name = "cloud_api_client"
version = "0.1.0"
dependencies = [
"anyhow",
+ "async-lock",
"cloud_api_types",
"futures 0.3.32",
"gpui",
@@ -2973,7 +2968,6 @@ dependencies = [
"http_client",
"parking_lot",
"serde_json",
- "smol",
"thiserror 2.0.17",
"yawc",
]
@@ -3120,6 +3114,7 @@ version = "0.44.0"
dependencies = [
"agent",
"anyhow",
+ "async-channel 2.5.0",
"async-trait",
"async-tungstenite",
"aws-config",
@@ -3485,15 +3480,19 @@ name = "context_server"
version = "0.1.0"
dependencies = [
"anyhow",
+ "async-channel 2.5.0",
+ "async-process",
"async-trait",
"base64 0.22.1",
"collections",
"futures 0.3.32",
+ "futures-lite 1.13.0",
"gpui",
"http_client",
"log",
"net",
"parking_lot",
+ "pollster 0.4.0",
"postage",
"rand 0.9.2",
"schemars",
@@ -3502,7 +3501,6 @@ dependencies = [
"settings",
"sha2",
"slotmap",
- "smol",
"tempfile",
"terminal",
"tiny_http",
@@ -4031,6 +4029,7 @@ dependencies = [
name = "crashes"
version = "0.1.0"
dependencies = [
+ "async-process",
"cfg-if",
"crash-handler",
"futures 0.3.32",
@@ -4042,7 +4041,6 @@ dependencies = [
"release_channel",
"serde",
"serde_json",
- "smol",
"system_specs",
"windows 0.61.3",
"zstd",
@@ -4540,7 +4538,6 @@ dependencies = [
"log",
"paths",
"release_channel",
- "smol",
"sqlez",
"sqlez_macros",
"tempfile",
@@ -5371,6 +5368,7 @@ dependencies = [
"file_icons",
"fs",
"futures 0.3.32",
+ "futures-lite 1.13.0",
"fuzzy",
"git",
"gpui",
@@ -5401,7 +5399,6 @@ dependencies = [
"serde_json",
"settings",
"smallvec",
- "smol",
"snippet",
"sum_tree",
"task",
@@ -6461,6 +6458,7 @@ version = "0.1.0"
dependencies = [
"anyhow",
"ashpd",
+ "async-channel 2.5.0",
"async-tar",
"async-trait",
"collections",
@@ -7125,6 +7123,7 @@ version = "0.1.0"
dependencies = [
"anyhow",
"askpass",
+ "async-channel 2.5.0",
"async-trait",
"collections",
"derive_more",
@@ -7174,6 +7173,7 @@ name = "git_graph"
version = "0.1.0"
dependencies = [
"anyhow",
+ "async-channel 2.5.0",
"collections",
"db",
"editor",
@@ -7190,7 +7190,6 @@ dependencies = [
"serde_json",
"settings",
"smallvec",
- "smol",
"theme",
"theme_settings",
"time",
@@ -7237,6 +7236,7 @@ dependencies = [
"editor",
"file_icons",
"futures 0.3.32",
+ "futures-lite 1.13.0",
"fuzzy",
"git",
"gpui",
@@ -7264,7 +7264,6 @@ dependencies = [
"serde_json",
"settings",
"smallvec",
- "smol",
"strum 0.27.2",
"telemetry",
"theme",
@@ -9289,6 +9288,7 @@ dependencies = [
"encoding_rs",
"fs",
"futures 0.3.32",
+ "futures-lite 1.13.0",
"fuzzy",
"globset",
"gpui",
@@ -9311,7 +9311,6 @@ dependencies = [
"settings",
"shellexpand 2.1.2",
"smallvec",
- "smol",
"streaming-iterator",
"strsim",
"sum_tree",
@@ -9409,6 +9408,7 @@ name = "language_model_core"
version = "0.1.0"
dependencies = [
"anyhow",
+ "async-lock",
"cloud_llm_client",
"futures 0.3.32",
"gpui_shared_string",
@@ -9417,7 +9417,6 @@ dependencies = [
"schemars",
"serde",
"serde_json",
- "smol",
"strum 0.27.2",
"thiserror 2.0.17",
]
@@ -9429,6 +9428,7 @@ dependencies = [
"ai_onboarding",
"anthropic",
"anyhow",
+ "async-lock",
"aws-config",
"aws-credential-types",
"aws_http_client",
@@ -9470,7 +9470,6 @@ dependencies = [
"serde",
"serde_json",
"settings",
- "smol",
"strum 0.27.2",
"tiktoken-rs",
"tokio",
@@ -9498,7 +9497,6 @@ dependencies = [
"semver",
"serde",
"serde_json",
- "smol",
"thiserror 2.0.17",
"x_ai",
]
@@ -10725,6 +10723,7 @@ dependencies = [
"clock",
"collections",
"ctor",
+ "futures-lite 1.13.0",
"gpui",
"indoc",
"itertools 0.14.0",
@@ -10737,7 +10736,6 @@ dependencies = [
"serde",
"settings",
"smallvec",
- "smol",
"sum_tree",
"text",
"theme",
@@ -11011,7 +11009,7 @@ dependencies = [
"channel",
"client",
"component",
- "db",
+ "futures-lite 1.13.0",
"gpui",
"rpc",
"sum_tree",
@@ -11605,7 +11603,7 @@ dependencies = [
"ashpd",
"async-fs",
"async-io",
- "async-lock 3.4.2",
+ "async-lock",
"blocking",
"cbc",
"cipher",
@@ -11849,6 +11847,7 @@ name = "outline"
version = "0.1.0"
dependencies = [
"editor",
+ "futures 0.3.32",
"fuzzy",
"gpui",
"indoc",
@@ -11861,7 +11860,6 @@ dependencies = [
"rope",
"serde_json",
"settings",
- "smol",
"theme",
"theme_settings",
"ui",
@@ -11875,10 +11873,12 @@ name = "outline_panel"
version = "0.1.0"
dependencies = [
"anyhow",
+ "async-channel 2.5.0",
"collections",
"db",
"editor",
"file_icons",
+ "futures 0.3.32",
"fuzzy",
"gpui",
"itertools 0.14.0",
@@ -11894,7 +11894,6 @@ dependencies = [
"serde_json",
"settings",
"smallvec",
- "smol",
"theme",
"theme_settings",
"ui",
@@ -13242,6 +13241,7 @@ dependencies = [
"aho-corasick",
"anyhow",
"askpass",
+ "async-channel 2.5.0",
"async-trait",
"base64 0.22.1",
"buffer_diff",
@@ -14206,7 +14206,6 @@ dependencies = [
"serde",
"serde_json",
"settings",
- "smol",
"task",
"telemetry",
"ui",
@@ -14419,6 +14418,7 @@ dependencies = [
"agent",
"anyhow",
"askpass",
+ "async-channel 2.5.0",
"cargo_toml",
"clap",
"client",
@@ -15509,6 +15509,7 @@ dependencies = [
"editor",
"fs",
"futures 0.3.32",
+ "futures-lite 1.13.0",
"gpui",
"itertools 0.14.0",
"language",
@@ -15520,7 +15521,6 @@ dependencies = [
"serde",
"serde_json",
"settings",
- "smol",
"theme",
"theme_settings",
"tracing",
@@ -16046,6 +16046,7 @@ dependencies = [
"agent_settings",
"agent_ui",
"anyhow",
+ "async-channel 2.5.0",
"chrono",
"client",
"clock",
@@ -16073,7 +16074,6 @@ dependencies = [
"serde",
"serde_json",
"settings",
- "smol",
"theme",
"theme_settings",
"ui",
@@ -16258,7 +16258,7 @@ dependencies = [
"async-executor",
"async-fs",
"async-io",
- "async-lock 3.4.2",
+ "async-lock",
"async-net",
"async-process",
"blocking",
@@ -16425,7 +16425,7 @@ dependencies = [
"libsqlite3-sys",
"log",
"parking_lot",
- "smol",
+ "pollster 0.4.0",
"sqlformat",
"thread_local",
"util",
@@ -17359,7 +17359,6 @@ dependencies = [
"serde",
"serde_json",
"settings",
- "smol",
"theme",
"theme_settings",
"ui",
@@ -17529,8 +17528,10 @@ version = "0.1.0"
dependencies = [
"alacritty_terminal",
"anyhow",
+ "async-channel 2.5.0",
"collections",
"futures 0.3.32",
+ "futures-lite 1.13.0",
"gpui",
"itertools 0.14.0",
"libc",
@@ -17542,7 +17543,6 @@ dependencies = [
"schemars",
"serde",
"settings",
- "smol",
"sysinfo 0.37.2",
"task",
"theme",
@@ -21690,6 +21690,7 @@ dependencies = [
"feature_flags",
"fs",
"futures 0.3.32",
+ "futures-lite 1.13.0",
"git",
"gpui",
"http_client",
@@ -21731,7 +21732,8 @@ name = "worktree"
version = "0.1.0"
dependencies = [
"anyhow",
- "async-lock 2.8.0",
+ "async-channel 2.5.0",
+ "async-lock",
"chardetng",
"clock",
"collections",
@@ -21754,7 +21756,6 @@ dependencies = [
"serde_json",
"settings",
"smallvec",
- "smol",
"sum_tree",
"text",
"tracing",
@@ -22087,7 +22088,7 @@ dependencies = [
"async-broadcast",
"async-executor",
"async-io",
- "async-lock 3.4.2",
+ "async-lock",
"async-process",
"async-recursion",
"async-task",
@@ -506,13 +506,15 @@ async-compat = "0.2.1"
async-compression = { version = "0.4", features = ["bzip2", "gzip", "futures-io"] }
async-dispatcher = "0.1"
async-fs = "2.1"
-async-lock = "2.1"
+async-io = "2.6.0"
+async-lock = "3.4.2"
async-pipe = { git = "https://github.com/zed-industries/async-pipe-rs", rev = "82d00a04211cf4e1236029aa03e6b6ce2a74c553" }
async-recursion = "1.0.0"
async-tar = "0.5.1"
async-task = "4.7"
async-trait = "0.1"
async-tungstenite = "0.31.0"
+async-process = "2.5.0"
async_zip = { version = "0.0.18", features = ["deflate", "deflate64"] }
aws-config = { version = "1.8.10", features = ["behavior-version-latest"] }
aws-credential-types = { version = "1.2.8", features = [
@@ -18,6 +18,7 @@ test-support = ["gpui/test-support", "project/test-support", "dep:parking_lot",
[dependencies]
action_log.workspace = true
agent-client-protocol.workspace = true
+async-channel.workspace = true
base64.workspace = true
anyhow.workspace = true
buffer_diff.workspace = true
@@ -40,7 +41,6 @@ prompt_store.workspace = true
serde.workspace = true
serde_json.workspace = true
settings.workspace = true
-smol.workspace = true
task.workspace = true
telemetry.workspace = true
terminal.workspace = true
@@ -2992,6 +2992,7 @@ fn markdown_for_raw_output(
mod tests {
use super::*;
use anyhow::anyhow;
+ use futures::stream::StreamExt as _;
use futures::{channel::mpsc, future::LocalBoxFuture, select};
use gpui::{App, AsyncApp, TestAppContext, WeakEntity};
use indoc::indoc;
@@ -2999,7 +3000,6 @@ mod tests {
use rand::{distr, prelude::*};
use serde_json::json;
use settings::SettingsStore;
- use smol::stream::StreamExt as _;
use std::{
any::Any,
cell::RefCell,
@@ -3204,7 +3204,7 @@ mod tests {
// Create a real PTY terminal that runs a command which prints output then sleeps
// We use printf instead of echo and chain with && sleep to ensure proper execution
- let (completion_tx, _completion_rx) = smol::channel::unbounded();
+ let (completion_tx, _completion_rx) = async_channel::unbounded();
let (program, args) = ShellBuilder::new(&Shell::System, false).build(
Some("printf 'output_before_kill\\n' && sleep 60".to_owned()),
&[],
@@ -318,7 +318,7 @@ pub trait AgentSessionList {
Task::ready(Err(anyhow::anyhow!("delete_sessions not supported")))
}
- fn watch(&self, _cx: &mut App) -> Option<smol::channel::Receiver<SessionListUpdate>> {
+ fn watch(&self, _cx: &mut App) -> Option<async_channel::Receiver<SessionListUpdate>> {
None
}
@@ -19,6 +19,7 @@ workspace = true
[dependencies]
acp_thread.workspace = true
action_log.workspace = true
+async-channel.workspace = true
agent-client-protocol.workspace = true
agent_servers.workspace = true
agent_settings.workspace = true
@@ -59,7 +60,6 @@ serde_json.workspace = true
settings.workspace = true
shell_command_parser.workspace = true
smallvec.workspace = true
-smol.workspace = true
sqlez.workspace = true
streaming_diff.workspace = true
strsim.workspace = true
@@ -77,6 +77,7 @@ zed_env_vars.workspace = true
zstd.workspace = true
[dev-dependencies]
+async-io.workspace = true
agent_servers = { workspace = true, "features" = ["test-support"] }
client = { workspace = true, "features" = ["test-support"] }
clock = { workspace = true, "features" = ["test-support"] }
@@ -1647,14 +1647,14 @@ impl acp_thread::AgentTelemetry for NativeAgentConnection {
pub struct NativeAgentSessionList {
thread_store: Entity<ThreadStore>,
- updates_tx: smol::channel::Sender<acp_thread::SessionListUpdate>,
- updates_rx: smol::channel::Receiver<acp_thread::SessionListUpdate>,
+ updates_tx: async_channel::Sender<acp_thread::SessionListUpdate>,
+ updates_rx: async_channel::Receiver<acp_thread::SessionListUpdate>,
_subscription: Subscription,
}
impl NativeAgentSessionList {
fn new(thread_store: Entity<ThreadStore>, cx: &mut App) -> Self {
- let (tx, rx) = smol::channel::unbounded();
+ let (tx, rx) = async_channel::unbounded();
let this_tx = tx.clone();
let subscription = cx.observe(&thread_store, move |_, _| {
this_tx
@@ -1706,7 +1706,7 @@ impl AgentSessionList for NativeAgentSessionList {
fn watch(
&self,
_cx: &mut App,
- ) -> Option<smol::channel::Receiver<acp_thread::SessionListUpdate>> {
+ ) -> Option<async_channel::Receiver<acp_thread::SessionListUpdate>> {
Some(self.updates_rx.clone())
}
@@ -1670,7 +1670,7 @@ async fn retry_on_rate_limit<R>(mut request: impl AsyncFnMut() -> Result<R>) ->
eprintln!("Attempt #{attempt}: Retry after {retry_after:?} + jitter of {jitter:?}");
// This code does not use the gpui::executor
#[allow(clippy::disallowed_methods)]
- smol::Timer::after(retry_after + jitter).await;
+ async_io::Timer::after(retry_after + jitter).await;
} else {
return response;
}
@@ -729,7 +729,7 @@ async fn retry_on_rate_limit<R>(mut request: impl AsyncFnMut() -> Result<R>) ->
let jitter = retry_after.mul_f64(rand::rng().random_range(0.0..1.0));
eprintln!("Attempt #{attempt}: Retry after {retry_after:?} + jitter of {jitter:?}");
#[allow(clippy::disallowed_methods)]
- smol::Timer::after(retry_after + jitter).await;
+ async_io::Timer::after(retry_after + jitter).await;
} else {
return response;
}
@@ -20,6 +20,7 @@ doctest = false
acp_tools.workspace = true
acp_thread.workspace = true
action_log.workspace = true
+async-channel.workspace = true
agent-client-protocol.workspace = true
anyhow.workspace = true
async-trait.workspace = true
@@ -44,7 +45,6 @@ reqwest_client = { workspace = true, optional = true }
serde.workspace = true
serde_json.workspace = true
settings.workspace = true
-smol.workspace = true
task.workspace = true
tempfile.workspace = true
thiserror.workspace = true
@@ -85,13 +85,13 @@ pub struct AcpSession {
pub struct AcpSessionList {
connection: Rc<acp::ClientSideConnection>,
- updates_tx: smol::channel::Sender<acp_thread::SessionListUpdate>,
- updates_rx: smol::channel::Receiver<acp_thread::SessionListUpdate>,
+ updates_tx: async_channel::Sender<acp_thread::SessionListUpdate>,
+ updates_rx: async_channel::Receiver<acp_thread::SessionListUpdate>,
}
impl AcpSessionList {
fn new(connection: Rc<acp::ClientSideConnection>) -> Self {
- let (tx, rx) = smol::channel::unbounded();
+ let (tx, rx) = async_channel::unbounded();
Self {
connection,
updates_tx: tx,
@@ -150,7 +150,7 @@ impl AgentSessionList for AcpSessionList {
fn watch(
&self,
_cx: &mut App,
- ) -> Option<smol::channel::Receiver<acp_thread::SessionListUpdate>> {
+ ) -> Option<async_channel::Receiver<acp_thread::SessionListUpdate>> {
Some(self.updates_rx.clone())
}
@@ -30,6 +30,7 @@ acp_thread.workspace = true
action_log.workspace = true
agent-client-protocol.workspace = true
agent.workspace = true
+async-channel.workspace = true
agent_servers.workspace = true
agent_settings.workspace = true
ai_onboarding.workspace = true
@@ -54,6 +55,7 @@ feature_flags.workspace = true
file_icons.workspace = true
fs.workspace = true
futures.workspace = true
+futures-lite.workspace = true
git.workspace = true
fuzzy.workspace = true
gpui.workspace = true
@@ -91,7 +93,6 @@ serde.workspace = true
serde_json.workspace = true
serde_json_lenient.workspace = true
settings.workspace = true
-smol.workspace = true
streaming_diff.workspace = true
task.workspace = true
telemetry.workspace = true
@@ -3,6 +3,7 @@ use agent_settings::AgentSettings;
use anyhow::{Context as _, Result};
use collections::HashSet;
use editor::{Anchor, AnchorRangeExt, MultiBuffer, MultiBufferSnapshot, ToOffset as _, ToPoint};
+use futures::FutureExt;
use futures::{
SinkExt, Stream, StreamExt, TryStreamExt as _,
channel::mpsc,
@@ -28,7 +29,6 @@ use rope::Rope;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use settings::Settings as _;
-use smol::future::FutureExt;
use std::{
cmp,
future::Future,
@@ -1862,12 +1862,12 @@ pub mod evals {
use eval_utils::{EvalOutput, NoProcessor};
use fs::FakeFs;
use futures::channel::mpsc;
+ use futures::stream::StreamExt as _;
use gpui::{AppContext, TestAppContext, UpdateGlobal as _};
use language::Buffer;
use language_model::{LanguageModelRegistry, SelectedModel};
use project::Project;
use prompt_store::PromptBuilder;
- use smol::stream::StreamExt as _;
use std::str::FromStr;
use std::sync::Arc;
use util::test::marked_text_ranges;
@@ -222,6 +222,7 @@ impl ThreadHistory {
mod tests {
use super::*;
use acp_thread::AgentSessionListResponse;
+ use futures_lite::future::yield_now;
use gpui::TestAppContext;
use std::{
any::Any,
@@ -239,13 +240,13 @@ mod tests {
#[derive(Clone)]
struct TestSessionList {
sessions: Vec<AgentSessionInfo>,
- updates_tx: smol::channel::Sender<SessionListUpdate>,
- updates_rx: smol::channel::Receiver<SessionListUpdate>,
+ updates_tx: async_channel::Sender<SessionListUpdate>,
+ updates_rx: async_channel::Receiver<SessionListUpdate>,
}
impl TestSessionList {
fn new(sessions: Vec<AgentSessionInfo>) -> Self {
- let (tx, rx) = smol::channel::unbounded();
+ let (tx, rx) = async_channel::unbounded();
Self {
sessions,
updates_tx: tx,
@@ -267,7 +268,7 @@ mod tests {
Task::ready(Ok(AgentSessionListResponse::new(self.sessions.clone())))
}
- fn watch(&self, _cx: &mut App) -> Option<smol::channel::Receiver<SessionListUpdate>> {
+ fn watch(&self, _cx: &mut App) -> Option<async_channel::Receiver<SessionListUpdate>> {
Some(self.updates_rx.clone())
}
@@ -286,8 +287,8 @@ mod tests {
second_page_sessions: Vec<AgentSessionInfo>,
requested_cursors: Arc<Mutex<Vec<Option<String>>>>,
async_responses: bool,
- updates_tx: smol::channel::Sender<SessionListUpdate>,
- updates_rx: smol::channel::Receiver<SessionListUpdate>,
+ updates_tx: async_channel::Sender<SessionListUpdate>,
+ updates_rx: async_channel::Receiver<SessionListUpdate>,
}
impl PaginatedTestSessionList {
@@ -295,7 +296,7 @@ mod tests {
first_page_sessions: Vec<AgentSessionInfo>,
second_page_sessions: Vec<AgentSessionInfo>,
) -> Self {
- let (tx, rx) = smol::channel::unbounded();
+ let (tx, rx) = async_channel::unbounded();
Self {
first_page_sessions,
second_page_sessions,
@@ -353,7 +354,7 @@ mod tests {
if self.async_responses {
cx.foreground_executor().spawn(async move {
- smol::future::yield_now().await;
+ yield_now().await;
Ok(respond())
})
} else {
@@ -361,7 +362,7 @@ mod tests {
}
}
- fn watch(&self, _cx: &mut App) -> Option<smol::channel::Receiver<SessionListUpdate>> {
+ fn watch(&self, _cx: &mut App) -> Option<async_channel::Receiver<SessionListUpdate>> {
Some(self.updates_rx.clone())
}
@@ -411,8 +411,8 @@ pub struct ThreadMetadataStore {
threads_by_main_paths: HashMap<PathList, HashSet<acp::SessionId>>,
reload_task: Option<Shared<Task<()>>>,
session_subscriptions: HashMap<acp::SessionId, Subscription>,
- pending_thread_ops_tx: smol::channel::Sender<DbOperation>,
- in_flight_archives: HashMap<acp::SessionId, (Task<()>, smol::channel::Sender<()>)>,
+ pending_thread_ops_tx: async_channel::Sender<DbOperation>,
+ in_flight_archives: HashMap<acp::SessionId, (Task<()>, async_channel::Sender<()>)>,
_db_operations_task: Task<()>,
}
@@ -448,7 +448,7 @@ impl ThreadMetadataStore {
let thread = std::thread::current();
let test_name = thread.name().unwrap_or("unknown_test");
let db_name = format!("THREAD_METADATA_DB_{}", test_name);
- let db = smol::block_on(db::open_test_db::<ThreadMetadataDb>(&db_name));
+ let db = gpui::block_on(db::open_test_db::<ThreadMetadataDb>(&db_name));
let thread_store = cx.new(|cx| Self::new(ThreadMetadataDb(db), cx));
cx.set_global(GlobalThreadMetadataStore(thread_store));
}
@@ -633,7 +633,7 @@ impl ThreadMetadataStore {
pub fn archive(
&mut self,
session_id: &acp::SessionId,
- archive_job: Option<(Task<()>, smol::channel::Sender<()>)>,
+ archive_job: Option<(Task<()>, async_channel::Sender<()>)>,
cx: &mut Context<Self>,
) {
self.update_archived(session_id, true, cx);
@@ -908,7 +908,7 @@ impl ThreadMetadataStore {
})
.detach();
- let (tx, rx) = smol::channel::unbounded();
+ let (tx, rx) = async_channel::unbounded();
let _db_operations_task = cx.background_spawn({
let db = db.clone();
async move {
@@ -1441,7 +1441,7 @@ mod tests {
fn clear_thread_metadata_remote_connection_backfill(cx: &mut TestAppContext) {
let kvp = cx.update(|cx| KeyValueStore::global(cx));
- smol::block_on(kvp.delete_kvp("thread-metadata-remote-connection-backfill".to_string()))
+ gpui::block_on(kvp.delete_kvp("thread-metadata-remote-connection-backfill".to_string()))
.unwrap();
}
@@ -1464,7 +1464,7 @@ mod tests {
let thread = std::thread::current();
let test_name = thread.name().unwrap_or("unknown_test");
let db_name = format!("THREAD_METADATA_DB_{}", test_name);
- let db = ThreadMetadataDb(smol::block_on(db::open_test_db::<ThreadMetadataDb>(
+ let db = ThreadMetadataDb(gpui::block_on(db::open_test_db::<ThreadMetadataDb>(
&db_name,
)));
@@ -1065,7 +1065,7 @@ impl PickerDelegate for ProjectPickerDelegate {
})
.collect();
- let mut sibling_matches = smol::block_on(fuzzy::match_strings(
+ let mut sibling_matches = gpui::block_on(fuzzy::match_strings(
&sibling_candidates,
query,
smart_case,
@@ -1099,7 +1099,7 @@ impl PickerDelegate for ProjectPickerDelegate {
})
.collect();
- let mut recent_matches = smol::block_on(fuzzy::match_strings(
+ let mut recent_matches = gpui::block_on(fuzzy::match_strings(
&recent_candidates,
query,
smart_case,
@@ -17,6 +17,7 @@ test-support = ["clock/test-support", "collections/test-support", "gpui/test-sup
[dependencies]
anyhow.workspace = true
+async-channel.workspace = true
async-tungstenite = { workspace = true, features = ["tokio", "tokio-rustls-manual-roots"] }
base64.workspace = true
chrono = { workspace = true, features = ["serde"] }
@@ -2164,8 +2164,8 @@ mod tests {
});
let server = FakeServer::for_client(user_id, &client, cx).await;
- let (done_tx1, done_rx1) = smol::channel::unbounded();
- let (done_tx2, done_rx2) = smol::channel::unbounded();
+ let (done_tx1, done_rx1) = async_channel::unbounded();
+ let (done_tx2, done_rx2) = async_channel::unbounded();
AnyProtoClient::from(client.clone()).add_entity_message_handler(
move |entity: Entity<TestEntity>, _: TypedEnvelope<proto::JoinProject>, cx| {
match entity.read_with(&cx, |entity, _| entity.id) {
@@ -2235,8 +2235,8 @@ mod tests {
let server = FakeServer::for_client(user_id, &client, cx).await;
let entity = cx.new(|_| TestEntity::default());
- let (done_tx1, _done_rx1) = smol::channel::unbounded();
- let (done_tx2, done_rx2) = smol::channel::unbounded();
+ let (done_tx1, _done_rx1) = async_channel::unbounded();
+ let (done_tx2, done_rx2) = async_channel::unbounded();
let subscription1 = client.add_message_handler(
entity.downgrade(),
move |_, _: TypedEnvelope<proto::Ping>, _| {
@@ -2270,7 +2270,7 @@ mod tests {
let server = FakeServer::for_client(user_id, &client, cx).await;
let entity = cx.new(|_| TestEntity::default());
- let (done_tx, done_rx) = smol::channel::unbounded();
+ let (done_tx, done_rx) = async_channel::unbounded();
let subscription = client.add_message_handler(
entity.clone().downgrade(),
move |entity: Entity<TestEntity>, _: TypedEnvelope<proto::Ping>, mut cx| {
@@ -20,6 +20,6 @@ gpui_tokio.workspace = true
http_client.workspace = true
parking_lot.workspace = true
serde_json.workspace = true
-smol.workspace = true
+async-lock.workspace = true
thiserror.workspace = true
yawc.workspace = true
@@ -1,7 +1,7 @@
use std::sync::Arc;
+use async_lock::{RwLock, RwLockUpgradableReadGuard, RwLockWriteGuard};
use cloud_api_types::OrganizationId;
-use smol::lock::{RwLock, RwLockUpgradableReadGuard, RwLockWriteGuard};
use crate::{ClientApiError, CloudApiClient};
@@ -76,6 +76,7 @@ uuid.workspace = true
[dev-dependencies]
agent = { workspace = true, features = ["test-support"] }
async-trait.workspace = true
+async-channel.workspace = true
buffer_diff.workspace = true
call = { workspace = true, features = ["test-support"] }
@@ -2732,9 +2732,9 @@ async fn test_lsp_pull_diagnostics(
let closure_workspace_diagnostics_pulls_result_ids =
workspace_diagnostics_pulls_result_ids.clone();
let (workspace_diagnostic_cancel_tx, closure_workspace_diagnostic_cancel_rx) =
- smol::channel::bounded::<()>(1);
+ async_channel::bounded::<()>(1);
let (closure_workspace_diagnostic_received_tx, workspace_diagnostic_received_rx) =
- smol::channel::bounded::<()>(1);
+ async_channel::bounded::<()>(1);
let capabilities = lsp::ServerCapabilities {
diagnostic_provider: Some(lsp::DiagnosticServerCapabilities::Options(
@@ -16,10 +16,13 @@ test-support = ["gpui/test-support"]
[dependencies]
anyhow.workspace = true
+async-channel.workspace = true
+async-process.workspace = true
async-trait.workspace = true
base64.workspace = true
collections.workspace = true
futures.workspace = true
+futures-lite.workspace = true
gpui.workspace = true
http_client = { workspace = true, features = ["test-support"] }
log.workspace = true
@@ -33,7 +36,6 @@ serde.workspace = true
settings.workspace = true
sha2.workspace = true
slotmap.workspace = true
-smol.workspace = true
tempfile.workspace = true
tiny_http.workspace = true
url = { workspace = true, features = ["serde"] }
@@ -42,3 +44,4 @@ terminal.workspace = true
[dev-dependencies]
gpui = { workspace = true, features = ["test-support"] }
+pollster.workspace = true
@@ -1,13 +1,13 @@
use anyhow::{Context as _, Result, anyhow};
use collections::HashMap;
use futures::{FutureExt, StreamExt, channel::oneshot, future, select};
+use futures_lite::future::yield_now;
use gpui::{AppContext as _, AsyncApp, BackgroundExecutor, Task};
use parking_lot::Mutex;
use postage::barrier;
use serde::{Deserialize, Serialize, de::DeserializeOwned};
use serde_json::{Value, value::RawValue};
use slotmap::SlotMap;
-use smol::channel;
use std::{
fmt,
path::PathBuf,
@@ -49,7 +49,7 @@ pub enum RequestId {
pub(crate) struct Client {
server_id: ContextServerId,
next_id: AtomicI32,
- outbound_tx: channel::Sender<String>,
+ outbound_tx: async_channel::Sender<String>,
name: Arc<str>,
subscription_set: Arc<Mutex<NotificationSubscriptionSet>>,
response_handlers: Arc<Mutex<Option<HashMap<RequestId, ResponseHandler>>>>,
@@ -197,7 +197,7 @@ impl Client {
request_timeout: Option<Duration>,
cx: AsyncApp,
) -> Result<Self> {
- let (outbound_tx, outbound_rx) = channel::unbounded::<String>();
+ let (outbound_tx, outbound_rx) = async_channel::unbounded::<String>();
let (output_done_tx, output_done_rx) = barrier::channel();
let subscription_set = Arc::new(Mutex::new(NotificationSubscriptionSet::default()));
@@ -304,7 +304,7 @@ impl Client {
}
}
- smol::future::yield_now().await;
+ yield_now().await;
Ok(())
}
@@ -324,7 +324,7 @@ impl Client {
/// writes them to the server's stdin, and manages the lifecycle of response handlers.
async fn handle_output(
transport: Arc<dyn Transport>,
- outbound_rx: channel::Receiver<String>,
+ outbound_rx: async_channel::Receiver<String>,
output_done_tx: barrier::Sender,
response_handlers: Arc<Mutex<Option<HashMap<RequestId, ResponseHandler>>>>,
last_transport_error: Arc<Mutex<Option<anyhow::Error>>>,
@@ -1,6 +1,8 @@
use ::serde::{Deserialize, Serialize};
use anyhow::{Context as _, Result};
use collections::HashMap;
+use futures::AsyncReadExt;
+use futures::stream::StreamExt;
use futures::{
AsyncBufReadExt, AsyncRead, AsyncWrite, AsyncWriteExt, FutureExt,
channel::mpsc::{UnboundedReceiver, UnboundedSender, unbounded},
@@ -12,7 +14,6 @@ use net::async_net::{UnixListener, UnixStream};
use schemars::JsonSchema;
use serde::de::DeserializeOwned;
use serde_json::{json, value::RawValue};
-use smol::stream::StreamExt;
use std::{
any::TypeId,
cell::RefCell,
@@ -201,7 +202,7 @@ impl McpServer {
handlers: Rc<RefCell<HashMap<&'static str, RequestHandler>>>,
cx: &mut AsyncApp,
) {
- let (read, write) = smol::io::split(stream);
+ let (read, write) = stream.split();
let (incoming_tx, mut incoming_rx) = unbounded();
let (outgoing_tx, outgoing_rx) = unbounded();
@@ -1962,7 +1962,7 @@ mod tests {
#[test]
fn test_fetch_protected_resource_metadata() {
- smol::block_on(async {
+ gpui::block_on(async {
let client = make_fake_http_client(|req| {
Box::pin(async move {
let uri = req.uri().to_string();
@@ -2008,7 +2008,7 @@ mod tests {
#[test]
fn test_fetch_protected_resource_metadata_prefers_www_authenticate_url() {
- smol::block_on(async {
+ gpui::block_on(async {
let client = make_fake_http_client(|req| {
Box::pin(async move {
let uri = req.uri().to_string();
@@ -2046,7 +2046,7 @@ mod tests {
#[test]
fn test_fetch_protected_resource_metadata_rejects_cross_origin_url() {
- smol::block_on(async {
+ gpui::block_on(async {
let client = make_fake_http_client(|req| {
Box::pin(async move {
let uri = req.uri().to_string();
@@ -2089,7 +2089,7 @@ mod tests {
#[test]
fn test_fetch_auth_server_metadata() {
- smol::block_on(async {
+ gpui::block_on(async {
let client = make_fake_http_client(|req| {
Box::pin(async move {
let uri = req.uri().to_string();
@@ -2134,7 +2134,7 @@ mod tests {
#[test]
fn test_fetch_auth_server_metadata_falls_back_to_oidc() {
- smol::block_on(async {
+ gpui::block_on(async {
let client = make_fake_http_client(|req| {
Box::pin(async move {
let uri = req.uri().to_string();
@@ -2167,7 +2167,7 @@ mod tests {
#[test]
fn test_fetch_auth_server_metadata_rejects_issuer_mismatch() {
- smol::block_on(async {
+ gpui::block_on(async {
let client = make_fake_http_client(|req| {
Box::pin(async move {
let uri = req.uri().to_string();
@@ -2205,7 +2205,7 @@ mod tests {
#[test]
fn test_full_discover_with_cimd() {
- smol::block_on(async {
+ gpui::block_on(async {
let client = make_fake_http_client(|req| {
Box::pin(async move {
let uri = req.uri().to_string();
@@ -2257,7 +2257,7 @@ mod tests {
#[test]
fn test_full_discover_with_dcr_fallback() {
- smol::block_on(async {
+ gpui::block_on(async {
let client = make_fake_http_client(|req| {
Box::pin(async move {
let uri = req.uri().to_string();
@@ -2320,7 +2320,7 @@ mod tests {
#[test]
fn test_discover_fails_without_pkce_support() {
- smol::block_on(async {
+ gpui::block_on(async {
let client = make_fake_http_client(|req| {
Box::pin(async move {
let uri = req.uri().to_string();
@@ -2370,7 +2370,7 @@ mod tests {
#[test]
fn test_exchange_code_success() {
- smol::block_on(async {
+ gpui::block_on(async {
let client = make_fake_http_client(|req| {
Box::pin(async move {
let uri = req.uri().to_string();
@@ -2420,7 +2420,7 @@ mod tests {
#[test]
fn test_refresh_tokens_success() {
- smol::block_on(async {
+ gpui::block_on(async {
let client = make_fake_http_client(|req| {
Box::pin(async move {
let uri = req.uri().to_string();
@@ -2459,7 +2459,7 @@ mod tests {
#[test]
fn test_exchange_code_failure() {
- smol::block_on(async {
+ gpui::block_on(async {
let client = make_fake_http_client(|_req| {
Box::pin(async move { json_response(400, r#"{"error": "invalid_grant"}"#) })
});
@@ -2494,7 +2494,7 @@ mod tests {
#[test]
fn test_perform_dcr() {
- smol::block_on(async {
+ gpui::block_on(async {
let client = make_fake_http_client(|_req| {
Box::pin(async move {
json_response(
@@ -2522,7 +2522,7 @@ mod tests {
#[test]
fn test_perform_dcr_failure() {
- smol::block_on(async {
+ gpui::block_on(async {
let client = make_fake_http_client(|_req| {
Box::pin(
async move { json_response(403, r#"{"error": "registration_not_allowed"}"#) },
@@ -2693,7 +2693,7 @@ mod tests {
#[test]
fn test_mcp_oauth_provider_refresh_without_refresh_token_returns_false() {
- smol::block_on(async {
+ gpui::block_on(async {
let session = make_test_session("token", None, None);
let provider = McpOAuthTokenProvider::new(
session,
@@ -2710,7 +2710,7 @@ mod tests {
#[test]
fn test_mcp_oauth_provider_refresh_updates_session_and_notifies_channel() {
- smol::block_on(async {
+ gpui::block_on(async {
let session = make_test_session("old-access", Some("my-refresh-token"), None);
let (tx, mut rx) = futures::channel::mpsc::unbounded();
@@ -2744,7 +2744,7 @@ mod tests {
#[test]
fn test_mcp_oauth_provider_refresh_preserves_old_refresh_token_when_server_omits_it() {
- smol::block_on(async {
+ gpui::block_on(async {
let session = make_test_session("old-access", Some("original-refresh"), None);
let (tx, mut rx) = futures::channel::mpsc::unbounded();
@@ -2776,7 +2776,7 @@ mod tests {
#[test]
fn test_mcp_oauth_provider_refresh_returns_false_on_http_error() {
- smol::block_on(async {
+ gpui::block_on(async {
let session = make_test_session("old-access", Some("my-refresh"), None);
let http_client = make_fake_http_client(|_req| {
@@ -5,7 +5,6 @@ use futures::{Stream, StreamExt};
use gpui::BackgroundExecutor;
use http_client::{AsyncBody, HttpClient, Request, Response, http::Method};
use parking_lot::Mutex as SyncMutex;
-use smol::channel;
use std::{pin::Pin, sync::Arc};
use crate::oauth::{self, OAuthTokenProvider, WwwAuthenticate};
@@ -43,10 +42,10 @@ pub struct HttpTransport {
endpoint: String,
session_id: Arc<SyncMutex<Option<String>>>,
executor: BackgroundExecutor,
- response_tx: channel::Sender<String>,
- response_rx: channel::Receiver<String>,
- error_tx: channel::Sender<String>,
- error_rx: channel::Receiver<String>,
+ response_tx: async_channel::Sender<String>,
+ response_rx: async_channel::Receiver<String>,
+ error_tx: async_channel::Sender<String>,
+ error_rx: async_channel::Receiver<String>,
/// Static headers to include in every request (e.g. from server config).
headers: HashMap<String, String>,
/// When set, the transport attaches `Authorization: Bearer` headers and
@@ -71,8 +70,8 @@ impl HttpTransport {
executor: BackgroundExecutor,
token_provider: Option<Arc<dyn OAuthTokenProvider>>,
) -> Self {
- let (response_tx, response_rx) = channel::unbounded();
- let (error_tx, error_rx) = channel::unbounded();
+ let (response_tx, response_rx) = async_channel::unbounded();
+ let (error_tx, error_rx) = async_channel::unbounded();
Self {
http_client,
@@ -241,62 +240,63 @@ impl HttpTransport {
let error_tx = self.error_tx.clone();
// Spawn a task to handle the SSE stream
- smol::spawn(async move {
- let reader = futures::io::BufReader::new(response.body_mut());
- let mut lines = futures::AsyncBufReadExt::lines(reader);
-
- let mut data_buffer = Vec::new();
- let mut in_message = false;
-
- while let Some(line_result) = lines.next().await {
- match line_result {
- Ok(line) => {
- if line.is_empty() {
- // Empty line signals end of event
- if !data_buffer.is_empty() {
- let message = data_buffer.join("\n");
-
- // Filter out ping messages and empty data
- if !message.trim().is_empty() && message != "ping" {
- if let Err(e) = response_tx.send(message).await {
- log::error!("Failed to send SSE message: {}", e);
- break;
+ self.executor
+ .spawn(async move {
+ let reader = futures::io::BufReader::new(response.body_mut());
+ let mut lines = futures::AsyncBufReadExt::lines(reader);
+
+ let mut data_buffer = Vec::new();
+ let mut in_message = false;
+
+ while let Some(line_result) = lines.next().await {
+ match line_result {
+ Ok(line) => {
+ if line.is_empty() {
+ // Empty line signals end of event
+ if !data_buffer.is_empty() {
+ let message = data_buffer.join("\n");
+
+ // Filter out ping messages and empty data
+ if !message.trim().is_empty() && message != "ping" {
+ if let Err(e) = response_tx.send(message).await {
+ log::error!("Failed to send SSE message: {}", e);
+ break;
+ }
}
+ data_buffer.clear();
}
- data_buffer.clear();
- }
- in_message = false;
- } else if let Some(data) = line.strip_prefix("data: ") {
- // Handle data lines
- let data = data.trim();
- if !data.is_empty() {
- // Check if this is a ping message
- if data == "ping" {
- log::trace!("Received SSE ping");
- continue;
+ in_message = false;
+ } else if let Some(data) = line.strip_prefix("data: ") {
+ // Handle data lines
+ let data = data.trim();
+ if !data.is_empty() {
+ // Check if this is a ping message
+ if data == "ping" {
+ log::trace!("Received SSE ping");
+ continue;
+ }
+ data_buffer.push(data.to_string());
+ in_message = true;
}
- data_buffer.push(data.to_string());
- in_message = true;
+ } else if line.starts_with("event:")
+ || line.starts_with("id:")
+ || line.starts_with("retry:")
+ {
+ // Ignore other SSE fields
+ continue;
+ } else if in_message {
+ // Continuation of data
+ data_buffer.push(line);
}
- } else if line.starts_with("event:")
- || line.starts_with("id:")
- || line.starts_with("retry:")
- {
- // Ignore other SSE fields
- continue;
- } else if in_message {
- // Continuation of data
- data_buffer.push(line);
}
- }
- Err(e) => {
- let _ = error_tx.send(format!("SSE stream error: {}", e)).await;
- break;
+ Err(e) => {
+ let _ = error_tx.send(format!("SSE stream error: {}", e)).await;
+ break;
+ }
}
}
- }
- })
- .detach();
+ })
+ .detach();
Ok(())
}
@@ -2,6 +2,7 @@ use std::path::PathBuf;
use std::pin::Pin;
use anyhow::{Context as _, Result};
+use async_process::Child;
use async_trait::async_trait;
use futures::io::{BufReader, BufWriter};
use futures::{
@@ -9,8 +10,6 @@ use futures::{
};
use gpui::AsyncApp;
use settings::Settings as _;
-use smol::channel;
-use smol::process::Child;
use terminal::terminal_settings::TerminalSettings;
use util::TryFutureExt as _;
use util::shell_builder::ShellBuilder;
@@ -19,9 +18,9 @@ use crate::client::ModelContextServerBinary;
use crate::transport::Transport;
pub struct StdioTransport {
- stdout_sender: channel::Sender<String>,
- stdin_receiver: channel::Receiver<String>,
- stderr_receiver: channel::Receiver<String>,
+ stdout_sender: async_channel::Sender<String>,
+ stdin_receiver: async_channel::Receiver<String>,
+ stderr_receiver: async_channel::Receiver<String>,
server: Child,
}
@@ -55,9 +54,9 @@ impl StdioTransport {
let stdout = server.stdout.take().unwrap();
let stderr = server.stderr.take().unwrap();
- let (stdin_sender, stdin_receiver) = channel::unbounded::<String>();
- let (stdout_sender, stdout_receiver) = channel::unbounded::<String>();
- let (stderr_sender, stderr_receiver) = channel::unbounded::<String>();
+ let (stdin_sender, stdin_receiver) = async_channel::unbounded::<String>();
+ let (stdout_sender, stdout_receiver) = async_channel::unbounded::<String>();
+ let (stderr_sender, stderr_receiver) = async_channel::unbounded::<String>();
cx.spawn(async move |_| Self::handle_output(stdin, stdout_receiver).log_err().await)
.detach();
@@ -76,7 +75,7 @@ impl StdioTransport {
})
}
- async fn handle_input<Stdout>(stdin: Stdout, inbound_rx: channel::Sender<String>)
+ async fn handle_input<Stdout>(stdin: Stdout, inbound_rx: async_channel::Sender<String>)
where
Stdout: AsyncRead + Unpin + Send + 'static,
{
@@ -95,7 +94,7 @@ impl StdioTransport {
async fn handle_output<Stdin>(
stdin: Stdin,
- outbound_rx: channel::Receiver<String>,
+ outbound_rx: async_channel::Receiver<String>,
) -> Result<()>
where
Stdin: AsyncWrite + Unpin + Send + 'static,
@@ -112,7 +111,7 @@ impl StdioTransport {
Ok(())
}
- async fn handle_err<Stderr>(stderr: Stderr, stderr_tx: channel::Sender<String>)
+ async fn handle_err<Stderr>(stderr: Stderr, stderr_tx: async_channel::Sender<String>)
where
Stderr: AsyncRead + Unpin + Send + 'static,
{
@@ -6,6 +6,7 @@ edition.workspace = true
license = "GPL-3.0-or-later"
[dependencies]
+async-process.workspace = true
cfg-if.workspace = true
crash-handler.workspace = true
futures.workspace = true
@@ -14,7 +15,6 @@ minidumper.workspace = true
parking_lot.workspace = true
paths.workspace = true
release_channel.workspace = true
-smol.workspace = true
serde.workspace = true
serde_json.workspace = true
system_specs.workspace = true
@@ -8,7 +8,7 @@ use serde::{Deserialize, Serialize};
use std::mem;
#[cfg(not(target_os = "windows"))]
-use smol::process::Command;
+use async_process::Command;
use system_specs::GpuSpecs;
#[cfg(target_os = "macos")]
@@ -57,7 +57,11 @@ fn should_install_crash_handler() -> bool {
/// The synchronous portion (signal handlers, panic hook) runs inline.
/// The async keepalive task is passed to `spawn` so the caller decides
/// which executor to schedule it on.
-pub fn init(crash_init: InitCrashHandler, spawn: impl FnOnce(BoxFuture<'static, ()>)) {
+pub fn init<F: Future<Output = ()> + Send + Sync + 'static>(
+ crash_init: InitCrashHandler,
+ spawn: impl FnOnce(BoxFuture<'static, ()>),
+ wait_timer: impl (Fn(Duration) -> F) + Send + Sync + 'static,
+) {
if !should_install_crash_handler() {
let old_hook = panic::take_hook();
panic::set_hook(Box::new(move |info| {
@@ -102,12 +106,18 @@ pub fn init(crash_init: InitCrashHandler, spawn: impl FnOnce(BoxFuture<'static,
info!("crash signal handlers installed");
- spawn(Box::pin(connect_and_keepalive(crash_init, handler)));
+ spawn(Box::pin(connect_and_keepalive(
+ crash_init, handler, wait_timer,
+ )));
}
/// Spawn the crash-handler subprocess, connect the IPC client, and run the
/// keepalive ping loop. Called on a background executor by [`init`].
-async fn connect_and_keepalive(crash_init: InitCrashHandler, handler: CrashHandler) {
+async fn connect_and_keepalive<F: Future<Output = ()> + Send + Sync + 'static>(
+ crash_init: InitCrashHandler,
+ handler: CrashHandler,
+ wait_timer: impl (Fn(Duration) -> F) + Send + Sync + 'static,
+) {
let exe = env::current_exe().expect("unable to find ourselves");
let zed_pid = process::id();
let socket_name = paths::temp_dir().join(format!("zed-crash-handler-{zed_pid}"));
@@ -134,9 +144,7 @@ async fn connect_and_keepalive(crash_init: InitCrashHandler, handler: CrashHandl
break;
}
elapsed += retry_frequency;
- // Crash reporting is called outside of gpui in the remote server right now
- #[allow(clippy::disallowed_methods)]
- smol::Timer::after(retry_frequency).await;
+ wait_timer(retry_frequency).await;
}
let client = maybe_client.unwrap();
let client = Arc::new(client);
@@ -157,9 +165,7 @@ async fn connect_and_keepalive(crash_init: InitCrashHandler, handler: CrashHandl
loop {
client.ping().ok();
- // Crash reporting is called outside of gpui in the remote server right now
- #[allow(clippy::disallowed_methods)]
- smol::Timer::after(Duration::from_secs(10)).await;
+ wait_timer(Duration::from_secs(10)).await;
}
}
@@ -23,7 +23,6 @@ inventory.workspace = true
log.workspace = true
paths.workspace = true
release_channel.workspace = true
-smol.workspace = true
sqlez.workspace = true
sqlez_macros.workspace = true
util.workspace = true
@@ -9,7 +9,6 @@ use gpui::{App, AppContext, Global};
pub use indoc::indoc;
pub use inventory;
pub use paths::database_dir;
-pub use smol;
pub use sqlez;
pub use sqlez_macros;
pub use uuid;
@@ -18,6 +17,7 @@ pub use release_channel::RELEASE_CHANNEL;
use sqlez::domain::Migrator;
use sqlez::thread_safe_connection::ThreadSafeConnection;
use sqlez_macros::sql;
+use std::fs::create_dir_all;
use std::future::Future;
use std::path::Path;
use std::sync::atomic::AtomicBool;
@@ -62,7 +62,7 @@ impl AppDatabase {
pub fn new() -> Self {
let db_dir = database_dir();
let scope = RELEASE_CHANNEL.dev_name();
- let connection = smol::block_on(open_db::<AppMigrator>(db_dir, scope));
+ let connection = gpui::block_on(open_db::<AppMigrator>(db_dir, scope));
Self(connection)
}
@@ -71,7 +71,7 @@ impl AppDatabase {
#[cfg(any(test, feature = "test-support"))]
pub fn test_new() -> Self {
let name = format!("test-db-{}", uuid::Uuid::new_v4());
- let connection = smol::block_on(open_test_db::<AppMigrator>(&name));
+ let connection = gpui::block_on(open_test_db::<AppMigrator>(&name));
Self(connection)
}
@@ -151,8 +151,7 @@ pub async fn open_db<M: Migrator + 'static>(db_dir: &Path, scope: &str) -> Threa
let main_db_dir = db_dir.join(format!("0-{}", scope));
let connection = maybe!(async {
- smol::fs::create_dir_all(&main_db_dir)
- .await
+ create_dir_all(&main_db_dir)
.context("Could not create db directory")
.log_err()?;
let db_path = main_db_dir.join(Path::new(DB_FILE_NAME));
@@ -379,7 +378,7 @@ mod tests {
for _ in 0..10 {
let tmp_path = tempdir.path().to_path_buf();
let guard = thread::spawn(move || {
- let good_db = smol::block_on(open_db::<GoodDB>(
+ let good_db = gpui::block_on(open_db::<GoodDB>(
tmp_path.as_path(),
release_channel::ReleaseChannel::Dev.dev_name(),
));
@@ -243,7 +243,7 @@ impl std::ops::Deref for GlobalKeyValueStore {
static GLOBAL_KEY_VALUE_STORE: std::sync::LazyLock<GlobalKeyValueStore> =
std::sync::LazyLock::new(|| {
let db_dir = crate::database_dir();
- GlobalKeyValueStore(smol::block_on(crate::open_db::<GlobalKeyValueStore>(
+ GlobalKeyValueStore(gpui::block_on(crate::open_db::<GlobalKeyValueStore>(
db_dir, "global",
)))
});
@@ -970,7 +970,7 @@ fn main() {
match &command {
Command::ImportBatch(import_args) => {
- smol::block_on(async {
+ gpui::block_on(async {
match import_args.provider {
BatchProvider::Anthropic => {
let client = anthropic_client::AnthropicClient::batch(&paths::LLM_CACHE_DB)
@@ -1029,7 +1029,7 @@ fn main() {
output_dir,
fresh: synth_args.fresh,
};
- smol::block_on(async {
+ gpui::block_on(async {
if let Err(e) = run_synthesize(config).await {
eprintln!("Error: {:?}", e);
std::process::exit(1);
@@ -46,6 +46,7 @@ emojis.workspace = true
feature_flags.workspace = true
file_icons.workspace = true
futures.workspace = true
+futures-lite.workspace = true
fuzzy.workspace = true
fs.workspace = true
git.workspace = true
@@ -75,7 +76,6 @@ serde.workspace = true
serde_json.workspace = true
settings.workspace = true
smallvec.workspace = true
-smol.workspace = true
snippet.workspace = true
sum_tree.workspace = true
task.workspace = true
@@ -2577,9 +2577,9 @@ pub mod tests {
};
use lsp::LanguageServerId;
+ use futures::stream::StreamExt;
use rand::{Rng, prelude::*};
use settings::{SettingsContent, SettingsStore};
- use smol::stream::StreamExt;
use std::{env, sync::Arc};
use text::PointUtf16;
use theme::{LoadThemes, SyntaxTheme};
@@ -4,10 +4,11 @@ use super::{
fold_map::{Chunk, FoldRows},
tab_map::{self, TabEdit, TabPoint, TabSnapshot},
};
+
+use futures_lite::future::yield_now;
use gpui::{App, AppContext as _, Context, Entity, Font, LineWrapper, Pixels, Task};
use language::{LanguageAwareStyling, Point};
use multi_buffer::{MultiBufferSnapshot, RowInfo};
-use smol::future::yield_now;
use std::{cmp, collections::VecDeque, mem, ops::Range, sync::LazyLock, time::Duration};
use sum_tree::{Bias, Cursor, Dimensions, SumTree};
use text::Patch;
@@ -205,7 +206,7 @@ impl WrapMap {
}];
if total_rows < WRAP_YIELD_ROW_INTERVAL {
- let edits = smol::block_on(new_snapshot.update(
+ let edits = gpui::block_on(new_snapshot.update(
tab_snapshot,
&tab_edits,
wrap_width,
@@ -299,7 +300,7 @@ impl WrapMap {
< WRAP_YIELD_ROW_INTERVAL
&& let Some((tab_snapshot, tab_edits)) = pending_edits.pop_back()
{
- let wrap_edits = smol::block_on(snapshot.update(
+ let wrap_edits = gpui::block_on(snapshot.update(
tab_snapshot,
&tab_edits,
wrap_width,
@@ -1357,10 +1358,10 @@ mod tests {
display_map::{fold_map::FoldMap, inlay_map::InlayMap, tab_map::TabMap},
test::test_font,
};
+ use futures::stream::StreamExt;
use gpui::{LineFragment, px, test::observe};
use rand::prelude::*;
use settings::SettingsStore;
- use smol::stream::StreamExt;
use std::{cmp, env, num::NonZeroU32};
use text::Rope;
use theme::LoadThemes;
@@ -1196,12 +1196,12 @@ mod tests {
test::editor_lsp_test_context::EditorLspTestContext,
};
use collections::BTreeSet;
+ use futures::stream::StreamExt;
use gpui::App;
use indoc::indoc;
use markdown::parser::MarkdownEvent;
use project::InlayId;
use settings::InlayHintSettingsContent;
- use smol::stream::StreamExt;
use std::sync::atomic;
use std::sync::atomic::AtomicUsize;
use text::Bias;
@@ -3,6 +3,7 @@ use std::time::Duration;
use crate::Editor;
use collections::{HashMap, HashSet};
+use futures_lite::FutureExt as _;
use gpui::AsyncApp;
use gpui::{App, Entity, Task};
use language::Buffer;
@@ -14,7 +15,6 @@ use project::LocationLink;
use project::Project;
use project::TaskSourceKind;
use project::lsp_store::lsp_ext_command::GetLspRunnables;
-use smol::future::FutureExt as _;
use task::ResolvedTask;
use task::TaskContext;
use text::BufferId;
@@ -13,6 +13,7 @@ use serde_json::json;
use crate::{Editor, ToPoint};
use collections::HashSet;
use futures::Future;
+use futures::stream::StreamExt;
use gpui::{Context, Entity, Focusable as _, VisualTestContext, Window};
use indoc::indoc;
use language::{
@@ -21,7 +22,6 @@ use language::{
};
use lsp::{notification, request};
use project::Project;
-use smol::stream::StreamExt;
use workspace::{AppState, MultiWorkspace, Workspace, WorkspaceHandle};
use super::editor_test_context::{AssertionContextManager, EditorTestContext};
@@ -19,6 +19,7 @@ path = "tests/integration/main.rs"
[dependencies]
anyhow.workspace = true
+async-channel.workspace = true
async-tar.workspace = true
async-trait.workspace = true
collections.workspace = true
@@ -1,6 +1,8 @@
use crate::{FakeFs, FakeFsEntry, Fs, RemoveOptions, RenameOptions};
use anyhow::{Context as _, Result, bail};
+use async_channel::Sender;
use collections::{HashMap, HashSet};
+use futures::FutureExt as _;
use futures::future::{self, BoxFuture, join_all};
use git::{
Oid, RunHook,
@@ -21,7 +23,6 @@ use gpui::{AsyncApp, BackgroundExecutor, SharedString, Task};
use ignore::gitignore::GitignoreBuilder;
use parking_lot::Mutex;
use rope::Rope;
-use smol::{channel::Sender, future::FutureExt as _};
use std::{path::PathBuf, sync::Arc, sync::atomic::AtomicBool};
use text::LineEnding;
use util::{paths::PathStyle, rel_path::RelPath};
@@ -47,7 +48,7 @@ pub struct FakeCommitSnapshot {
#[derive(Debug, Clone)]
pub struct FakeGitRepositoryState {
pub commit_history: Vec<FakeCommitSnapshot>,
- pub event_emitter: smol::channel::Sender<PathBuf>,
+ pub event_emitter: async_channel::Sender<PathBuf>,
pub unmerged_paths: HashMap<RepoPath, UnmergedStatus>,
pub head_contents: HashMap<RepoPath, String>,
pub index_contents: HashMap<RepoPath, String>,
@@ -68,7 +69,7 @@ pub struct FakeGitRepositoryState {
}
impl FakeGitRepositoryState {
- pub fn new(event_emitter: smol::channel::Sender<PathBuf>) -> Self {
+ pub fn new(event_emitter: async_channel::Sender<PathBuf>) -> Self {
FakeGitRepositoryState {
event_emitter,
head_contents: Default::default(),
@@ -1070,7 +1070,7 @@ impl Fs for RealFs {
use util::{ResultExt as _, paths::SanitizedPath};
let executor = self.executor.clone();
- let (tx, rx) = smol::channel::unbounded();
+ let (tx, rx) = async_channel::unbounded();
let pending_paths: Arc<Mutex<Vec<PathEvent>>> = Default::default();
let watcher = Arc::new(fs_watcher::FsWatcher::new(tx, pending_paths.clone()));
@@ -1307,8 +1307,8 @@ struct FakeFsState {
root: FakeFsEntry,
next_inode: u64,
next_mtime: SystemTime,
- git_event_tx: smol::channel::Sender<PathBuf>,
- event_txs: Vec<(PathBuf, smol::channel::Sender<Vec<PathEvent>>)>,
+ git_event_tx: async_channel::Sender<PathBuf>,
+ event_txs: Vec<(PathBuf, async_channel::Sender<Vec<PathEvent>>)>,
events_paused: bool,
buffered_events: Vec<PathEvent>,
metadata_call_count: usize,
@@ -1578,7 +1578,7 @@ impl FakeFs {
const SYSTEMTIME_INTERVAL: Duration = Duration::from_nanos(100);
pub fn new(executor: gpui::BackgroundExecutor) -> Arc<Self> {
- let (tx, rx) = smol::channel::bounded::<PathBuf>(10);
+ let (tx, rx) = async_channel::bounded::<PathBuf>(10);
let this = Arc::new_cyclic(|this| Self {
this: this.clone(),
@@ -2543,7 +2543,7 @@ impl FakeFsEntry {
#[cfg(feature = "test-support")]
struct FakeWatcher {
- tx: smol::channel::Sender<Vec<PathEvent>>,
+ tx: async_channel::Sender<Vec<PathEvent>>,
original_path: PathBuf,
fs_state: Arc<Mutex<FakeFsState>>,
prefixes: Mutex<Vec<PathBuf>>,
@@ -3012,7 +3012,7 @@ impl Fs for FakeFs {
Arc<dyn Watcher>,
) {
self.simulate_random_delay().await;
- let (tx, rx) = smol::channel::unbounded();
+ let (tx, rx) = async_channel::unbounded();
let path = path.to_path_buf();
self.state.lock().event_txs.push((path.clone(), tx.clone()));
let executor = self.executor.clone();
@@ -11,14 +11,14 @@ use util::{ResultExt, paths::SanitizedPath};
use crate::{PathEvent, PathEventKind, Watcher};
pub struct FsWatcher {
- tx: smol::channel::Sender<()>,
+ tx: async_channel::Sender<()>,
pending_path_events: Arc<Mutex<Vec<PathEvent>>>,
registrations: Mutex<BTreeMap<Arc<std::path::Path>, WatcherRegistrationId>>,
}
impl FsWatcher {
pub fn new(
- tx: smol::channel::Sender<()>,
+ tx: async_channel::Sender<()>,
pending_path_events: Arc<Mutex<Vec<PathEvent>>>,
) -> Self {
Self {
@@ -439,7 +439,7 @@ async fn test_realfs_atomic_write(executor: BackgroundExecutor) {
// drop(file); // We still hold the file handle here
let content = std::fs::read_to_string(&file_to_be_replaced).unwrap();
assert_eq!(content, "Hello");
- smol::block_on(fs.atomic_write(file_to_be_replaced.clone(), "World".into())).unwrap();
+ gpui::block_on(fs.atomic_write(file_to_be_replaced.clone(), "World".into())).unwrap();
let content = std::fs::read_to_string(&file_to_be_replaced).unwrap();
assert_eq!(content, "World");
}
@@ -449,7 +449,7 @@ async fn test_realfs_atomic_write_non_existing_file(executor: BackgroundExecutor
let fs = RealFs::new(None, executor);
let temp_dir = TempDir::new().unwrap();
let file_to_be_replaced = temp_dir.path().join("file.txt");
- smol::block_on(fs.atomic_write(file_to_be_replaced.clone(), "Hello".into())).unwrap();
+ gpui::block_on(fs.atomic_write(file_to_be_replaced.clone(), "Hello".into())).unwrap();
let content = std::fs::read_to_string(&file_to_be_replaced).unwrap();
assert_eq!(content, "Hello");
}
@@ -594,7 +594,7 @@ async fn test_realfs_broken_symlink_metadata(executor: BackgroundExecutor) {
let path = tempdir.path();
let fs = RealFs::new(None, executor);
let symlink_path = path.join("symlink");
- smol::block_on(fs.create_symlink(&symlink_path, PathBuf::from("file_a.txt"))).unwrap();
+ gpui::block_on(fs.create_symlink(&symlink_path, PathBuf::from("file_a.txt"))).unwrap();
let metadata = fs
.metadata(&symlink_path)
.await
@@ -614,7 +614,7 @@ async fn test_realfs_symlink_loop_metadata(executor: BackgroundExecutor) {
let path = tempdir.path();
let fs = RealFs::new(None, executor);
let symlink_path = path.join("symlink");
- smol::block_on(fs.create_symlink(&symlink_path, PathBuf::from("symlink"))).unwrap();
+ gpui::block_on(fs.create_symlink(&symlink_path, PathBuf::from("symlink"))).unwrap();
let metadata = fs
.metadata(&symlink_path)
.await
@@ -32,6 +32,7 @@ rope.workspace = true
schemars.workspace = true
serde.workspace = true
smallvec.workspace = true
+async-channel.workspace = true
smol.workspace = true
sum_tree.workspace = true
text.workspace = true
@@ -3,6 +3,7 @@ use crate::stash::GitStash;
use crate::status::{DiffTreeType, GitStatus, StatusCode, TreeDiff};
use crate::{Oid, RunHook, SHORT_SHA_LENGTH};
use anyhow::{Context as _, Result, anyhow, bail};
+use async_channel::Sender;
use collections::HashMap;
use futures::channel::oneshot;
use futures::future::BoxFuture;
@@ -15,7 +16,6 @@ use rope::Rope;
use schemars::JsonSchema;
use serde::Deserialize;
use smallvec::SmallVec;
-use smol::channel::Sender;
use smol::io::{AsyncBufReadExt, AsyncReadExt, BufReader};
use text::LineEnding;
@@ -124,7 +124,7 @@ struct CommitDataRequest {
}
pub struct CommitDataReader {
- request_tx: smol::channel::Sender<CommitDataRequest>,
+ request_tx: async_channel::Sender<CommitDataRequest>,
_task: Task<()>,
}
@@ -2998,7 +2998,7 @@ impl GitRepository for RealGitRepository {
fn commit_data_reader(&self) -> Result<CommitDataReader> {
let git_binary = self.git_binary()?;
- let (request_tx, request_rx) = smol::channel::bounded::<CommitDataRequest>(64);
+ let (request_tx, request_rx) = async_channel::bounded::<CommitDataRequest>(64);
let task = self.executor.spawn(async move {
if let Err(error) = run_commit_data_reader(git_binary, request_rx).await {
@@ -3024,7 +3024,7 @@ impl GitRepository for RealGitRepository {
async fn run_commit_data_reader(
git: GitBinary,
- request_rx: smol::channel::Receiver<CommitDataRequest>,
+ request_rx: async_channel::Receiver<CommitDataRequest>,
) -> Result<()> {
let mut process = git
.build_command(&["cat-file", "--batch"])
@@ -21,6 +21,7 @@ test-support = [
[dependencies]
anyhow.workspace = true
+async-channel.workspace = true
collections.workspace = true
db.workspace = true
editor.workspace = true
@@ -33,7 +34,6 @@ project.workspace = true
search.workspace = true
settings.workspace = true
smallvec.workspace = true
-smol.workspace = true
theme.workspace = true
theme_settings.workspace = true
time.workspace = true
@@ -1396,7 +1396,7 @@ impl GitGraph {
return;
}
- let (request_tx, request_rx) = smol::channel::unbounded::<Oid>();
+ let (request_tx, request_rx) = async_channel::unbounded::<Oid>();
repo.update(cx, |repo, cx| {
repo.search_commits(
@@ -28,6 +28,7 @@ db.workspace = true
editor.workspace = true
file_icons.workspace = true
futures.workspace = true
+futures-lite.workspace = true
fuzzy.workspace = true
git.workspace = true
gpui.workspace = true
@@ -52,7 +53,6 @@ serde.workspace = true
serde_json.workspace = true
settings.workspace = true
smallvec.workspace = true
-smol.workspace = true
strum.workspace = true
telemetry.workspace = true
theme.workspace = true
@@ -14,6 +14,7 @@ use editor::{
multibuffer_context_lines,
scroll::Autoscroll,
};
+use futures_lite::future::yield_now;
use git::repository::DiffType;
use git::{
@@ -34,7 +35,6 @@ use project::{
},
};
use settings::{Settings, SettingsStore};
-use smol::future::yield_now;
use std::any::{Any, TypeId};
use std::sync::Arc;
use theme::ActiveTheme;
@@ -122,6 +122,8 @@ pub use util::{FutureExt, Timeout};
pub use view::*;
pub use window::*;
+pub use pollster::block_on;
+
/// The context trait, allows the different contexts in GPUI to be used
/// interchangeably for certain operations.
pub trait AppContext {
@@ -186,7 +186,7 @@ pub fn observe<T: 'static>(entity: &Entity<T>, cx: &mut TestAppContext) -> Obser
let (tx, rx) = async_channel::unbounded();
let _subscription = cx.update(|cx| {
cx.observe(entity, move |_, _| {
- let _ = pollster::block_on(tx.send(()));
+ let _ = gpui::block_on(tx.send(()));
})
});
let rx = Box::pin(rx);
@@ -358,8 +358,8 @@ impl WgpuAtlasTexture {
#[cfg(all(test, not(target_family = "wasm")))]
mod tests {
use super::*;
+ use gpui::block_on;
use gpui::{ImageId, RenderImageParams};
- use pollster::block_on;
use std::sync::Arc;
fn test_device_and_queue() -> anyhow::Result<(Arc<wgpu::Device>, Arc<wgpu::Queue>)> {
@@ -42,7 +42,7 @@ impl WgpuContext {
// Select an adapter by actually testing surface configuration with the real device.
// This is the only reliable way to determine compatibility on hybrid GPU systems.
let (adapter, device, queue, dual_source_blending) =
- pollster::block_on(Self::select_adapter_and_device(
+ gpui::block_on(Self::select_adapter_and_device(
&instance,
device_id_filter,
surface,
@@ -35,6 +35,7 @@ ec4rs.workspace = true
encoding_rs.workspace = true
fs.workspace = true
futures.workspace = true
+futures-lite.workspace = true
fuzzy.workspace = true
globset.workspace = true
gpui.workspace = true
@@ -55,7 +56,6 @@ serde_json.workspace = true
settings.workspace = true
shellexpand.workspace = true
smallvec.workspace = true
-smol.workspace = true
streaming-iterator.workspace = true
strsim.workspace = true
sum_tree.workspace = true
@@ -27,6 +27,7 @@ use collections::{HashMap, HashSet};
use encoding_rs::Encoding;
use fs::MTime;
use futures::channel::oneshot;
+use futures_lite::future::yield_now;
use gpui::{
App, AppContext as _, Context, Entity, EventEmitter, HighlightStyle, SharedString, StyledText,
Task, TextStyle,
@@ -36,7 +37,6 @@ use lsp::LanguageServerId;
use parking_lot::Mutex;
use settings::WorktreeId;
use smallvec::SmallVec;
-use smol::future::yield_now;
use std::{
any::Any,
borrow::Cow,
@@ -3,6 +3,7 @@ use crate::Buffer;
use clock::ReplicaId;
use collections::BTreeMap;
use futures::FutureExt as _;
+use futures_lite::future::yield_now;
use gpui::{App, AppContext as _, BorrowAppContext, Entity};
use gpui::{HighlightStyle, TestAppContext};
use indoc::indoc;
@@ -559,7 +560,7 @@ async fn test_normalize_whitespace(cx: &mut gpui::TestAppContext) {
// Spawn a task to format the buffer's whitespace.
// Pause so that the formatting task starts running.
let format = buffer.update(cx, |buffer, cx| buffer.remove_trailing_whitespace(cx));
- smol::future::yield_now().await;
+ yield_now().await;
// Edit the buffer while the normalization task is running.
let version_before_edit = buffer.update(cx, |buffer, _| buffer.version());
@@ -36,6 +36,7 @@ use http_client::HttpClient;
pub use language_core::highlight_map::{HighlightId, HighlightMap};
+use futures::future::FutureExt as _;
pub use language_core::{
BlockCommentConfig, BracketPair, BracketPairConfig, BracketPairContent, BracketsConfig,
BracketsPatternConfig, CodeLabel, CodeLabelBuilder, DebugVariablesConfig, DebuggerTextObject,
@@ -60,7 +61,6 @@ use regex::Regex;
use semver::Version;
use serde_json::Value;
use settings::WorktreeId;
-use smol::future::FutureExt as _;
use std::{
ffi::OsStr,
fmt::Debug,
@@ -33,7 +33,7 @@ use sum_tree::Bias;
use text::{Point, Rope};
use theme::Theme;
use unicase::UniCase;
-use util::{ResultExt, maybe, post_inc};
+use util::{maybe, post_inc};
pub struct LanguageRegistry {
state: RwLock<LanguageRegistryState>,
@@ -1092,18 +1092,6 @@ impl LanguageRegistry {
) -> mpsc::UnboundedReceiver<(LanguageServerName, BinaryStatus)> {
self.lsp_binary_status_tx.subscribe()
}
-
- pub async fn delete_server_container(&self, name: LanguageServerName) {
- log::info!("deleting server container");
- let Some(dir) = self.language_server_download_dir(&name) else {
- return;
- };
-
- smol::fs::remove_dir_all(dir)
- .await
- .context("server container removal")
- .log_err();
- }
}
impl LanguageRegistryState {
@@ -14,6 +14,7 @@ doctest = false
[dependencies]
anyhow.workspace = true
+async-lock.workspace = true
cloud_llm_client.workspace = true
futures.workspace = true
gpui_shared_string.workspace = true
@@ -22,6 +23,5 @@ partial-json-fixer.workspace = true
schemars.workspace = true
serde.workspace = true
serde_json.workspace = true
-smol.workspace = true
strum.workspace = true
thiserror.workspace = true
@@ -1,5 +1,5 @@
+use async_lock::{Semaphore, SemaphoreGuardArc};
use futures::Stream;
-use smol::lock::{Semaphore, SemaphoreGuardArc};
use std::{
future::Future,
pin::Pin,
@@ -13,6 +13,7 @@ path = "src/language_models.rs"
[dependencies]
ai_onboarding.workspace = true
+async-lock.workspace = true
anthropic = { workspace = true, features = ["schemars"] }
anyhow.workspace = true
aws-config = { workspace = true, features = ["behavior-version-latest"] }
@@ -55,7 +56,6 @@ schemars.workspace = true
serde.workspace = true
serde_json.workspace = true
settings.workspace = true
-smol.workspace = true
strum.workspace = true
tiktoken-rs.workspace = true
tokio = { workspace = true, features = ["rt", "rt-multi-thread"] }
@@ -2,6 +2,7 @@ use std::pin::Pin;
use std::sync::Arc;
use anyhow::{Context as _, Result, anyhow};
+use async_lock::OnceCell;
use aws_config::stalled_stream_protection::StalledStreamProtectionConfig;
use aws_config::{BehaviorVersion, Region};
use aws_credential_types::{Credentials, Token};
@@ -40,7 +41,6 @@ use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use serde_json::Value;
use settings::{BedrockAvailableModel as AvailableModel, Settings, SettingsStore};
-use smol::lock::OnceCell;
use std::sync::LazyLock;
use strum::{EnumIter, IntoEnumIterator, IntoStaticStr};
use ui::{ButtonLink, ConfiguredApiCard, Divider, List, ListBulletItem, prelude::*};
@@ -25,7 +25,6 @@ schemars.workspace = true
semver.workspace = true
serde.workspace = true
serde_json.workspace = true
-smol.workspace = true
thiserror.workspace = true
x_ai = { workspace = true, features = ["schemars"] }
@@ -8,8 +8,9 @@ use cloud_llm_client::{
ZED_VERSION_HEADER_NAME,
};
use futures::{
- AsyncBufReadExt, FutureExt, Stream, StreamExt,
+ AsyncBufReadExt, AsyncReadExt as _, FutureExt, Stream, StreamExt,
future::BoxFuture,
+ io::BufReader,
stream::{self, BoxStream},
};
use google_ai::GoogleModelMode;
@@ -31,7 +32,6 @@ use language_model::{
use schemars::JsonSchema;
use semver::Version;
use serde::{Deserialize, Serialize, de::DeserializeOwned};
-use smol::io::{AsyncReadExt, BufReader};
use std::collections::VecDeque;
use std::pin::Pin;
use std::str::FromStr;
@@ -26,6 +26,7 @@ anyhow.workspace = true
clock.workspace = true
collections.workspace = true
ctor.workspace = true
+futures-lite.workspace = true
buffer_diff.workspace = true
gpui.workspace = true
itertools.workspace = true
@@ -34,7 +35,6 @@ log.workspace = true
parking_lot.workspace = true
rand.workspace = true
rope.workspace = true
-smol.workspace = true
settings.workspace = true
serde.workspace = true
smallvec.workspace = true
@@ -16,6 +16,7 @@ use buffer_diff::{
};
use clock::ReplicaId;
use collections::{BTreeMap, Bound, HashMap, HashSet};
+use futures_lite::future::yield_now;
use gpui::{App, Context, Entity, EventEmitter};
use itertools::Itertools;
use language::{
@@ -33,7 +34,6 @@ use gpui::AppContext as _;
use rope::DimensionPair;
use settings::Settings;
use smallvec::SmallVec;
-use smol::future::yield_now;
use std::{
any::type_name,
borrow::Cow,
@@ -24,8 +24,8 @@ test-support = [
anyhow.workspace = true
channel.workspace = true
client.workspace = true
+futures-lite.workspace = true
component.workspace = true
-db.workspace = true
gpui.workspace = true
rpc.workspace = true
sum_tree.workspace = true
@@ -1,7 +1,7 @@
use anyhow::{Context as _, Result};
use channel::ChannelStore;
use client::{ChannelId, Client, UserStore};
-use db::smol::stream::StreamExt;
+use futures_lite::stream::StreamExt;
use gpui::{App, AppContext as _, AsyncApp, Context, Entity, EventEmitter, Global, Task};
use rpc::{Notification, TypedEnvelope, proto};
use std::{ops::Range, sync::Arc};
@@ -20,7 +20,6 @@ language.workspace = true
ordered-float.workspace = true
picker.workspace = true
settings.workspace = true
-smol.workspace = true
theme.workspace = true
theme_settings.workspace = true
ui.workspace = true
@@ -40,3 +39,4 @@ rope.workspace = true
serde_json.workspace = true
settings = { workspace = true, features = ["test-support"] }
workspace = { workspace = true, features = ["test-support"] }
+futures.workspace = true
@@ -454,13 +454,13 @@ mod tests {
use std::time::Duration;
use super::*;
+ use futures::stream::StreamExt as _;
use gpui::{TestAppContext, UpdateGlobal, VisualTestContext};
use indoc::indoc;
use language::FakeLspAdapter;
use project::{FakeFs, Project};
use serde_json::json;
use settings::SettingsStore;
- use smol::stream::StreamExt as _;
use util::{path, rel_path::rel_path};
use workspace::{AppState, MultiWorkspace, Workspace};
@@ -14,11 +14,13 @@ doctest = false
[dependencies]
anyhow.workspace = true
+async-channel.workspace = true
collections.workspace = true
db.workspace = true
editor.workspace = true
file_icons.workspace = true
fuzzy.workspace = true
+futures.workspace = true
gpui.workspace = true
itertools.workspace = true
language.workspace = true
@@ -31,7 +33,6 @@ serde.workspace = true
serde_json.workspace = true
settings.workspace = true
smallvec.workspace = true
-smol.workspace = true
theme.workspace = true
theme_settings.workspace = true
ui.workspace = true
@@ -46,7 +46,6 @@ use project::{File, Fs, GitEntry, GitTraversal, Project, ProjectItem};
use search::{BufferSearchBar, ProjectSearchView};
use serde::{Deserialize, Serialize};
use settings::{Settings, SettingsStore};
-use smol::channel;
use theme::SyntaxTheme;
use theme_settings::ThemeSettings;
use ui::{
@@ -155,7 +154,7 @@ struct SearchState {
kind: SearchKind,
query: String,
matches: Vec<(Range<editor::Anchor>, Arc<OnceLock<SearchData>>)>,
- highlight_search_match_tx: channel::Sender<HighlightArguments>,
+ highlight_search_match_tx: async_channel::Sender<HighlightArguments>,
_search_match_highlighter: Task<()>,
_search_match_notify: Task<()>,
}
@@ -176,8 +175,8 @@ impl SearchState {
window: &mut Window,
cx: &mut Context<OutlinePanel>,
) -> Self {
- let (highlight_search_match_tx, highlight_search_match_rx) = channel::unbounded();
- let (notify_tx, notify_rx) = channel::unbounded::<()>();
+ let (highlight_search_match_tx, highlight_search_match_rx) = async_channel::unbounded();
+ let (notify_tx, notify_rx) = async_channel::unbounded::<()>();
Self {
kind,
query,
@@ -5236,6 +5235,7 @@ impl GenerationState {
#[cfg(test)]
mod tests {
use db::indoc;
+ use futures::stream::StreamExt as _;
use gpui::{TestAppContext, UpdateGlobal, VisualTestContext, WindowHandle};
use language::{self, FakeLspAdapter, markdown_lang, rust_lang};
use pretty_assertions::assert_eq;
@@ -5245,7 +5245,6 @@ mod tests {
project_search::{self, perform_project_search},
};
use serde_json::json;
- use smol::stream::StreamExt as _;
use util::path;
use workspace::{MultiWorkspace, OpenOptions, OpenVisible, ToolbarItemView};
@@ -38,6 +38,7 @@ aho-corasick.workspace = true
anyhow.workspace = true
askpass.workspace = true
async-trait.workspace = true
+async-channel.workspace = true
base64.workspace = true
buffer_diff.workspace = true
circular-buffer.workspace = true
@@ -45,7 +45,7 @@ pub struct BufferStore {
#[derive(Default)]
struct RemoteProjectSearchState {
// List of ongoing project search chunks from our remote host. Used by the side issuing a search RPC request.
- chunks: HashMap<u64, smol::channel::Sender<BufferId>>,
+ chunks: HashMap<u64, async_channel::Sender<BufferId>>,
    // Monotonically-increasing handle to hand out to remote host in order to identify the project search result chunk.
next_id: u64,
// Used by the side running the actual search for match candidates to potentially cancel the search prematurely.
@@ -1709,8 +1709,8 @@ impl BufferStore {
pub(crate) fn register_project_search_result_handle(
&mut self,
- ) -> (u64, smol::channel::Receiver<BufferId>) {
- let (tx, rx) = smol::channel::unbounded();
+ ) -> (u64, async_channel::Receiver<BufferId>) {
+ let (tx, rx) = async_channel::unbounded();
let handle = util::post_inc(&mut self.project_search.next_id);
let _old_entry = self.project_search.chunks.insert(handle, tx);
debug_assert!(_old_entry.is_none());
@@ -307,7 +307,7 @@ pub struct JobInfo {
struct GraphCommitDataHandler {
_task: Task<()>,
- commit_data_request: smol::channel::Sender<Oid>,
+ commit_data_request: async_channel::Sender<Oid>,
}
enum GraphCommitHandlerState {
@@ -4727,7 +4727,7 @@ impl Repository {
&mut self,
log_source: LogSource,
search_args: SearchCommitArgs,
- request_tx: smol::channel::Sender<Oid>,
+ request_tx: async_channel::Sender<Oid>,
cx: &mut Context<Self>,
) {
let repository_state = self.repository_state.clone();
@@ -4827,7 +4827,7 @@ impl Repository {
cx: &mut AsyncApp,
) -> Result<(), SharedString> {
let (request_tx, request_rx) =
- smol::channel::unbounded::<Vec<Arc<InitialGraphCommitData>>>();
+ async_channel::unbounded::<Vec<Arc<InitialGraphCommitData>>>();
let task = cx.background_executor().spawn({
let log_source = log_source.clone();
@@ -4898,8 +4898,8 @@ impl Repository {
self.graph_commit_data_handler = GraphCommitHandlerState::Starting;
let state = self.repository_state.clone();
- let (result_tx, result_rx) = smol::channel::bounded::<(Oid, GraphCommitData)>(64);
- let (request_tx, request_rx) = smol::channel::unbounded::<Oid>();
+ let (result_tx, result_rx) = async_channel::bounded::<(Oid, GraphCommitData)>(64);
+ let (request_tx, request_rx) = async_channel::unbounded::<Oid>();
let foreground_task = cx.spawn(async move |this, cx| {
while let Ok((sha, commit_data)) = result_rx.recv().await {
@@ -1178,7 +1178,7 @@ impl LocalLspStore {
async move {
let actions = params.actions.unwrap_or_default();
let message = params.message.clone();
- let (tx, rx) = smol::channel::bounded::<MessageActionItem>(1);
+ let (tx, rx) = async_channel::bounded::<MessageActionItem>(1);
let level = match params.typ {
lsp::MessageType::ERROR => PromptLevel::Critical,
lsp::MessageType::WARNING => PromptLevel::Warning,
@@ -1224,7 +1224,7 @@ impl LocalLspStore {
let name = name.to_string();
let mut cx = cx.clone();
- let (tx, _) = smol::channel::bounded(1);
+ let (tx, _) = async_channel::bounded(1);
let level = match params.typ {
lsp::MessageType::ERROR => PromptLevel::Critical,
lsp::MessageType::WARNING => PromptLevel::Warning,
@@ -13817,7 +13817,7 @@ pub struct LanguageServerPromptRequest {
pub message: String,
pub actions: Vec<MessageActionItem>,
pub lsp_name: String,
- pub(crate) response_channel: smol::channel::Sender<MessageActionItem>,
+ pub(crate) response_channel: async_channel::Sender<MessageActionItem>,
}
impl LanguageServerPromptRequest {
@@ -13826,7 +13826,7 @@ impl LanguageServerPromptRequest {
message: String,
actions: Vec<MessageActionItem>,
lsp_name: String,
- response_channel: smol::channel::Sender<MessageActionItem>,
+ response_channel: async_channel::Sender<MessageActionItem>,
) -> Self {
let id = NEXT_PROMPT_REQUEST_ID.fetch_add(1, atomic::Ordering::AcqRel);
LanguageServerPromptRequest {
@@ -13853,7 +13853,7 @@ impl LanguageServerPromptRequest {
actions: Vec<MessageActionItem>,
lsp_name: String,
) -> Self {
- let (tx, _rx) = smol::channel::unbounded();
+ let (tx, _rx) = async_channel::unbounded();
LanguageServerPromptRequest::new(level, message, actions, lsp_name, tx)
}
}
@@ -5193,7 +5193,7 @@ impl Project {
envelope: TypedEnvelope<proto::LanguageServerPromptRequest>,
mut cx: AsyncApp,
) -> Result<proto::LanguageServerPromptResponse> {
- let (tx, rx) = smol::channel::bounded(1);
+ let (tx, rx) = async_channel::bounded(1);
let actions: Vec<_> = envelope
.payload
.actions
@@ -10,6 +10,7 @@ use std::{
};
use anyhow::Context;
+use async_channel::{Receiver, Sender, bounded, unbounded};
use collections::HashSet;
use fs::Fs;
use futures::FutureExt as _;
@@ -19,7 +20,6 @@ use language::{Buffer, BufferSnapshot};
use parking_lot::Mutex;
use postage::oneshot;
use rpc::{AnyProtoClient, proto};
-use smol::channel::{Receiver, Sender, bounded, unbounded};
use util::{ResultExt, maybe, paths::compare_rel_paths, rel_path::RelPath};
use worktree::{Entry, ProjectEntryId, Snapshot, Worktree, WorktreeSettings};
@@ -2,12 +2,12 @@ use anyhow::Result;
use collections::HashMap;
use gpui::{App, AppContext as _, Context, Entity, Task, WeakEntity};
+use async_channel::bounded;
use futures::{FutureExt, future::Shared};
use itertools::Itertools as _;
use language::LanguageName;
use remote::RemoteClient;
use settings::{Settings, SettingsLocation};
-use smol::channel::bounded;
use std::{
path::{Path, PathBuf},
sync::Arc,
@@ -44,7 +44,6 @@ semver.workspace = true
serde.workspace = true
serde_json.workspace = true
settings.workspace = true
-smol.workspace = true
task.workspace = true
telemetry.workspace = true
ui.workspace = true
@@ -951,7 +951,7 @@ impl PickerDelegate for RecentProjectsDelegate {
.map(|(id, folder)| StringMatchCandidate::new(id, folder.name.as_ref()))
.collect();
- smol::block_on(fuzzy::match_strings(
+ gpui::block_on(fuzzy::match_strings(
&candidates,
query,
smart_case,
@@ -977,7 +977,7 @@ impl PickerDelegate for RecentProjectsDelegate {
})
.collect();
- let mut project_group_matches = smol::block_on(fuzzy::match_strings(
+ let mut project_group_matches = gpui::block_on(fuzzy::match_strings(
&project_group_candidates,
query,
smart_case,
@@ -1009,7 +1009,7 @@ impl PickerDelegate for RecentProjectsDelegate {
})
.collect();
- let mut recent_matches = smol::block_on(fuzzy::match_strings(
+ let mut recent_matches = gpui::block_on(fuzzy::match_strings(
&recent_candidates,
query,
smart_case,
@@ -12,7 +12,7 @@ use dev_container::{
use editor::Editor;
use extension_host::ExtensionStore;
-use futures::{FutureExt, channel::oneshot, future::Shared};
+use futures::{FutureExt, StreamExt as _, channel::oneshot, future::Shared};
use gpui::{
Action, AnyElement, App, ClickEvent, ClipboardItem, Context, DismissEvent, Entity,
EventEmitter, FocusHandle, Focusable, PromptLevel, ScrollHandle, Subscription, Task,
@@ -31,7 +31,6 @@ use settings::{
RemoteProject, RemoteSettingsContent, Settings as _, SettingsStore, update_settings_file,
watch_config_file,
};
-use smol::stream::StreamExt as _;
use std::{
borrow::Cow,
collections::BTreeSet,
@@ -235,7 +235,7 @@ impl PickerDelegate for SidebarRecentProjectsDelegate {
})
.collect();
} else {
- let mut matches = smol::block_on(fuzzy::match_strings(
+ let mut matches = gpui::block_on(fuzzy::match_strings(
&candidates,
query,
smart_case,
@@ -122,7 +122,7 @@ impl picker::PickerDelegate for WslPickerDelegate {
let query = query.trim_start();
let smart_case = query.chars().any(|c| c.is_uppercase());
- self.matches = smol::block_on(fuzzy::match_strings(
+ self.matches = gpui::block_on(fuzzy::match_strings(
candidates.as_slice(),
query,
smart_case,
@@ -23,6 +23,7 @@ test-support = ["fs/test-support"]
[dependencies]
anyhow.workspace = true
+async-channel.workspace = true
askpass.workspace = true
clap.workspace = true
client.workspace = true
@@ -41,6 +41,7 @@ use rpc::proto::{self, Envelope, REMOTE_SERVER_PROJECT_ID};
use rpc::{AnyProtoClient, TypedEnvelope};
use settings::{Settings, SettingsStore, watch_config_file};
use smol::{
+ Timer,
channel::{Receiver, Sender},
io::AsyncReadExt,
stream::StreamExt as _,
@@ -181,7 +182,7 @@ fn init_logging_server(log_file_path: &Path) -> Result<Receiver<Vec<u8>>> {
.open(log_file_path)
.context("Failed to open log file in append mode")?;
- let (tx, rx) = smol::channel::unbounded();
+ let (tx, rx) = async_channel::unbounded();
let target = Box::new(MultiWrite {
file: log_file,
@@ -472,6 +473,9 @@ pub fn execute_run(
|task| {
app.background_executor().spawn(task).detach();
},
+ // we are running outside gpui
+ #[allow(clippy::disallowed_methods)]
+ |duration| FutureExt::map(Timer::after(duration), |_| ()),
);
let log_rx = init_logging_server(&log_file)?;
log::info!(
@@ -727,6 +731,9 @@ pub(crate) fn execute_proxy(
|task| {
smol::spawn(task).detach();
},
+ // we are running outside gpui
+ #[allow(clippy::disallowed_methods)]
+ |duration| FutureExt::map(Timer::after(duration), |_| ()),
);
log::info!("starting proxy process. PID: {}", std::process::id());
@@ -755,7 +762,7 @@ pub(crate) fn execute_proxy(
);
kill_running_server(pid, &server_paths)?;
}
- smol::block_on(spawn_server(&server_paths)).map_err(ExecuteProxyError::SpawnServer)?;
+ gpui::block_on(spawn_server(&server_paths)).map_err(ExecuteProxyError::SpawnServer)?;
std::fs::read_to_string(&server_paths.pid_file)
.and_then(|contents| {
contents.parse::<u32>().map_err(|_| {
@@ -826,7 +833,7 @@ pub(crate) fn execute_proxy(
}
});
- if let Err(forwarding_result) = smol::block_on(async move {
+ if let Err(forwarding_result) = gpui::block_on(async move {
futures::select! {
result = stdin_task.fuse() => result.map_err(ExecuteProxyError::StdinTask),
result = stdout_task.fuse() => result.map_err(ExecuteProxyError::StdoutTask),
@@ -834,7 +841,7 @@ pub(crate) fn execute_proxy(
}
}) {
log::error!("encountered error while forwarding messages: {forwarding_result:#}",);
- if !matches!(smol::block_on(check_server_running(server_pid)), Ok(true)) {
+ if !matches!(gpui::block_on(check_server_running(server_pid)), Ok(true)) {
log::error!("server exited unexpectedly");
return Err(ExecuteProxyError::ServerNotRunning(
ProxyLaunchError::ServerNotRunning,
@@ -27,6 +27,7 @@ bitflags.workspace = true
collections.workspace = true
editor.workspace = true
fs.workspace = true
+futures-lite.workspace = true
futures.workspace = true
gpui.workspace = true
language.workspace = true
@@ -36,7 +37,6 @@ project.workspace = true
serde.workspace = true
serde_json.workspace = true
settings.workspace = true
-smol.workspace = true
theme.workspace = true
theme_settings.workspace = true
ui.workspace = true
@@ -1907,10 +1907,10 @@ mod tests {
DisplayPoint, Editor, MultiBuffer, PathKey, SearchSettings, SelectionEffects,
display_map::DisplayRow, test::editor_test_context::EditorTestContext,
};
+ use futures::stream::StreamExt as _;
use gpui::{Hsla, TestAppContext, UpdateGlobal, VisualTestContext};
use language::{Buffer, Point};
use settings::{SearchSettingsContent, SettingsStore};
- use smol::stream::StreamExt as _;
use unindent::Unindent as _;
use util_macros::perf;
@@ -467,7 +467,7 @@ impl ProjectSearch {
while let Some(new_ranges) = new_ranges.next().await {
// `new_ranges.next().await` likely never gets hit while still pending so `async_task`
// will not reschedule, starving other front end tasks, insert a yield point for that here
- smol::future::yield_now().await;
+ futures_lite::future::yield_now().await;
project_search
.update(cx, |project_search, cx| {
project_search.match_ranges.extend(new_ranges);
@@ -17,6 +17,7 @@ default = []
[dependencies]
acp_thread.workspace = true
action_log.workspace = true
+async-channel.workspace = true
agent.workspace = true
agent-client-protocol.workspace = true
agent_settings.workspace = true
@@ -37,7 +38,6 @@ remote_connection.workspace = true
serde.workspace = true
serde_json.workspace = true
settings.workspace = true
-smol.workspace = true
theme.workspace = true
theme_settings.workspace = true
ui.workspace = true
@@ -2912,7 +2912,7 @@ impl Sidebar {
session_id: &acp::SessionId,
neighbor: Option<&ThreadMetadata>,
thread_folder_paths: Option<&PathList>,
- in_flight_archive: Option<(Task<()>, smol::channel::Sender<()>)>,
+ in_flight_archive: Option<(Task<()>, async_channel::Sender<()>)>,
window: &mut Window,
cx: &mut Context<Self>,
) {
@@ -2998,12 +2998,12 @@ impl Sidebar {
session_id: &acp::SessionId,
roots: Vec<thread_worktree_archive::RootPlan>,
cx: &mut Context<Self>,
- ) -> Option<(Task<()>, smol::channel::Sender<()>)> {
+ ) -> Option<(Task<()>, async_channel::Sender<()>)> {
if roots.is_empty() {
return None;
}
- let (cancel_tx, cancel_rx) = smol::channel::bounded::<()>(1);
+ let (cancel_tx, cancel_rx) = async_channel::bounded::<()>(1);
let session_id = session_id.clone();
let task = cx.spawn(async move |_this, cx| {
match Self::archive_worktree_roots(roots, cancel_rx, cx).await {
@@ -3031,7 +3031,7 @@ impl Sidebar {
async fn archive_worktree_roots(
roots: Vec<thread_worktree_archive::RootPlan>,
- cancel_rx: smol::channel::Receiver<()>,
+ cancel_rx: async_channel::Receiver<()>,
cx: &mut gpui::AsyncApp,
) -> anyhow::Result<ArchiveWorktreeOutcome> {
let mut completed_persists: Vec<(i64, thread_worktree_archive::RootPlan)> = Vec::new();
@@ -16,7 +16,7 @@ indoc.workspace = true
libsqlite3-sys.workspace = true
log.workspace = true
parking_lot.workspace = true
-smol.workspace = true
+pollster.workspace = true
sqlformat.workspace = true
thread_local = "1.1.4"
util.workspace = true
@@ -344,7 +344,7 @@ mod test {
PRAGMA case_sensitive_like=TRUE;
"});
- let _ = smol::block_on(builder.build()).unwrap().deref();
+ let _ = pollster::block_on(builder.build()).unwrap().deref();
}));
}
@@ -23,7 +23,6 @@ project.workspace = true
schemars.workspace = true
serde.workspace = true
settings.workspace = true
-smol.workspace = true
ui.workspace = true
util.workspace = true
workspace.workspace = true
@@ -441,7 +441,7 @@ impl TabSwitcherDelegate {
))
})
.collect::<Vec<_>>();
- smol::block_on(fuzzy::match_strings(
+ gpui::block_on(fuzzy::match_strings(
&candidates,
&query,
true,
@@ -20,10 +20,12 @@ path = "src/terminal.rs"
doctest = false
[dependencies]
+async-channel.workspace = true
alacritty_terminal.workspace = true
anyhow.workspace = true
collections.workspace = true
futures.workspace = true
+futures-lite.workspace = true
gpui.workspace = true
itertools.workspace = true
libc.workspace = true
@@ -34,7 +36,6 @@ schemars.workspace = true
serde.workspace = true
settings.workspace = true
sysinfo.workspace = true
-smol.workspace = true
task.workspace = true
theme.workspace = true
theme_settings.workspace = true
@@ -26,6 +26,7 @@ use alacritty_terminal::{
},
};
use anyhow::{Context as _, Result, bail};
+use futures_lite::future::yield_now;
use log::trace;
use futures::{
@@ -39,12 +40,12 @@ use mappings::mouse::{
scroll_report,
};
+use async_channel::{Receiver, Sender};
use collections::{HashMap, VecDeque};
use futures::StreamExt;
use pty_info::{ProcessIdGetter, PtyProcessInfo};
use serde::{Deserialize, Serialize};
use settings::Settings;
-use smol::channel::{Receiver, Sender};
use task::{HideStrategy, Shell, SpawnInTerminal};
use terminal_hyperlinks::RegexSearches;
use terminal_settings::{AlternateScroll, CursorShape, TerminalSettings};
@@ -736,7 +737,7 @@ impl TerminalBuilder {
}
if events.is_empty() && !wakeup {
- smol::future::yield_now().await;
+ yield_now().await;
break 'outer;
}
@@ -749,7 +750,7 @@ impl TerminalBuilder {
this.process_event(event, cx);
}
})?;
- smol::future::yield_now().await;
+ yield_now().await;
}
}
anyhow::Ok(())
@@ -2564,6 +2565,7 @@ mod tests {
index::{Column, Line, Point as AlacPoint},
term::cell::Cell,
};
+ use async_channel::Receiver;
use collections::HashMap;
use gpui::{
Entity, Modifiers, MouseButton, MouseDownEvent, MouseMoveEvent, MouseUpEvent, Pixels,
@@ -2571,7 +2573,6 @@ mod tests {
};
use parking_lot::Mutex;
use rand::{Rng, distr, rngs::StdRng};
- use smol::channel::Receiver;
use task::{Shell, ShellBuilder};
#[cfg(not(target_os = "windows"))]
@@ -2590,7 +2591,7 @@ mod tests {
command: &str,
args: &[&str],
) -> (Entity<Terminal>, Receiver<Option<ExitStatus>>) {
- let (completion_tx, completion_rx) = smol::channel::unbounded();
+ let (completion_tx, completion_rx) = async_channel::unbounded();
let args: Vec<String> = args.iter().map(|s| s.to_string()).collect();
let (program, args) =
ShellBuilder::new(&Shell::System, false).build(Some(command.to_owned()), &args);
@@ -2743,7 +2744,7 @@ mod tests {
cx.executor().allow_parking();
- let (completion_tx, completion_rx) = smol::channel::unbounded();
+ let (completion_tx, completion_rx) = async_channel::unbounded();
let builder = cx
.update(|cx| {
TerminalBuilder::new(
@@ -2769,7 +2770,7 @@ mod tests {
// Build an empty command, which will result in a tty shell spawned.
let terminal = cx.new(|cx| builder.subscribe(cx));
- let (event_tx, event_rx) = smol::channel::unbounded::<Event>();
+ let (event_tx, event_rx) = async_channel::unbounded::<Event>();
cx.update(|cx| {
cx.subscribe(&terminal, move |_, e, _| {
event_tx.send_blocking(e.clone()).unwrap();
@@ -2840,7 +2841,7 @@ mod tests {
.unwrap();
let terminal = cx.new(|cx| builder.subscribe(cx));
- let (event_tx, event_rx) = smol::channel::unbounded::<Event>();
+ let (event_tx, event_rx) = async_channel::unbounded::<Event>();
cx.update(|cx| {
cx.subscribe(&terminal, move |_, e, _| {
event_tx.send_blocking(e.clone()).unwrap();
@@ -2875,7 +2876,7 @@ mod tests {
async fn test_terminal_no_exit_on_spawn_failure(cx: &mut TestAppContext) {
cx.executor().allow_parking();
- let (completion_tx, completion_rx) = smol::channel::unbounded();
+ let (completion_tx, completion_rx) = async_channel::unbounded();
let (program, args) = ShellBuilder::new(&Shell::System, false)
.build(Some("asdasdasdasd".to_owned()), &["@@@@@".to_owned()]);
let builder = cx
@@ -37,6 +37,7 @@ collections.workspace = true
component.workspace = true
db.workspace = true
feature_flags.workspace = true
+futures-lite.workspace = true
fs.workspace = true
futures.workspace = true
git.workspace = true
@@ -17,7 +17,6 @@ mod persistence;
pub mod searchable;
mod security_modal;
pub mod shared_screen;
-use db::smol::future::yield_now;
pub use shared_screen::SharedScreen;
pub mod focus_follows_mouse;
mod status_bar;
@@ -3310,7 +3309,7 @@ impl Workspace {
// Yield between synthetic keystrokes so deferred focus and
// other effects can settle before dispatching the next key.
- yield_now().await;
+ futures_lite::future::yield_now().await;
}
*keystrokes.borrow_mut() = Default::default();
@@ -31,6 +31,7 @@ test-support = [
[dependencies]
anyhow.workspace = true
+async-channel.workspace = true
async-lock.workspace = true
chardetng.workspace = true
clock.workspace = true
@@ -53,7 +54,6 @@ serde.workspace = true
serde_json.workspace = true
settings.workspace = true
smallvec.workspace = true
-smol.workspace = true
sum_tree.workspace = true
text.workspace = true
tracing.workspace = true
@@ -32,6 +32,7 @@ use gpui::{
use ignore::IgnoreStack;
use language::DiskState;
+use async_channel::{self, Sender};
use parking_lot::Mutex;
use paths::{local_settings_folder_name, local_vscode_folder_name};
use postage::{
@@ -46,7 +47,6 @@ use rpc::{
pub use settings::WorktreeId;
use settings::{Settings, SettingsLocation, SettingsStore};
use smallvec::{SmallVec, smallvec};
-use smol::channel::{self, Sender};
use std::{
any::Any,
borrow::Borrow as _,
@@ -128,8 +128,8 @@ impl fmt::Debug for LoadedBinaryFile {
pub struct LocalWorktree {
snapshot: LocalSnapshot,
- scan_requests_tx: channel::Sender<ScanRequest>,
- path_prefixes_to_scan_tx: channel::Sender<PathPrefixScanRequest>,
+ scan_requests_tx: async_channel::Sender<ScanRequest>,
+ path_prefixes_to_scan_tx: async_channel::Sender<PathPrefixScanRequest>,
is_scanning: (watch::Sender<bool>, watch::Receiver<bool>),
snapshot_subscriptions: VecDeque<(usize, oneshot::Sender<()>)>,
_background_scanner_tasks: Vec<Task<()>>,
@@ -479,8 +479,8 @@ impl Worktree {
.block_on(snapshot.insert_entry(entry, fs.as_ref()));
}
- let (scan_requests_tx, scan_requests_rx) = channel::unbounded();
- let (path_prefixes_to_scan_tx, path_prefixes_to_scan_rx) = channel::unbounded();
+ let (scan_requests_tx, scan_requests_rx) = async_channel::unbounded();
+ let (path_prefixes_to_scan_tx, path_prefixes_to_scan_rx) = async_channel::unbounded();
let mut worktree = LocalWorktree {
share_private_files,
next_entry_id,
@@ -1101,8 +1101,8 @@ impl LocalWorktree {
}
fn restart_background_scanners(&mut self, cx: &Context<Worktree>) {
- let (scan_requests_tx, scan_requests_rx) = channel::unbounded();
- let (path_prefixes_to_scan_tx, path_prefixes_to_scan_rx) = channel::unbounded();
+ let (scan_requests_tx, scan_requests_rx) = async_channel::unbounded();
+ let (path_prefixes_to_scan_tx, path_prefixes_to_scan_rx) = async_channel::unbounded();
self.scan_requests_tx = scan_requests_tx;
self.path_prefixes_to_scan_tx = path_prefixes_to_scan_tx;
@@ -1120,8 +1120,8 @@ impl LocalWorktree {
fn start_background_scanner(
&mut self,
- scan_requests_rx: channel::Receiver<ScanRequest>,
- path_prefixes_to_scan_rx: channel::Receiver<PathPrefixScanRequest>,
+ scan_requests_rx: async_channel::Receiver<ScanRequest>,
+ path_prefixes_to_scan_rx: async_channel::Receiver<PathPrefixScanRequest>,
cx: &Context<Worktree>,
) {
let snapshot = self.snapshot();
@@ -3903,8 +3903,8 @@ struct BackgroundScanner {
fs_case_sensitive: bool,
status_updates_tx: UnboundedSender<ScanState>,
executor: BackgroundExecutor,
- scan_requests_rx: channel::Receiver<ScanRequest>,
- path_prefixes_to_scan_rx: channel::Receiver<PathPrefixScanRequest>,
+ scan_requests_rx: async_channel::Receiver<ScanRequest>,
+ path_prefixes_to_scan_rx: async_channel::Receiver<PathPrefixScanRequest>,
next_entry_id: Arc<AtomicUsize>,
phase: BackgroundScannerPhase,
watcher: Arc<dyn Watcher>,
@@ -4009,7 +4009,7 @@ impl BackgroundScanner {
Box::pin(futures::stream::pending())
};
- let (scan_job_tx, scan_job_rx) = channel::unbounded();
+ let (scan_job_tx, scan_job_rx) = async_channel::unbounded();
{
let mut state = self.state.lock().await;
state.snapshot.scan_id += 1;
@@ -4417,7 +4417,7 @@ impl BackgroundScanner {
self.state.lock().await.snapshot.scan_id += 1;
- let (scan_job_tx, scan_job_rx) = channel::unbounded();
+ let (scan_job_tx, scan_job_rx) = async_channel::unbounded();
log::debug!(
"received fs events {:?}",
relative_paths
@@ -4482,7 +4482,7 @@ impl BackgroundScanner {
.await;
(state.snapshot.clone(), ignore_stack, abs_path)
};
- let (scan_job_tx, scan_job_rx) = channel::unbounded();
+ let (scan_job_tx, scan_job_rx) = async_channel::unbounded();
self.update_ignore_statuses_for_paths(
scan_job_tx,
prev_snapshot,
@@ -4494,7 +4494,7 @@ impl BackgroundScanner {
}
async fn forcibly_load_paths(&self, paths: &[Arc<RelPath>]) -> bool {
- let (scan_job_tx, scan_job_rx) = channel::unbounded();
+ let (scan_job_tx, scan_job_rx) = async_channel::unbounded();
{
let mut state = self.state.lock().await;
let root_path = state.snapshot.abs_path.clone();
@@ -4529,7 +4529,7 @@ impl BackgroundScanner {
async fn scan_dirs(
&self,
enable_progress_updates: bool,
- scan_jobs_rx: channel::Receiver<ScanJob>,
+ scan_jobs_rx: async_channel::Receiver<ScanJob>,
) {
if self
.status_updates_tx
@@ -5031,7 +5031,7 @@ impl BackgroundScanner {
prev_snapshot: LocalSnapshot,
ignores_to_update: Vec<(Arc<Path>, IgnoreStack)>,
) {
- let (ignore_queue_tx, ignore_queue_rx) = channel::unbounded();
+ let (ignore_queue_tx, ignore_queue_rx) = async_channel::unbounded();
{
for (parent_abs_path, ignore_stack) in ignores_to_update {
ignore_queue_tx
@@ -336,7 +336,7 @@ fn main() {
session_id.clone(),
KeyValueStore::from_app_db(&app_db),
));
-
+ let background_executor = app.background_executor();
crashes::init(
InitCrashHandler {
session_id,
@@ -357,6 +357,7 @@ fn main() {
|task| {
app.background_executor().spawn(task).detach();
},
+ move |duration| background_executor.timer(duration),
);
let (open_listener, mut open_rx) = OpenListener::new();
@@ -322,7 +322,7 @@ fn connect_to_cli(
futures::channel::mpsc::channel::<CliRequest>(16);
thread::spawn(move || {
while let Ok(cli_request) = request_rx.recv() {
- if smol::block_on(async_request_tx.send(cli_request)).is_err() {
+ if gpui::block_on(async_request_tx.send(cli_request)).is_err() {
break;
}
}
@@ -441,7 +441,7 @@ mod tests {
let mut cx = VisualTestAppContext::new(gpui_platform::current_platform(false));
let app_state = init_visual_test(&mut cx);
- smol::block_on(async {
+ gpui::block_on(async {
app_state
.fs
.as_fake()
@@ -456,7 +456,7 @@ mod tests {
.await;
});
- let workspace_result = smol::block_on(open_test_workspace(app_state, &mut cx));
+ let workspace_result = gpui::block_on(open_test_workspace(app_state, &mut cx));
assert!(
workspace_result.is_ok(),
"Failed to open workspace: {:?}",
@@ -482,7 +482,7 @@ mod tests {
let mut cx = VisualTestAppContext::new(gpui_platform::current_platform(false));
let app_state = init_visual_test(&mut cx);
- smol::block_on(async {
+ gpui::block_on(async {
app_state
.fs
.as_fake()
@@ -498,10 +498,10 @@ mod tests {
.await;
});
- let workspace = smol::block_on(open_test_workspace(app_state, &mut cx))
+ let workspace = gpui::block_on(open_test_workspace(app_state, &mut cx))
.expect("Failed to open workspace");
- smol::block_on(async {
+ gpui::block_on(async {
wait_for_ui_stabilization(&cx).await;
let screenshot_result = cx.capture_screenshot(workspace.into());