Detailed changes
@@ -18,7 +18,9 @@ env:
jobs:
tests:
name: Run tests
- runs-on: self-hosted
+ runs-on:
+ - self-hosted
+ - test
env:
RUSTFLAGS: -D warnings
steps:
@@ -39,7 +41,9 @@ jobs:
bundle:
name: Bundle app
- runs-on: self-hosted
+ runs-on:
+ - self-hosted
+ - bundle
env:
MACOS_CERTIFICATE: ${{ secrets.MACOS_CERTIFICATE }}
MACOS_CERTIFICATE_PASSWORD: ${{ secrets.MACOS_CERTIFICATE_PASSWORD }}
@@ -60,6 +64,11 @@ jobs:
target: aarch64-apple-darwin
profile: minimal
+ - name: Install Node
+ uses: actions/setup-node@v2
+ with:
+ node-version: '16'
+
- name: Checkout repo
uses: actions/checkout@v2
with:
@@ -4,9 +4,9 @@ version = 3
[[package]]
name = "addr2line"
-version = "0.14.1"
+version = "0.17.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a55f82cfe485775d02112886f4169bde0c5894d75e79ead7eafe7e40a25e45f7"
+checksum = "b9ecd88a8c8378ca913a680cd98f0f13ac67383d35993f86c90a70e3f137816b"
dependencies = [
"gimli",
]
@@ -336,7 +336,7 @@ dependencies = [
[[package]]
name = "async-pipe"
version = "0.1.3"
-source = "git+https://github.com/routerify/async-pipe-rs?rev=feeb77e83142a9ff837d0767652ae41bfc5d8e47#feeb77e83142a9ff837d0767652ae41bfc5d8e47"
+source = "git+https://github.com/zed-industries/async-pipe-rs?rev=82d00a04211cf4e1236029aa03e6b6ce2a74c553#82d00a04211cf4e1236029aa03e6b6ce2a74c553"
dependencies = [
"futures",
"log",
@@ -558,11 +558,12 @@ checksum = "cdb031dd78e28731d87d56cc8ffef4a8f36ca26c38fe2de700543e627f8a464a"
[[package]]
name = "backtrace"
-version = "0.3.56"
+version = "0.3.64"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9d117600f438b1707d4e4ae15d3595657288f8235a0eb593e80ecc98ab34e1bc"
+checksum = "5e121dee8023ce33ab248d9ce1493df03c3b38a659b240096fcbd7048ff9c31f"
dependencies = [
"addr2line",
+ "cc",
"cfg-if 1.0.0",
"libc",
"miniz_oxide 0.4.4",
@@ -1438,6 +1439,21 @@ dependencies = [
"const-oid",
]
+[[package]]
+name = "dhat"
+version = "0.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "47003dc9f6368a88e85956c3b2573a7e6872746a3e5d762a8885da3a136a0381"
+dependencies = [
+ "backtrace",
+ "lazy_static",
+ "parking_lot",
+ "rustc-hash",
+ "serde",
+ "serde_json",
+ "thousands",
+]
+
[[package]]
name = "diagnostics"
version = "0.1.0"
@@ -2124,9 +2140,9 @@ dependencies = [
[[package]]
name = "gimli"
-version = "0.23.0"
+version = "0.26.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f6503fe142514ca4799d4c26297c4248239fe8838d827db6bd6065c6ed29a6ce"
+checksum = "78cc372d058dcf6d5ecd98510e7fbc9e5aec4d21de70f65fea8fecebcd881bd4"
[[package]]
name = "glob"
@@ -2187,6 +2203,7 @@ dependencies = [
"core-graphics",
"core-text",
"ctor",
+ "dhat",
"env_logger",
"etagere",
"font-kit",
@@ -2766,9 +2783,9 @@ dependencies = [
[[package]]
name = "lock_api"
-version = "0.4.2"
+version = "0.4.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "dd96ffd135b2fd7b973ac026d28085defbe8983df057ced3eb4f2130b0831312"
+checksum = "88943dd7ef4a2e5a4bfa2753aaab3013e34ce2533d1996fb18ef591e315e2b3b"
dependencies = [
"scopeguard",
]
@@ -2810,6 +2827,7 @@ version = "0.1.0"
dependencies = [
"anyhow",
"async-pipe",
+ "collections",
"ctor",
"env_logger",
"futures",
@@ -3134,9 +3152,12 @@ dependencies = [
[[package]]
name = "object"
-version = "0.23.0"
+version = "0.27.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a9a7ab5d64814df0fe4a4b5ead45ed6c5f181ee3ff04ba344313a6c80446c5d4"
+checksum = "67ac1d3f9a1d3616fd9a60c8d74296f22406a238b6a72f5cc1e6f314df4ffbf9"
+dependencies = [
+ "memchr",
+]
[[package]]
name = "once_cell"
@@ -3224,9 +3245,9 @@ checksum = "427c3892f9e783d91cc128285287e70a59e206ca452770ece88a76f7a3eddd72"
[[package]]
name = "parking_lot"
-version = "0.11.1"
+version = "0.11.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6d7744ac029df22dca6284efe4e898991d28e3085c706c972bcd7da4a27a15eb"
+checksum = "7d17b78036a60663b797adeaee46f5c9dfebb86948d1255007a1d6be0271ff99"
dependencies = [
"instant",
"lock_api",
@@ -3235,9 +3256,9 @@ dependencies = [
[[package]]
name = "parking_lot_core"
-version = "0.8.3"
+version = "0.8.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "fa7a782938e745763fe6907fc6ba86946d72f49fe7e21de074e08128a99fb018"
+checksum = "d76e8e1493bcac0d2766c42737f34458f1c8c50c0d23bcb24ea953affb273216"
dependencies = [
"cfg-if 1.0.0",
"instant",
@@ -3832,9 +3853,9 @@ dependencies = [
[[package]]
name = "redox_syscall"
-version = "0.2.5"
+version = "0.2.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "94341e4e44e24f6b591b59e47a8a027df12e008d73fd5672dbea9cc22f4507d9"
+checksum = "8383f39639269cde97d255a32bdb68c047337295414940c68bdd30c2e13203ff"
dependencies = [
"bitflags",
]
@@ -5071,6 +5092,12 @@ dependencies = [
"syn",
]
+[[package]]
+name = "thousands"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3bf63baf9f5039dadc247375c29eb13706706cfde997d0330d05aa63a77d8820"
+
[[package]]
name = "thread_local"
version = "1.1.3"
@@ -5927,9 +5954,11 @@ dependencies = [
"async-tungstenite",
"base64 0.13.0",
"clap 3.0.0-beta.2",
+ "client",
"collections",
"comrak",
"ctor",
+ "editor",
"either",
"env_logger",
"envy",
@@ -5938,12 +5967,15 @@ dependencies = [
"handlebars",
"http-auth-basic",
"jwt-simple",
+ "language",
"lazy_static",
"lipsum",
+ "lsp",
"oauth2",
"oauth2-surf",
"parking_lot",
"postage",
+ "project",
"rand 0.8.3",
"rpc",
"rust-embed",
@@ -5958,7 +5990,7 @@ dependencies = [
"time 0.2.27",
"toml",
"util",
- "zed",
+ "workspace",
]
[[package]]
@@ -101,11 +101,15 @@ impl ChatPanel {
cx.dispatch_action(LoadMoreMessages);
}
});
- let _observe_status = cx.spawn(|this, mut cx| {
+ let _observe_status = cx.spawn_weak(|this, mut cx| {
let mut status = rpc.status();
async move {
while let Some(_) = status.recv().await {
- this.update(&mut cx, |_, cx| cx.notify());
+ if let Some(this) = this.upgrade(&cx) {
+ this.update(&mut cx, |_, cx| cx.notify());
+ } else {
+ break;
+ }
}
}
});
@@ -597,7 +597,7 @@ mod tests {
use surf::http::Response;
#[gpui::test]
- async fn test_channel_messages(mut cx: TestAppContext) {
+ async fn test_channel_messages(cx: &mut TestAppContext) {
cx.foreground().forbid_parking();
let user_id = 5;
@@ -609,7 +609,7 @@ mod tests {
let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
let channel_list = cx.add_model(|cx| ChannelList::new(user_store, client.clone(), cx));
- channel_list.read_with(&cx, |list, _| assert_eq!(list.available_channels(), None));
+ channel_list.read_with(cx, |list, _| assert_eq!(list.available_channels(), None));
// Get the available channels.
let get_channels = server.receive::<proto::GetChannels>().await.unwrap();
@@ -625,7 +625,7 @@ mod tests {
)
.await;
channel_list.next_notification(&cx).await;
- channel_list.read_with(&cx, |list, _| {
+ channel_list.read_with(cx, |list, _| {
assert_eq!(
list.available_channels().unwrap(),
&[ChannelDetails {
@@ -652,12 +652,12 @@ mod tests {
// Join a channel and populate its existing messages.
let channel = channel_list
- .update(&mut cx, |list, cx| {
+ .update(cx, |list, cx| {
let channel_id = list.available_channels().unwrap()[0].id;
list.get_channel(channel_id, cx)
})
.unwrap();
- channel.read_with(&cx, |channel, _| assert!(channel.messages().is_empty()));
+ channel.read_with(cx, |channel, _| assert!(channel.messages().is_empty()));
let join_channel = server.receive::<proto::JoinChannel>().await.unwrap();
server
.respond(
@@ -708,7 +708,7 @@ mod tests {
new_count: 2,
}
);
- channel.read_with(&cx, |channel, _| {
+ channel.read_with(cx, |channel, _| {
assert_eq!(
channel
.messages_in_range(0..2)
@@ -723,7 +723,7 @@ mod tests {
// Receive a new message.
server.send(proto::ChannelMessageSent {
- channel_id: channel.read_with(&cx, |channel, _| channel.details.id),
+ channel_id: channel.read_with(cx, |channel, _| channel.details.id),
message: Some(proto::ChannelMessage {
id: 12,
body: "c".into(),
@@ -756,7 +756,7 @@ mod tests {
new_count: 1,
}
);
- channel.read_with(&cx, |channel, _| {
+ channel.read_with(cx, |channel, _| {
assert_eq!(
channel
.messages_in_range(2..3)
@@ -767,7 +767,7 @@ mod tests {
});
// Scroll up to view older messages.
- channel.update(&mut cx, |channel, cx| {
+ channel.update(cx, |channel, cx| {
assert!(channel.load_more_messages(cx));
});
let get_messages = server.receive::<proto::GetChannelMessages>().await.unwrap();
@@ -805,7 +805,7 @@ mod tests {
new_count: 2,
}
);
- channel.read_with(&cx, |channel, _| {
+ channel.read_with(cx, |channel, _| {
assert_eq!(
channel
.messages_in_range(0..2)
@@ -133,9 +133,8 @@ struct ClientState {
entity_id_extractors: HashMap<TypeId, Box<dyn Send + Sync + Fn(&dyn AnyTypedEnvelope) -> u64>>,
_maintain_connection: Option<Task<()>>,
heartbeat_interval: Duration,
-
models_by_entity_type_and_remote_id: HashMap<(TypeId, u64), AnyWeakModelHandle>,
- models_by_message_type: HashMap<TypeId, AnyModelHandle>,
+ models_by_message_type: HashMap<TypeId, AnyWeakModelHandle>,
model_types_by_message_type: HashMap<TypeId, TypeId>,
message_handlers: HashMap<
TypeId,
@@ -228,6 +227,17 @@ impl Client {
self.http.clone()
}
+ #[cfg(any(test, feature = "test-support"))]
+ pub fn tear_down(&self) {
+ let mut state = self.state.write();
+ state._maintain_connection.take();
+ state.message_handlers.clear();
+ state.models_by_message_type.clear();
+ state.models_by_entity_type_and_remote_id.clear();
+ state.entity_id_extractors.clear();
+ self.peer.reset();
+ }
+
#[cfg(any(test, feature = "test-support"))]
pub fn override_authenticate<F>(&mut self, authenticate: F) -> &mut Self
where
@@ -339,18 +349,22 @@ impl Client {
{
let message_type_id = TypeId::of::<M>();
- let client = self.clone();
+ let client = Arc::downgrade(self);
let mut state = self.state.write();
state
.models_by_message_type
- .insert(message_type_id, model.into());
+ .insert(message_type_id, model.downgrade().into());
let prev_handler = state.message_handlers.insert(
message_type_id,
Arc::new(move |handle, envelope, cx| {
let model = handle.downcast::<E>().unwrap();
let envelope = envelope.into_any().downcast::<TypedEnvelope<M>>().unwrap();
- handler(model, *envelope, client.clone(), cx).boxed_local()
+ if let Some(client) = client.upgrade() {
+ handler(model, *envelope, client.clone(), cx).boxed_local()
+ } else {
+ async move { Ok(()) }.boxed_local()
+ }
}),
);
if prev_handler.is_some() {
@@ -376,7 +390,7 @@ impl Client {
let model_type_id = TypeId::of::<E>();
let message_type_id = TypeId::of::<M>();
- let client = self.clone();
+ let client = Arc::downgrade(self);
let mut state = self.state.write();
state
.model_types_by_message_type
@@ -399,7 +413,11 @@ impl Client {
Arc::new(move |handle, envelope, cx| {
let model = handle.downcast::<E>().unwrap();
let envelope = envelope.into_any().downcast::<TypedEnvelope<M>>().unwrap();
- handler(model, *envelope, client.clone(), cx).boxed_local()
+ if let Some(client) = client.upgrade() {
+ handler(model, *envelope, client.clone(), cx).boxed_local()
+ } else {
+ async move { Ok(()) }.boxed_local()
+ }
}),
);
if prev_handler.is_some() {
@@ -541,7 +559,7 @@ impl Client {
let model = state
.models_by_message_type
.get(&payload_type_id)
- .cloned()
+ .and_then(|model| model.upgrade(&cx))
.or_else(|| {
let model_type_id =
*state.model_types_by_message_type.get(&payload_type_id)?;
@@ -917,7 +935,7 @@ mod tests {
use gpui::TestAppContext;
#[gpui::test(iterations = 10)]
- async fn test_heartbeat(cx: TestAppContext) {
+ async fn test_heartbeat(cx: &mut TestAppContext) {
cx.foreground().forbid_parking();
let user_id = 5;
@@ -937,7 +955,7 @@ mod tests {
}
#[gpui::test(iterations = 10)]
- async fn test_reconnection(cx: TestAppContext) {
+ async fn test_reconnection(cx: &mut TestAppContext) {
cx.foreground().forbid_parking();
let user_id = 5;
@@ -985,7 +1003,7 @@ mod tests {
}
#[gpui::test]
- async fn test_subscribing_to_entity(mut cx: TestAppContext) {
+ async fn test_subscribing_to_entity(cx: &mut TestAppContext) {
cx.foreground().forbid_parking();
let user_id = 5;
@@ -1017,14 +1035,11 @@ mod tests {
subscription: None,
});
- let _subscription1 =
- model1.update(&mut cx, |_, cx| client.add_model_for_remote_entity(1, cx));
- let _subscription2 =
- model2.update(&mut cx, |_, cx| client.add_model_for_remote_entity(2, cx));
+ let _subscription1 = model1.update(cx, |_, cx| client.add_model_for_remote_entity(1, cx));
+ let _subscription2 = model2.update(cx, |_, cx| client.add_model_for_remote_entity(2, cx));
// Ensure dropping a subscription for the same entity type still allows receiving of
// messages for other entity IDs of the same type.
- let subscription3 =
- model3.update(&mut cx, |_, cx| client.add_model_for_remote_entity(3, cx));
+ let subscription3 = model3.update(cx, |_, cx| client.add_model_for_remote_entity(3, cx));
drop(subscription3);
server.send(proto::UnshareProject { project_id: 1 });
@@ -1034,7 +1049,7 @@ mod tests {
}
#[gpui::test]
- async fn test_subscribing_after_dropping_subscription(mut cx: TestAppContext) {
+ async fn test_subscribing_after_dropping_subscription(cx: &mut TestAppContext) {
cx.foreground().forbid_parking();
let user_id = 5;
@@ -1062,7 +1077,7 @@ mod tests {
}
#[gpui::test]
- async fn test_dropping_subscription_in_handler(mut cx: TestAppContext) {
+ async fn test_dropping_subscription_in_handler(cx: &mut TestAppContext) {
cx.foreground().forbid_parking();
let user_id = 5;
@@ -1079,7 +1094,7 @@ mod tests {
async { Ok(()) }
},
);
- model.update(&mut cx, |model, _| {
+ model.update(cx, |model, _| {
model.subscription = Some(subscription);
});
server.send(proto::Ping {});
@@ -8,7 +8,7 @@ use gpui::{AsyncAppContext, Entity, ImageData, ModelContext, ModelHandle, Task};
use postage::{prelude::Stream, sink::Sink, watch};
use std::{
collections::{HashMap, HashSet},
- sync::Arc,
+ sync::{Arc, Weak},
};
use util::TryFutureExt as _;
@@ -38,7 +38,7 @@ pub struct UserStore {
update_contacts_tx: watch::Sender<Option<proto::UpdateContacts>>,
current_user: watch::Receiver<Option<Arc<User>>>,
contacts: Arc<[Contact]>,
- client: Arc<Client>,
+ client: Weak<Client>,
http: Arc<dyn HttpClient>,
_maintain_contacts: Task<()>,
_maintain_current_user: Task<()>,
@@ -65,7 +65,7 @@ impl UserStore {
users: Default::default(),
current_user: current_user_rx,
contacts: Arc::from([]),
- client: client.clone(),
+ client: Arc::downgrade(&client),
update_contacts_tx,
http,
_maintain_contacts: cx.spawn_weak(|this, mut cx| async move {
@@ -156,25 +156,26 @@ impl UserStore {
let http = self.http.clone();
user_ids.retain(|id| !self.users.contains_key(id));
cx.spawn_weak(|this, mut cx| async move {
- if !user_ids.is_empty() {
- let response = rpc.request(proto::GetUsers { user_ids }).await?;
- let new_users = future::join_all(
- response
- .users
- .into_iter()
- .map(|user| User::new(user, http.as_ref())),
- )
- .await;
+ if let Some(rpc) = rpc.upgrade() {
+ if !user_ids.is_empty() {
+ let response = rpc.request(proto::GetUsers { user_ids }).await?;
+ let new_users = future::join_all(
+ response
+ .users
+ .into_iter()
+ .map(|user| User::new(user, http.as_ref())),
+ )
+ .await;
- if let Some(this) = this.upgrade(&cx) {
- this.update(&mut cx, |this, _| {
- for user in new_users {
- this.users.insert(user.id, Arc::new(user));
- }
- });
+ if let Some(this) = this.upgrade(&cx) {
+ this.update(&mut cx, |this, _| {
+ for user in new_users {
+ this.users.insert(user.id, Arc::new(user));
+ }
+ });
+ }
}
}
-
Ok(())
})
}
@@ -725,7 +725,7 @@ mod tests {
use workspace::WorkspaceParams;
#[gpui::test]
- async fn test_diagnostics(mut cx: TestAppContext) {
+ async fn test_diagnostics(cx: &mut TestAppContext) {
let params = cx.update(WorkspaceParams::test);
let project = params.project.clone();
let workspace = cx.add_view(0, |cx| Workspace::new(&params, cx));
@@ -760,14 +760,14 @@ mod tests {
.await;
project
- .update(&mut cx, |project, cx| {
+ .update(cx, |project, cx| {
project.find_or_create_local_worktree("/test", false, cx)
})
.await
.unwrap();
// Create some diagnostics
- project.update(&mut cx, |project, cx| {
+ project.update(cx, |project, cx| {
project
.update_diagnostic_entries(
PathBuf::from("/test/main.rs"),
@@ -856,7 +856,7 @@ mod tests {
});
view.next_notification(&cx).await;
- view.update(&mut cx, |view, cx| {
+ view.update(cx, |view, cx| {
let editor = view.editor.update(cx, |editor, cx| editor.snapshot(cx));
assert_eq!(
@@ -920,7 +920,7 @@ mod tests {
});
// Diagnostics are added for another earlier path.
- project.update(&mut cx, |project, cx| {
+ project.update(cx, |project, cx| {
project.disk_based_diagnostics_started(cx);
project
.update_diagnostic_entries(
@@ -944,7 +944,7 @@ mod tests {
});
view.next_notification(&cx).await;
- view.update(&mut cx, |view, cx| {
+ view.update(cx, |view, cx| {
let editor = view.editor.update(cx, |editor, cx| editor.snapshot(cx));
assert_eq!(
@@ -1021,7 +1021,7 @@ mod tests {
});
// Diagnostics are added to the first path
- project.update(&mut cx, |project, cx| {
+ project.update(cx, |project, cx| {
project.disk_based_diagnostics_started(cx);
project
.update_diagnostic_entries(
@@ -1059,7 +1059,7 @@ mod tests {
});
view.next_notification(&cx).await;
- view.update(&mut cx, |view, cx| {
+ view.update(cx, |view, cx| {
let editor = view.editor.update(cx, |editor, cx| editor.snapshot(cx));
assert_eq!(
@@ -464,7 +464,7 @@ mod tests {
use Bias::*;
#[gpui::test(iterations = 100)]
- async fn test_random_display_map(mut cx: gpui::TestAppContext, mut rng: StdRng) {
+ async fn test_random_display_map(cx: &mut gpui::TestAppContext, mut rng: StdRng) {
cx.foreground().set_block_on_ticks(0..=50);
cx.foreground().forbid_parking();
let operations = env::var("OPERATIONS")
@@ -512,11 +512,11 @@ mod tests {
cx,
)
});
- let mut notifications = observe(&map, &mut cx);
+ let mut notifications = observe(&map, cx);
let mut fold_count = 0;
let mut blocks = Vec::new();
- let snapshot = map.update(&mut cx, |map, cx| map.snapshot(cx));
+ let snapshot = map.update(cx, |map, cx| map.snapshot(cx));
log::info!("buffer text: {:?}", snapshot.buffer_snapshot.text());
log::info!("fold text: {:?}", snapshot.folds_snapshot.text());
log::info!("tab text: {:?}", snapshot.tabs_snapshot.text());
@@ -533,10 +533,10 @@ mod tests {
Some(rng.gen_range(0.0..=max_wrap_width))
};
log::info!("setting wrap width to {:?}", wrap_width);
- map.update(&mut cx, |map, cx| map.set_wrap_width(wrap_width, cx));
+ map.update(cx, |map, cx| map.set_wrap_width(wrap_width, cx));
}
20..=44 => {
- map.update(&mut cx, |map, cx| {
+ map.update(cx, |map, cx| {
if rng.gen() || blocks.is_empty() {
let buffer = map.snapshot(cx).buffer_snapshot;
let block_properties = (0..rng.gen_range(1..=1))
@@ -582,7 +582,7 @@ mod tests {
45..=79 => {
let mut ranges = Vec::new();
for _ in 0..rng.gen_range(1..=3) {
- buffer.read_with(&cx, |buffer, cx| {
+ buffer.read_with(cx, |buffer, cx| {
let buffer = buffer.read(cx);
let end = buffer.clip_offset(rng.gen_range(0..=buffer.len()), Right);
let start = buffer.clip_offset(rng.gen_range(0..=end), Left);
@@ -592,26 +592,26 @@ mod tests {
if rng.gen() && fold_count > 0 {
log::info!("unfolding ranges: {:?}", ranges);
- map.update(&mut cx, |map, cx| {
+ map.update(cx, |map, cx| {
map.unfold(ranges, cx);
});
} else {
log::info!("folding ranges: {:?}", ranges);
- map.update(&mut cx, |map, cx| {
+ map.update(cx, |map, cx| {
map.fold(ranges, cx);
});
}
}
_ => {
- buffer.update(&mut cx, |buffer, cx| buffer.randomly_edit(&mut rng, 5, cx));
+ buffer.update(cx, |buffer, cx| buffer.randomly_edit(&mut rng, 5, cx));
}
}
- if map.read_with(&cx, |map, cx| map.is_rewrapping(cx)) {
+ if map.read_with(cx, |map, cx| map.is_rewrapping(cx)) {
notifications.next().await.unwrap();
}
- let snapshot = map.update(&mut cx, |map, cx| map.snapshot(cx));
+ let snapshot = map.update(cx, |map, cx| map.snapshot(cx));
fold_count = snapshot.fold_count();
log::info!("buffer text: {:?}", snapshot.buffer_snapshot.text());
log::info!("fold text: {:?}", snapshot.folds_snapshot.text());
@@ -846,7 +846,7 @@ mod tests {
}
#[gpui::test]
- async fn test_chunks(mut cx: gpui::TestAppContext) {
+ async fn test_chunks(cx: &mut gpui::TestAppContext) {
use unindent::Unindent as _;
let text = r#"
@@ -914,7 +914,7 @@ mod tests {
]
);
- map.update(&mut cx, |map, cx| {
+ map.update(cx, |map, cx| {
map.fold(vec![Point::new(0, 6)..Point::new(3, 2)], cx)
});
assert_eq!(
@@ -931,7 +931,7 @@ mod tests {
}
#[gpui::test]
- async fn test_chunks_with_soft_wrapping(mut cx: gpui::TestAppContext) {
+ async fn test_chunks_with_soft_wrapping(cx: &mut gpui::TestAppContext) {
use unindent::Unindent as _;
cx.foreground().set_block_on_ticks(usize::MAX..=usize::MAX);
@@ -996,7 +996,7 @@ mod tests {
[("{}\n\n".to_string(), None)]
);
- map.update(&mut cx, |map, cx| {
+ map.update(cx, |map, cx| {
map.fold(vec![Point::new(0, 6)..Point::new(3, 2)], cx)
});
assert_eq!(
@@ -1010,7 +1010,7 @@ mod tests {
use text::Rope;
#[gpui::test(iterations = 100)]
- async fn test_random_wraps(mut cx: gpui::TestAppContext, mut rng: StdRng) {
+ async fn test_random_wraps(cx: &mut gpui::TestAppContext, mut rng: StdRng) {
cx.foreground().set_block_on_ticks(0..=50);
cx.foreground().forbid_parking();
let operations = env::var("OPERATIONS")
@@ -1043,7 +1043,7 @@ mod tests {
MultiBuffer::build_simple(&text, cx)
}
});
- let mut buffer_snapshot = buffer.read_with(&cx, |buffer, cx| buffer.snapshot(cx));
+ let mut buffer_snapshot = buffer.read_with(cx, |buffer, cx| buffer.snapshot(cx));
let (mut fold_map, folds_snapshot) = FoldMap::new(buffer_snapshot.clone());
let (tab_map, tabs_snapshot) = TabMap::new(folds_snapshot.clone(), tab_size);
log::info!("Unwrapped text (no folds): {:?}", buffer_snapshot.text());
@@ -1059,13 +1059,13 @@ mod tests {
let (wrap_map, _) =
cx.update(|cx| WrapMap::new(tabs_snapshot.clone(), font_id, font_size, wrap_width, cx));
- let mut notifications = observe(&wrap_map, &mut cx);
+ let mut notifications = observe(&wrap_map, cx);
- if wrap_map.read_with(&cx, |map, _| map.is_rewrapping()) {
+ if wrap_map.read_with(cx, |map, _| map.is_rewrapping()) {
notifications.next().await.unwrap();
}
- let (initial_snapshot, _) = wrap_map.update(&mut cx, |map, cx| {
+ let (initial_snapshot, _) = wrap_map.update(cx, |map, cx| {
assert!(!map.is_rewrapping());
map.sync(tabs_snapshot.clone(), Vec::new(), cx)
});
@@ -1091,20 +1091,20 @@ mod tests {
Some(rng.gen_range(0.0..=1000.0))
};
log::info!("Setting wrap width to {:?}", wrap_width);
- wrap_map.update(&mut cx, |map, cx| map.set_wrap_width(wrap_width, cx));
+ wrap_map.update(cx, |map, cx| map.set_wrap_width(wrap_width, cx));
}
20..=39 => {
for (folds_snapshot, fold_edits) in fold_map.randomly_mutate(&mut rng) {
let (tabs_snapshot, tab_edits) = tab_map.sync(folds_snapshot, fold_edits);
- let (mut snapshot, wrap_edits) = wrap_map
- .update(&mut cx, |map, cx| map.sync(tabs_snapshot, tab_edits, cx));
+ let (mut snapshot, wrap_edits) =
+ wrap_map.update(cx, |map, cx| map.sync(tabs_snapshot, tab_edits, cx));
snapshot.check_invariants();
snapshot.verify_chunks(&mut rng);
edits.push((snapshot, wrap_edits));
}
}
_ => {
- buffer.update(&mut cx, |buffer, cx| {
+ buffer.update(cx, |buffer, cx| {
let subscription = buffer.subscribe();
let edit_count = rng.gen_range(1..=5);
buffer.randomly_mutate(&mut rng, edit_count, cx);
@@ -1125,24 +1125,23 @@ mod tests {
let unwrapped_text = tabs_snapshot.text();
let expected_text = wrap_text(&unwrapped_text, wrap_width, &mut line_wrapper);
- let (mut snapshot, wrap_edits) = wrap_map.update(&mut cx, |map, cx| {
- map.sync(tabs_snapshot.clone(), tab_edits, cx)
- });
+ let (mut snapshot, wrap_edits) =
+ wrap_map.update(cx, |map, cx| map.sync(tabs_snapshot.clone(), tab_edits, cx));
snapshot.check_invariants();
snapshot.verify_chunks(&mut rng);
edits.push((snapshot, wrap_edits));
- if wrap_map.read_with(&cx, |map, _| map.is_rewrapping()) && rng.gen_bool(0.4) {
+ if wrap_map.read_with(cx, |map, _| map.is_rewrapping()) && rng.gen_bool(0.4) {
log::info!("Waiting for wrapping to finish");
- while wrap_map.read_with(&cx, |map, _| map.is_rewrapping()) {
+ while wrap_map.read_with(cx, |map, _| map.is_rewrapping()) {
notifications.next().await.unwrap();
}
- wrap_map.read_with(&cx, |map, _| assert!(map.pending_edits.is_empty()));
+ wrap_map.read_with(cx, |map, _| assert!(map.pending_edits.is_empty()));
}
- if !wrap_map.read_with(&cx, |map, _| map.is_rewrapping()) {
+ if !wrap_map.read_with(cx, |map, _| map.is_rewrapping()) {
let (mut wrapped_snapshot, wrap_edits) =
- wrap_map.update(&mut cx, |map, cx| map.sync(tabs_snapshot, Vec::new(), cx));
+ wrap_map.update(cx, |map, cx| map.sync(tabs_snapshot, Vec::new(), cx));
let actual_text = wrapped_snapshot.text();
let actual_longest_row = wrapped_snapshot.longest_row();
log::info!("Wrapping finished: {:?}", actual_text);
@@ -1220,13 +1219,13 @@ mod tests {
assert_eq!(initial_text.to_string(), snapshot_text.to_string());
}
- if wrap_map.read_with(&cx, |map, _| map.is_rewrapping()) {
+ if wrap_map.read_with(cx, |map, _| map.is_rewrapping()) {
log::info!("Waiting for wrapping to finish");
- while wrap_map.read_with(&cx, |map, _| map.is_rewrapping()) {
+ while wrap_map.read_with(cx, |map, _| map.is_rewrapping()) {
notifications.next().await.unwrap();
}
}
- wrap_map.read_with(&cx, |map, _| assert!(map.pending_edits.is_empty()));
+ wrap_map.read_with(cx, |map, _| assert!(map.pending_edits.is_empty()));
}
fn wrap_text(
@@ -7772,7 +7772,7 @@ mod tests {
}
#[gpui::test]
- async fn test_select_larger_smaller_syntax_node(mut cx: gpui::TestAppContext) {
+ async fn test_select_larger_smaller_syntax_node(cx: &mut gpui::TestAppContext) {
let settings = cx.read(Settings::test);
let language = Arc::new(Language::new(
LanguageConfig::default(),
@@ -7794,7 +7794,7 @@ mod tests {
view.condition(&cx, |view, cx| !view.buffer.read(cx).is_parsing(cx))
.await;
- view.update(&mut cx, |view, cx| {
+ view.update(cx, |view, cx| {
view.select_display_ranges(
&[
DisplayPoint::new(0, 25)..DisplayPoint::new(0, 25),
@@ -7806,7 +7806,7 @@ mod tests {
view.select_larger_syntax_node(&SelectLargerSyntaxNode, cx);
});
assert_eq!(
- view.update(&mut cx, |view, cx| view.selected_display_ranges(cx)),
+ view.update(cx, |view, cx| view.selected_display_ranges(cx)),
&[
DisplayPoint::new(0, 23)..DisplayPoint::new(0, 27),
DisplayPoint::new(2, 35)..DisplayPoint::new(2, 7),
@@ -7814,50 +7814,50 @@ mod tests {
]
);
- view.update(&mut cx, |view, cx| {
+ view.update(cx, |view, cx| {
view.select_larger_syntax_node(&SelectLargerSyntaxNode, cx);
});
assert_eq!(
- view.update(&mut cx, |view, cx| view.selected_display_ranges(cx)),
+ view.update(cx, |view, cx| view.selected_display_ranges(cx)),
&[
DisplayPoint::new(0, 16)..DisplayPoint::new(0, 28),
DisplayPoint::new(4, 1)..DisplayPoint::new(2, 0),
]
);
- view.update(&mut cx, |view, cx| {
+ view.update(cx, |view, cx| {
view.select_larger_syntax_node(&SelectLargerSyntaxNode, cx);
});
assert_eq!(
- view.update(&mut cx, |view, cx| view.selected_display_ranges(cx)),
+ view.update(cx, |view, cx| view.selected_display_ranges(cx)),
&[DisplayPoint::new(5, 0)..DisplayPoint::new(0, 0)]
);
// Trying to expand the selected syntax node one more time has no effect.
- view.update(&mut cx, |view, cx| {
+ view.update(cx, |view, cx| {
view.select_larger_syntax_node(&SelectLargerSyntaxNode, cx);
});
assert_eq!(
- view.update(&mut cx, |view, cx| view.selected_display_ranges(cx)),
+ view.update(cx, |view, cx| view.selected_display_ranges(cx)),
&[DisplayPoint::new(5, 0)..DisplayPoint::new(0, 0)]
);
- view.update(&mut cx, |view, cx| {
+ view.update(cx, |view, cx| {
view.select_smaller_syntax_node(&SelectSmallerSyntaxNode, cx);
});
assert_eq!(
- view.update(&mut cx, |view, cx| view.selected_display_ranges(cx)),
+ view.update(cx, |view, cx| view.selected_display_ranges(cx)),
&[
DisplayPoint::new(0, 16)..DisplayPoint::new(0, 28),
DisplayPoint::new(4, 1)..DisplayPoint::new(2, 0),
]
);
- view.update(&mut cx, |view, cx| {
+ view.update(cx, |view, cx| {
view.select_smaller_syntax_node(&SelectSmallerSyntaxNode, cx);
});
assert_eq!(
- view.update(&mut cx, |view, cx| view.selected_display_ranges(cx)),
+ view.update(cx, |view, cx| view.selected_display_ranges(cx)),
&[
DisplayPoint::new(0, 23)..DisplayPoint::new(0, 27),
DisplayPoint::new(2, 35)..DisplayPoint::new(2, 7),
@@ -7865,11 +7865,11 @@ mod tests {
]
);
- view.update(&mut cx, |view, cx| {
+ view.update(cx, |view, cx| {
view.select_smaller_syntax_node(&SelectSmallerSyntaxNode, cx);
});
assert_eq!(
- view.update(&mut cx, |view, cx| view.selected_display_ranges(cx)),
+ view.update(cx, |view, cx| view.selected_display_ranges(cx)),
&[
DisplayPoint::new(0, 25)..DisplayPoint::new(0, 25),
DisplayPoint::new(2, 24)..DisplayPoint::new(2, 12),
@@ -7878,11 +7878,11 @@ mod tests {
);
// Trying to shrink the selected syntax node one more time has no effect.
- view.update(&mut cx, |view, cx| {
+ view.update(cx, |view, cx| {
view.select_smaller_syntax_node(&SelectSmallerSyntaxNode, cx);
});
assert_eq!(
- view.update(&mut cx, |view, cx| view.selected_display_ranges(cx)),
+ view.update(cx, |view, cx| view.selected_display_ranges(cx)),
&[
DisplayPoint::new(0, 25)..DisplayPoint::new(0, 25),
DisplayPoint::new(2, 24)..DisplayPoint::new(2, 12),
@@ -7892,7 +7892,7 @@ mod tests {
// Ensure that we keep expanding the selection if the larger selection starts or ends within
// a fold.
- view.update(&mut cx, |view, cx| {
+ view.update(cx, |view, cx| {
view.fold_ranges(
vec![
Point::new(0, 21)..Point::new(0, 24),
@@ -7903,7 +7903,7 @@ mod tests {
view.select_larger_syntax_node(&SelectLargerSyntaxNode, cx);
});
assert_eq!(
- view.update(&mut cx, |view, cx| view.selected_display_ranges(cx)),
+ view.update(cx, |view, cx| view.selected_display_ranges(cx)),
&[
DisplayPoint::new(0, 16)..DisplayPoint::new(0, 28),
DisplayPoint::new(2, 35)..DisplayPoint::new(2, 7),
@@ -7913,7 +7913,7 @@ mod tests {
}
#[gpui::test]
- async fn test_autoindent_selections(mut cx: gpui::TestAppContext) {
+ async fn test_autoindent_selections(cx: &mut gpui::TestAppContext) {
let settings = cx.read(Settings::test);
let language = Arc::new(
Language::new(
@@ -7954,7 +7954,7 @@ mod tests {
.condition(&cx, |editor, cx| !editor.buffer.read(cx).is_parsing(cx))
.await;
- editor.update(&mut cx, |editor, cx| {
+ editor.update(cx, |editor, cx| {
editor.select_ranges([5..5, 8..8, 9..9], None, cx);
editor.newline(&Newline, cx);
assert_eq!(editor.text(cx), "fn a(\n \n) {\n \n}\n");
@@ -7970,7 +7970,7 @@ mod tests {
}
#[gpui::test]
- async fn test_autoclose_pairs(mut cx: gpui::TestAppContext) {
+ async fn test_autoclose_pairs(cx: &mut gpui::TestAppContext) {
let settings = cx.read(Settings::test);
let language = Arc::new(Language::new(
LanguageConfig {
@@ -8007,7 +8007,7 @@ mod tests {
view.condition(&cx, |view, cx| !view.buffer.read(cx).is_parsing(cx))
.await;
- view.update(&mut cx, |view, cx| {
+ view.update(cx, |view, cx| {
view.select_display_ranges(
&[
DisplayPoint::new(0, 0)..DisplayPoint::new(0, 1),
@@ -8081,7 +8081,7 @@ mod tests {
}
#[gpui::test]
- async fn test_snippets(mut cx: gpui::TestAppContext) {
+ async fn test_snippets(cx: &mut gpui::TestAppContext) {
let settings = cx.read(Settings::test);
let text = "
@@ -8093,7 +8093,7 @@ mod tests {
let buffer = cx.update(|cx| MultiBuffer::build_simple(&text, cx));
let (_, editor) = cx.add_window(|cx| build_editor(buffer, settings, cx));
- editor.update(&mut cx, |editor, cx| {
+ editor.update(cx, |editor, cx| {
let buffer = &editor.snapshot(cx).buffer_snapshot;
let snippet = Snippet::parse("f(${1:one}, ${2:two}, ${1:three})$0").unwrap();
let insertion_ranges = [
@@ -8188,7 +8188,7 @@ mod tests {
}
#[gpui::test]
- async fn test_completion(mut cx: gpui::TestAppContext) {
+ async fn test_completion(cx: &mut gpui::TestAppContext) {
let settings = cx.read(Settings::test);
let (language_server, mut fake) = cx.update(|cx| {
lsp::LanguageServer::fake_with_capabilities(
@@ -8213,23 +8213,23 @@ mod tests {
let fs = FakeFs::new(cx.background().clone());
fs.insert_file("/file", text).await;
- let project = Project::test(fs, &mut cx);
+ let project = Project::test(fs, cx);
let (worktree, relative_path) = project
- .update(&mut cx, |project, cx| {
+ .update(cx, |project, cx| {
project.find_or_create_local_worktree("/file", false, cx)
})
.await
.unwrap();
let project_path = ProjectPath {
- worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
+ worktree_id: worktree.read_with(cx, |worktree, _| worktree.id()),
path: relative_path.into(),
};
let buffer = project
- .update(&mut cx, |project, cx| project.open_buffer(project_path, cx))
+ .update(cx, |project, cx| project.open_buffer(project_path, cx))
.await
.unwrap();
- buffer.update(&mut cx, |buffer, cx| {
+ buffer.update(cx, |buffer, cx| {
buffer.set_language_server(Some(language_server), cx);
});
@@ -8238,7 +8238,7 @@ mod tests {
let (_, editor) = cx.add_window(|cx| build_editor(buffer, settings, cx));
- editor.update(&mut cx, |editor, cx| {
+ editor.update(cx, |editor, cx| {
editor.project = Some(project);
editor.select_ranges([Point::new(0, 3)..Point::new(0, 3)], None, cx);
editor.handle_input(&Input(".".to_string()), cx);
@@ -8258,7 +8258,7 @@ mod tests {
.condition(&cx, |editor, _| editor.context_menu_visible())
.await;
- let apply_additional_edits = editor.update(&mut cx, |editor, cx| {
+ let apply_additional_edits = editor.update(cx, |editor, cx| {
editor.move_down(&MoveDown, cx);
let apply_additional_edits = editor
.confirm_completion(&ConfirmCompletion(None), cx)
@@ -8282,7 +8282,7 @@ mod tests {
.await;
apply_additional_edits.await.unwrap();
assert_eq!(
- editor.read_with(&cx, |editor, cx| editor.text(cx)),
+ editor.read_with(cx, |editor, cx| editor.text(cx)),
"
one.second_completion
two
@@ -8292,7 +8292,7 @@ mod tests {
.unindent()
);
- editor.update(&mut cx, |editor, cx| {
+ editor.update(cx, |editor, cx| {
editor.select_ranges(
[
Point::new(1, 3)..Point::new(1, 3),
@@ -8323,7 +8323,7 @@ mod tests {
.condition(&cx, |editor, _| editor.context_menu_visible())
.await;
- editor.update(&mut cx, |editor, cx| {
+ editor.update(cx, |editor, cx| {
editor.handle_input(&Input("i".to_string()), cx);
});
@@ -8342,7 +8342,7 @@ mod tests {
.condition(&cx, |editor, _| editor.context_menu_visible())
.await;
- let apply_additional_edits = editor.update(&mut cx, |editor, cx| {
+ let apply_additional_edits = editor.update(cx, |editor, cx| {
let apply_additional_edits = editor
.confirm_completion(&ConfirmCompletion(None), cx)
.unwrap();
@@ -8421,7 +8421,7 @@ mod tests {
}
#[gpui::test]
- async fn test_toggle_comment(mut cx: gpui::TestAppContext) {
+ async fn test_toggle_comment(cx: &mut gpui::TestAppContext) {
let settings = cx.read(Settings::test);
let language = Arc::new(Language::new(
LanguageConfig {
@@ -8444,7 +8444,7 @@ mod tests {
let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx));
let (_, view) = cx.add_window(|cx| build_editor(buffer, settings, cx));
- view.update(&mut cx, |editor, cx| {
+ view.update(cx, |editor, cx| {
// If multiple selections intersect a line, the line is only
// toggled once.
editor.select_display_ranges(
@@ -8678,7 +8678,7 @@ mod tests {
}
#[gpui::test]
- async fn test_extra_newline_insertion(mut cx: gpui::TestAppContext) {
+ async fn test_extra_newline_insertion(cx: &mut gpui::TestAppContext) {
let settings = cx.read(Settings::test);
let language = Arc::new(Language::new(
LanguageConfig {
@@ -8715,7 +8715,7 @@ mod tests {
view.condition(&cx, |view, cx| !view.buffer.read(cx).is_parsing(cx))
.await;
- view.update(&mut cx, |view, cx| {
+ view.update(cx, |view, cx| {
view.select_display_ranges(
&[
DisplayPoint::new(0, 2)..DisplayPoint::new(0, 3),
@@ -421,7 +421,7 @@ mod tests {
use workspace::{Workspace, WorkspaceParams};
#[gpui::test]
- async fn test_matching_paths(mut cx: gpui::TestAppContext) {
+ async fn test_matching_paths(cx: &mut gpui::TestAppContext) {
let mut path_openers = Vec::new();
cx.update(|cx| {
super::init(cx);
@@ -447,7 +447,7 @@ mod tests {
let (window_id, workspace) = cx.add_window(|cx| Workspace::new(&params, cx));
params
.project
- .update(&mut cx, |project, cx| {
+ .update(cx, |project, cx| {
project.find_or_create_local_worktree("/root", false, cx)
})
.await
@@ -496,7 +496,7 @@ mod tests {
}
#[gpui::test]
- async fn test_matching_cancellation(mut cx: gpui::TestAppContext) {
+ async fn test_matching_cancellation(cx: &mut gpui::TestAppContext) {
let params = cx.update(WorkspaceParams::test);
let fs = params.fs.as_fake();
fs.insert_tree(
@@ -516,7 +516,7 @@ mod tests {
let (_, workspace) = cx.add_window(|cx| Workspace::new(&params, cx));
params
.project
- .update(&mut cx, |project, cx| {
+ .update(cx, |project, cx| {
project.find_or_create_local_worktree("/dir", false, cx)
})
.await
@@ -533,12 +533,12 @@ mod tests {
let query = "hi".to_string();
finder
- .update(&mut cx, |f, cx| f.spawn_search(query.clone(), cx))
+ .update(cx, |f, cx| f.spawn_search(query.clone(), cx))
.unwrap()
.await;
- finder.read_with(&cx, |f, _| assert_eq!(f.matches.len(), 5));
+ finder.read_with(cx, |f, _| assert_eq!(f.matches.len(), 5));
- finder.update(&mut cx, |finder, cx| {
+ finder.update(cx, |finder, cx| {
let matches = finder.matches.clone();
// Simulate a search being cancelled after the time limit,
@@ -571,7 +571,7 @@ mod tests {
}
#[gpui::test]
- async fn test_single_file_worktrees(mut cx: gpui::TestAppContext) {
+ async fn test_single_file_worktrees(cx: &mut gpui::TestAppContext) {
let params = cx.update(WorkspaceParams::test);
params
.fs
@@ -582,7 +582,7 @@ mod tests {
let (_, workspace) = cx.add_window(|cx| Workspace::new(&params, cx));
params
.project
- .update(&mut cx, |project, cx| {
+ .update(cx, |project, cx| {
project.find_or_create_local_worktree("/root/the-parent-dir/the-file", false, cx)
})
.await
@@ -600,7 +600,7 @@ mod tests {
// Even though there is only one worktree, that worktree's filename
// is included in the matching, because the worktree is a single file.
finder
- .update(&mut cx, |f, cx| f.spawn_search("thf".into(), cx))
+ .update(cx, |f, cx| f.spawn_search("thf".into(), cx))
.unwrap()
.await;
cx.read(|cx| {
@@ -618,14 +618,14 @@ mod tests {
// Since the worktree root is a file, searching for its name followed by a slash does
// not match anything.
finder
- .update(&mut cx, |f, cx| f.spawn_search("thf/".into(), cx))
+ .update(cx, |f, cx| f.spawn_search("thf/".into(), cx))
.unwrap()
.await;
- finder.read_with(&cx, |f, _| assert_eq!(f.matches.len(), 0));
+ finder.read_with(cx, |f, _| assert_eq!(f.matches.len(), 0));
}
#[gpui::test(retries = 5)]
- async fn test_multiple_matches_with_same_relative_path(mut cx: gpui::TestAppContext) {
+ async fn test_multiple_matches_with_same_relative_path(cx: &mut gpui::TestAppContext) {
let params = cx.update(WorkspaceParams::test);
params
.fs
@@ -642,7 +642,7 @@ mod tests {
let (_, workspace) = cx.add_window(|cx| Workspace::new(&params, cx));
workspace
- .update(&mut cx, |workspace, cx| {
+ .update(cx, |workspace, cx| {
workspace.open_paths(
&[PathBuf::from("/root/dir1"), PathBuf::from("/root/dir2")],
cx,
@@ -662,12 +662,12 @@ mod tests {
// Run a search that matches two files with the same relative path.
finder
- .update(&mut cx, |f, cx| f.spawn_search("a.t".into(), cx))
+ .update(cx, |f, cx| f.spawn_search("a.t".into(), cx))
.unwrap()
.await;
// Can switch between different matches with the same relative path.
- finder.update(&mut cx, |f, cx| {
+ finder.update(cx, |f, cx| {
assert_eq!(f.matches.len(), 2);
assert_eq!(f.selected_index(), 0);
f.select_next(&SelectNext, cx);
@@ -8,15 +8,16 @@ version = "0.1.0"
path = "src/gpui.rs"
[features]
-test-support = ["env_logger", "collections/test-support"]
+test-support = ["backtrace", "dhat", "env_logger", "collections/test-support"]
[dependencies]
collections = { path = "../collections" }
gpui_macros = { path = "../gpui_macros" }
sum_tree = { path = "../sum_tree" }
async-task = "4.0.3"
-backtrace = "0.3"
+backtrace = { version = "0.3", optional = true }
ctor = "0.1"
+dhat = { version = "0.3", optional = true }
env_logger = { version = "0.8", optional = true }
etagere = "0.2"
futures = "0.3"
@@ -48,7 +49,9 @@ bindgen = "0.58.1"
cc = "1.0.67"
[dev-dependencies]
+backtrace = "0.3"
collections = { path = "../collections", features = ["test-support"] }
+dhat = "0.3"
env_logger = "0.8"
png = "0.16"
simplelog = "0.9"
@@ -4,14 +4,15 @@ use crate::{
keymap::{self, Keystroke},
platform::{self, CursorStyle, Platform, PromptLevel, WindowOptions},
presenter::Presenter,
- util::{post_inc, timeout},
+ util::post_inc,
AssetCache, AssetSource, ClipboardItem, FontCache, PathPromptOptions, TextLayoutCache,
};
use anyhow::{anyhow, Result};
use keymap::MatchResult;
+use lazy_static::lazy_static;
use parking_lot::Mutex;
use platform::Event;
-use postage::{mpsc, oneshot, sink::Sink as _, stream::Stream as _};
+use postage::oneshot;
use smol::prelude::*;
use std::{
any::{type_name, Any, TypeId},
@@ -235,7 +236,7 @@ pub struct App(Rc<RefCell<MutableAppContext>>);
#[derive(Clone)]
pub struct AsyncAppContext(Rc<RefCell<MutableAppContext>>);
-#[derive(Clone)]
+#[cfg(any(test, feature = "test-support"))]
pub struct TestAppContext {
cx: Rc<RefCell<MutableAppContext>>,
foreground_platform: Rc<platform::test::ForegroundPlatform>,
@@ -252,6 +253,7 @@ impl App {
platform.clone(),
foreground_platform.clone(),
Arc::new(FontCache::new(platform.fonts())),
+ Default::default(),
asset_source,
))));
@@ -382,6 +384,7 @@ impl App {
}
}
+#[cfg(any(test, feature = "test-support"))]
impl TestAppContext {
pub fn new(
foreground_platform: Rc<platform::test::ForegroundPlatform>,
@@ -389,6 +392,7 @@ impl TestAppContext {
foreground: Rc<executor::Foreground>,
background: Arc<executor::Background>,
font_cache: Arc<FontCache>,
+ leak_detector: Arc<Mutex<LeakDetector>>,
first_entity_id: usize,
) -> Self {
let mut cx = MutableAppContext::new(
@@ -397,6 +401,11 @@ impl TestAppContext {
platform,
foreground_platform.clone(),
font_cache,
+ RefCounts {
+ #[cfg(any(test, feature = "test-support"))]
+ leak_detector,
+ ..Default::default()
+ },
(),
);
cx.next_entity_id = first_entity_id;
@@ -536,6 +545,8 @@ impl TestAppContext {
}
pub fn simulate_prompt_answer(&self, window_id: usize, answer: usize) {
+ use postage::prelude::Sink as _;
+
let mut state = self.cx.borrow_mut();
let (_, window) = state
.presenters_and_platform_windows
@@ -551,6 +562,11 @@ impl TestAppContext {
.expect("prompt was not called");
let _ = done_tx.try_send(answer);
}
+
+ #[cfg(any(test, feature = "test-support"))]
+ pub fn leak_detector(&self) -> Arc<Mutex<LeakDetector>> {
+ self.cx.borrow().leak_detector()
+ }
}
impl AsyncAppContext {
@@ -665,6 +681,7 @@ impl ReadViewWith for AsyncAppContext {
}
}
+#[cfg(any(test, feature = "test-support"))]
impl UpdateModel for TestAppContext {
fn update_model<T: Entity, O>(
&mut self,
@@ -675,6 +692,7 @@ impl UpdateModel for TestAppContext {
}
}
+#[cfg(any(test, feature = "test-support"))]
impl ReadModelWith for TestAppContext {
fn read_model_with<E: Entity, T>(
&self,
@@ -687,6 +705,7 @@ impl ReadModelWith for TestAppContext {
}
}
+#[cfg(any(test, feature = "test-support"))]
impl UpdateView for TestAppContext {
fn update_view<T, S>(
&mut self,
@@ -700,6 +719,7 @@ impl UpdateView for TestAppContext {
}
}
+#[cfg(any(test, feature = "test-support"))]
impl ReadViewWith for TestAppContext {
fn read_view_with<V, T>(
&self,
@@ -741,7 +761,6 @@ pub struct MutableAppContext {
release_observations: Arc<Mutex<HashMap<usize, BTreeMap<usize, ReleaseObservationCallback>>>>,
presenters_and_platform_windows:
HashMap<usize, (Rc<RefCell<Presenter>>, Box<dyn platform::Window>)>,
- debug_elements_callbacks: HashMap<usize, Box<dyn Fn(&AppContext) -> crate::json::Value>>,
foreground: Rc<executor::Foreground>,
pending_effects: VecDeque<Effect>,
pending_notifications: HashSet<usize>,
@@ -758,8 +777,8 @@ impl MutableAppContext {
platform: Arc<dyn platform::Platform>,
foreground_platform: Rc<dyn platform::ForegroundPlatform>,
font_cache: Arc<FontCache>,
+ ref_counts: RefCounts,
asset_source: impl AssetSource,
- // entity_drop_tx:
) -> Self {
Self {
weak_self: None,
@@ -771,7 +790,7 @@ impl MutableAppContext {
windows: Default::default(),
app_states: Default::default(),
element_states: Default::default(),
- ref_counts: Arc::new(Mutex::new(RefCounts::default())),
+ ref_counts: Arc::new(Mutex::new(ref_counts)),
background,
font_cache,
platform,
@@ -788,7 +807,6 @@ impl MutableAppContext {
observations: Default::default(),
release_observations: Default::default(),
presenters_and_platform_windows: HashMap::new(),
- debug_elements_callbacks: HashMap::new(),
foreground,
pending_effects: VecDeque::new(),
pending_notifications: HashSet::new(),
@@ -829,11 +847,11 @@ impl MutableAppContext {
}
}
- fn remove_all_windows(&mut self) {
+ pub fn remove_all_windows(&mut self) {
for (window_id, _) in self.cx.windows.drain() {
self.presenters_and_platform_windows.remove(&window_id);
}
- self.remove_dropped_entities();
+ self.flush_effects();
}
pub fn platform(&self) -> Arc<dyn platform::Platform> {
@@ -852,18 +870,10 @@ impl MutableAppContext {
&self.cx.background
}
- pub fn on_debug_elements<F>(&mut self, window_id: usize, callback: F)
- where
- F: 'static + Fn(&AppContext) -> crate::json::Value,
- {
- self.debug_elements_callbacks
- .insert(window_id, Box::new(callback));
- }
-
pub fn debug_elements(&self, window_id: usize) -> Option<crate::json::Value> {
- self.debug_elements_callbacks
+ self.presenters_and_platform_windows
.get(&window_id)
- .map(|debug_elements| debug_elements(&self.cx))
+ .and_then(|(presenter, _)| presenter.borrow().debug_elements(self))
}
pub fn add_action<A, V, F>(&mut self, handler: F)
@@ -1383,7 +1393,6 @@ impl MutableAppContext {
pub fn remove_window(&mut self, window_id: usize) {
self.cx.windows.remove(&window_id);
self.presenters_and_platform_windows.remove(&window_id);
- self.remove_dropped_entities();
self.flush_effects();
}
@@ -1435,10 +1444,6 @@ impl MutableAppContext {
self.presenters_and_platform_windows
.insert(window_id, (presenter.clone(), window));
-
- self.on_debug_elements(window_id, move |cx| {
- presenter.borrow().debug_elements(cx).unwrap()
- });
}
pub fn build_presenter(&mut self, window_id: usize, titlebar_height: f32) -> Presenter {
@@ -1808,6 +1813,11 @@ impl MutableAppContext {
pub fn read_from_clipboard(&self) -> Option<ClipboardItem> {
self.cx.platform.read_from_clipboard()
}
+
+ #[cfg(any(test, feature = "test-support"))]
+ pub fn leak_detector(&self) -> Arc<Mutex<LeakDetector>> {
+ self.cx.ref_counts.lock().leak_detector.clone()
+ }
}
impl ReadModel for MutableAppContext {
@@ -2003,12 +2013,11 @@ impl UpgradeModelHandle for AppContext {
fn upgrade_any_model_handle(&self, handle: &AnyWeakModelHandle) -> Option<AnyModelHandle> {
if self.models.contains_key(&handle.model_id) {
- self.ref_counts.lock().inc_model(handle.model_id);
- Some(AnyModelHandle {
- model_id: handle.model_id,
- model_type: handle.model_type,
- ref_counts: self.ref_counts.clone(),
- })
+ Some(AnyModelHandle::new(
+ handle.model_id,
+ handle.model_type,
+ self.ref_counts.clone(),
+ ))
} else {
None
}
@@ -2814,19 +2823,33 @@ pub enum EntityLocation {
View(usize, usize),
}
-pub struct ModelHandle<T> {
+pub struct ModelHandle<T: Entity> {
model_id: usize,
model_type: PhantomData<T>,
ref_counts: Arc<Mutex<RefCounts>>,
+
+ #[cfg(any(test, feature = "test-support"))]
+ handle_id: usize,
}
impl<T: Entity> ModelHandle<T> {
fn new(model_id: usize, ref_counts: &Arc<Mutex<RefCounts>>) -> Self {
ref_counts.lock().inc_model(model_id);
+
+ #[cfg(any(test, feature = "test-support"))]
+ let handle_id = ref_counts
+ .lock()
+ .leak_detector
+ .lock()
+ .handle_created(Some(type_name::<T>()), model_id);
+
Self {
model_id,
model_type: PhantomData,
ref_counts: ref_counts.clone(),
+
+ #[cfg(any(test, feature = "test-support"))]
+ handle_id,
}
}
@@ -2866,8 +2889,11 @@ impl<T: Entity> ModelHandle<T> {
})
}
+ #[cfg(any(test, feature = "test-support"))]
pub fn next_notification(&self, cx: &TestAppContext) -> impl Future<Output = ()> {
- let (mut tx, mut rx) = mpsc::channel(1);
+ use postage::prelude::{Sink as _, Stream as _};
+
+ let (mut tx, mut rx) = postage::mpsc::channel(1);
let mut cx = cx.cx.borrow_mut();
let subscription = cx.observe(self, move |_, _| {
tx.try_send(()).ok();
@@ -2880,7 +2906,7 @@ impl<T: Entity> ModelHandle<T> {
};
async move {
- let notification = timeout(duration, rx.recv())
+ let notification = crate::util::timeout(duration, rx.recv())
.await
.expect("next notification timed out");
drop(subscription);
@@ -2888,11 +2914,14 @@ impl<T: Entity> ModelHandle<T> {
}
}
+ #[cfg(any(test, feature = "test-support"))]
pub fn next_event(&self, cx: &TestAppContext) -> impl Future<Output = T::Event>
where
T::Event: Clone,
{
- let (mut tx, mut rx) = mpsc::channel(1);
+ use postage::prelude::{Sink as _, Stream as _};
+
+ let (mut tx, mut rx) = postage::mpsc::channel(1);
let mut cx = cx.cx.borrow_mut();
let subscription = cx.subscribe(self, move |_, event, _| {
tx.blocking_send(event.clone()).ok();
@@ -2905,7 +2934,7 @@ impl<T: Entity> ModelHandle<T> {
};
async move {
- let event = timeout(duration, rx.recv())
+ let event = crate::util::timeout(duration, rx.recv())
.await
.expect("next event timed out");
drop(subscription);
@@ -2913,12 +2942,15 @@ impl<T: Entity> ModelHandle<T> {
}
}
+ #[cfg(any(test, feature = "test-support"))]
pub fn condition(
&self,
cx: &TestAppContext,
mut predicate: impl FnMut(&T, &AppContext) -> bool,
) -> impl Future<Output = ()> {
- let (tx, mut rx) = mpsc::channel(1024);
+ use postage::prelude::{Sink as _, Stream as _};
+
+ let (tx, mut rx) = postage::mpsc::channel(1024);
let mut cx = cx.cx.borrow_mut();
let subscriptions = (
@@ -2945,7 +2977,7 @@ impl<T: Entity> ModelHandle<T> {
};
async move {
- timeout(duration, async move {
+ crate::util::timeout(duration, async move {
loop {
{
let cx = cx.borrow();
@@ -2975,44 +3007,39 @@ impl<T: Entity> ModelHandle<T> {
}
}
-impl<T> Clone for ModelHandle<T> {
+impl<T: Entity> Clone for ModelHandle<T> {
fn clone(&self) -> Self {
- self.ref_counts.lock().inc_model(self.model_id);
- Self {
- model_id: self.model_id,
- model_type: PhantomData,
- ref_counts: self.ref_counts.clone(),
- }
+ Self::new(self.model_id, &self.ref_counts)
}
}
-impl<T> PartialEq for ModelHandle<T> {
+impl<T: Entity> PartialEq for ModelHandle<T> {
fn eq(&self, other: &Self) -> bool {
self.model_id == other.model_id
}
}
-impl<T> Eq for ModelHandle<T> {}
+impl<T: Entity> Eq for ModelHandle<T> {}
-impl<T> PartialEq<WeakModelHandle<T>> for ModelHandle<T> {
+impl<T: Entity> PartialEq<WeakModelHandle<T>> for ModelHandle<T> {
fn eq(&self, other: &WeakModelHandle<T>) -> bool {
self.model_id == other.model_id
}
}
-impl<T> Hash for ModelHandle<T> {
+impl<T: Entity> Hash for ModelHandle<T> {
fn hash<H: Hasher>(&self, state: &mut H) {
self.model_id.hash(state);
}
}
-impl<T> std::borrow::Borrow<usize> for ModelHandle<T> {
+impl<T: Entity> std::borrow::Borrow<usize> for ModelHandle<T> {
fn borrow(&self) -> &usize {
&self.model_id
}
}
-impl<T> Debug for ModelHandle<T> {
+impl<T: Entity> Debug for ModelHandle<T> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_tuple(&format!("ModelHandle<{}>", type_name::<T>()))
.field(&self.model_id)
@@ -3020,12 +3047,19 @@ impl<T> Debug for ModelHandle<T> {
}
}
-unsafe impl<T> Send for ModelHandle<T> {}
-unsafe impl<T> Sync for ModelHandle<T> {}
+unsafe impl<T: Entity> Send for ModelHandle<T> {}
+unsafe impl<T: Entity> Sync for ModelHandle<T> {}
-impl<T> Drop for ModelHandle<T> {
+impl<T: Entity> Drop for ModelHandle<T> {
fn drop(&mut self) {
- self.ref_counts.lock().dec_model(self.model_id);
+ let mut ref_counts = self.ref_counts.lock();
+ ref_counts.dec_model(self.model_id);
+
+ #[cfg(any(test, feature = "test-support"))]
+ ref_counts
+ .leak_detector
+ .lock()
+ .handle_dropped(self.model_id, self.handle_id);
}
}
@@ -3111,16 +3145,28 @@ pub struct ViewHandle<T> {
view_id: usize,
view_type: PhantomData<T>,
ref_counts: Arc<Mutex<RefCounts>>,
+ #[cfg(any(test, feature = "test-support"))]
+ handle_id: usize,
}
impl<T: View> ViewHandle<T> {
fn new(window_id: usize, view_id: usize, ref_counts: &Arc<Mutex<RefCounts>>) -> Self {
ref_counts.lock().inc_view(window_id, view_id);
+ #[cfg(any(test, feature = "test-support"))]
+ let handle_id = ref_counts
+ .lock()
+ .leak_detector
+ .lock()
+ .handle_created(Some(type_name::<T>()), view_id);
+
Self {
window_id,
view_id,
view_type: PhantomData,
ref_counts: ref_counts.clone(),
+
+ #[cfg(any(test, feature = "test-support"))]
+ handle_id,
}
}
@@ -3180,8 +3226,11 @@ impl<T: View> ViewHandle<T> {
.map_or(false, |focused_id| focused_id == self.view_id)
}
+ #[cfg(any(test, feature = "test-support"))]
pub fn next_notification(&self, cx: &TestAppContext) -> impl Future<Output = ()> {
- let (mut tx, mut rx) = mpsc::channel(1);
+ use postage::prelude::{Sink as _, Stream as _};
+
+ let (mut tx, mut rx) = postage::mpsc::channel(1);
let mut cx = cx.cx.borrow_mut();
let subscription = cx.observe(self, move |_, _| {
tx.try_send(()).ok();
@@ -3194,7 +3243,7 @@ impl<T: View> ViewHandle<T> {
};
async move {
- let notification = timeout(duration, rx.recv())
+ let notification = crate::util::timeout(duration, rx.recv())
.await
.expect("next notification timed out");
drop(subscription);
@@ -3202,12 +3251,15 @@ impl<T: View> ViewHandle<T> {
}
}
+ #[cfg(any(test, feature = "test-support"))]
pub fn condition(
&self,
cx: &TestAppContext,
mut predicate: impl FnMut(&T, &AppContext) -> bool,
) -> impl Future<Output = ()> {
- let (tx, mut rx) = mpsc::channel(1024);
+ use postage::prelude::{Sink as _, Stream as _};
+
+ let (tx, mut rx) = postage::mpsc::channel(1024);
let mut cx = cx.cx.borrow_mut();
let subscriptions = self.update(&mut *cx, |_, cx| {
@@ -3236,7 +3288,7 @@ impl<T: View> ViewHandle<T> {
};
async move {
- timeout(duration, async move {
+ crate::util::timeout(duration, async move {
loop {
{
let cx = cx.borrow();
@@ -3266,17 +3318,9 @@ impl<T: View> ViewHandle<T> {
}
}
-impl<T> Clone for ViewHandle<T> {
+impl<T: View> Clone for ViewHandle<T> {
fn clone(&self) -> Self {
- self.ref_counts
- .lock()
- .inc_view(self.window_id, self.view_id);
- Self {
- window_id: self.window_id,
- view_id: self.view_id,
- view_type: PhantomData,
- ref_counts: self.ref_counts.clone(),
- }
+ ViewHandle::new(self.window_id, self.view_id, &self.ref_counts)
}
}
@@ -3302,6 +3346,12 @@ impl<T> Drop for ViewHandle<T> {
self.ref_counts
.lock()
.dec_view(self.window_id, self.view_id);
+ #[cfg(any(test, feature = "test-support"))]
+ self.ref_counts
+ .lock()
+ .leak_detector
+ .lock()
+ .handle_dropped(self.view_id, self.handle_id);
}
}
@@ -3333,9 +3383,37 @@ pub struct AnyViewHandle {
view_id: usize,
view_type: TypeId,
ref_counts: Arc<Mutex<RefCounts>>,
+
+ #[cfg(any(test, feature = "test-support"))]
+ handle_id: usize,
}
impl AnyViewHandle {
+ fn new(
+ window_id: usize,
+ view_id: usize,
+ view_type: TypeId,
+ ref_counts: Arc<Mutex<RefCounts>>,
+ ) -> Self {
+ ref_counts.lock().inc_view(window_id, view_id);
+
+ #[cfg(any(test, feature = "test-support"))]
+ let handle_id = ref_counts
+ .lock()
+ .leak_detector
+ .lock()
+ .handle_created(None, view_id);
+
+ Self {
+ window_id,
+ view_id,
+ view_type,
+ ref_counts,
+ #[cfg(any(test, feature = "test-support"))]
+ handle_id,
+ }
+ }
+
pub fn id(&self) -> usize {
self.view_id
}
@@ -3356,6 +3434,8 @@ impl AnyViewHandle {
view_id: self.view_id,
ref_counts: self.ref_counts.clone(),
view_type: PhantomData,
+ #[cfg(any(test, feature = "test-support"))]
+ handle_id: self.handle_id,
});
unsafe {
Arc::decrement_strong_count(&self.ref_counts);
@@ -3370,15 +3450,12 @@ impl AnyViewHandle {
impl Clone for AnyViewHandle {
fn clone(&self) -> Self {
- self.ref_counts
- .lock()
- .inc_view(self.window_id, self.view_id);
- Self {
- window_id: self.window_id,
- view_id: self.view_id,
- view_type: self.view_type,
- ref_counts: self.ref_counts.clone(),
- }
+ Self::new(
+ self.window_id,
+ self.view_id,
+ self.view_type,
+ self.ref_counts.clone(),
+ )
}
}
@@ -3390,16 +3467,12 @@ impl From<&AnyViewHandle> for AnyViewHandle {
impl<T: View> From<&ViewHandle<T>> for AnyViewHandle {
fn from(handle: &ViewHandle<T>) -> Self {
- handle
- .ref_counts
- .lock()
- .inc_view(handle.window_id, handle.view_id);
- AnyViewHandle {
- window_id: handle.window_id,
- view_id: handle.view_id,
- view_type: TypeId::of::<T>(),
- ref_counts: handle.ref_counts.clone(),
- }
+ Self::new(
+ handle.window_id,
+ handle.view_id,
+ TypeId::of::<T>(),
+ handle.ref_counts.clone(),
+ )
}
}
@@ -3410,6 +3483,8 @@ impl<T: View> From<ViewHandle<T>> for AnyViewHandle {
view_id: handle.view_id,
view_type: TypeId::of::<T>(),
ref_counts: handle.ref_counts.clone(),
+ #[cfg(any(test, feature = "test-support"))]
+ handle_id: handle.handle_id,
};
unsafe {
Arc::decrement_strong_count(&handle.ref_counts);
@@ -3424,6 +3499,12 @@ impl Drop for AnyViewHandle {
self.ref_counts
.lock()
.dec_view(self.window_id, self.view_id);
+ #[cfg(any(test, feature = "test-support"))]
+ self.ref_counts
+ .lock()
+ .leak_detector
+ .lock()
+ .handle_dropped(self.view_id, self.handle_id);
}
}
@@ -3431,15 +3512,41 @@ pub struct AnyModelHandle {
model_id: usize,
model_type: TypeId,
ref_counts: Arc<Mutex<RefCounts>>,
+
+ #[cfg(any(test, feature = "test-support"))]
+ handle_id: usize,
}
impl AnyModelHandle {
+ fn new(model_id: usize, model_type: TypeId, ref_counts: Arc<Mutex<RefCounts>>) -> Self {
+ ref_counts.lock().inc_model(model_id);
+
+ #[cfg(any(test, feature = "test-support"))]
+ let handle_id = ref_counts
+ .lock()
+ .leak_detector
+ .lock()
+ .handle_created(None, model_id);
+
+ Self {
+ model_id,
+ model_type,
+ ref_counts,
+
+ #[cfg(any(test, feature = "test-support"))]
+ handle_id,
+ }
+ }
+
pub fn downcast<T: Entity>(self) -> Option<ModelHandle<T>> {
if self.is::<T>() {
let result = Some(ModelHandle {
model_id: self.model_id,
model_type: PhantomData,
ref_counts: self.ref_counts.clone(),
+
+ #[cfg(any(test, feature = "test-support"))]
+ handle_id: self.handle_id,
});
unsafe {
Arc::decrement_strong_count(&self.ref_counts);
@@ -3465,29 +3572,30 @@ impl AnyModelHandle {
impl<T: Entity> From<ModelHandle<T>> for AnyModelHandle {
fn from(handle: ModelHandle<T>) -> Self {
- handle.ref_counts.lock().inc_model(handle.model_id);
- Self {
- model_id: handle.model_id,
- model_type: TypeId::of::<T>(),
- ref_counts: handle.ref_counts.clone(),
- }
+ Self::new(
+ handle.model_id,
+ TypeId::of::<T>(),
+ handle.ref_counts.clone(),
+ )
}
}
impl Clone for AnyModelHandle {
fn clone(&self) -> Self {
- self.ref_counts.lock().inc_model(self.model_id);
- Self {
- model_id: self.model_id,
- model_type: self.model_type,
- ref_counts: self.ref_counts.clone(),
- }
+ Self::new(self.model_id, self.model_type, self.ref_counts.clone())
}
}
impl Drop for AnyModelHandle {
fn drop(&mut self) {
- self.ref_counts.lock().dec_model(self.model_id);
+ let mut ref_counts = self.ref_counts.lock();
+ ref_counts.dec_model(self.model_id);
+
+ #[cfg(any(test, feature = "test-support"))]
+ ref_counts
+ .leak_detector
+ .lock()
+ .handle_dropped(self.model_id, self.handle_id);
}
}
@@ -3502,6 +3610,15 @@ impl AnyWeakModelHandle {
}
}
+impl<T: Entity> From<WeakModelHandle<T>> for AnyWeakModelHandle {
+ fn from(handle: WeakModelHandle<T>) -> Self {
+ AnyWeakModelHandle {
+ model_id: handle.model_id,
+ model_type: TypeId::of::<T>(),
+ }
+ }
+}
+
pub struct WeakViewHandle<T> {
window_id: usize,
view_id: usize,
@@ -3694,6 +3811,77 @@ impl Drop for Subscription {
}
}
+lazy_static! {
+ static ref LEAK_BACKTRACE: bool =
+ std::env::var("LEAK_BACKTRACE").map_or(false, |b| !b.is_empty());
+}
+
+#[cfg(any(test, feature = "test-support"))]
+#[derive(Default)]
+pub struct LeakDetector {
+ next_handle_id: usize,
+ handle_backtraces: HashMap<
+ usize,
+ (
+ Option<&'static str>,
+ HashMap<usize, Option<backtrace::Backtrace>>,
+ ),
+ >,
+}
+
+#[cfg(any(test, feature = "test-support"))]
+impl LeakDetector {
+ fn handle_created(&mut self, type_name: Option<&'static str>, entity_id: usize) -> usize {
+ let handle_id = post_inc(&mut self.next_handle_id);
+ let entry = self.handle_backtraces.entry(entity_id).or_default();
+ let backtrace = if *LEAK_BACKTRACE {
+ Some(backtrace::Backtrace::new_unresolved())
+ } else {
+ None
+ };
+ if let Some(type_name) = type_name {
+ entry.0.get_or_insert(type_name);
+ }
+ entry.1.insert(handle_id, backtrace);
+ handle_id
+ }
+
+ fn handle_dropped(&mut self, entity_id: usize, handle_id: usize) {
+ if let Some((_, backtraces)) = self.handle_backtraces.get_mut(&entity_id) {
+ assert!(backtraces.remove(&handle_id).is_some());
+ if backtraces.is_empty() {
+ self.handle_backtraces.remove(&entity_id);
+ }
+ }
+ }
+
+ pub fn detect(&mut self) {
+ let mut found_leaks = false;
+ for (id, (type_name, backtraces)) in self.handle_backtraces.iter_mut() {
+ eprintln!(
+ "leaked {} handles to {:?} {}",
+ backtraces.len(),
+ type_name.unwrap_or("entity"),
+ id
+ );
+ for trace in backtraces.values_mut() {
+ if let Some(trace) = trace {
+ trace.resolve();
+ eprintln!("{:?}", crate::util::CwdBacktrace(trace));
+ }
+ }
+ found_leaks = true;
+ }
+
+ let hint = if *LEAK_BACKTRACE {
+ ""
+ } else {
+ " – set LEAK_BACKTRACE=1 for more information"
+ };
+ assert!(!found_leaks, "detected leaked handles{}", hint);
+ }
+}
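
Note on the block above: every handle registers with the leak detector when it is created and unregisters when it is dropped, and `detect()` panics at the end of a test if any registrations remain (setting LEAK_BACKTRACE=1 additionally records a creation backtrace per handle). A distilled, self-contained sketch of that bookkeeping pattern, independent of the gpui handle types:

```rust
use std::collections::HashMap;

// Minimal sketch of the register/unregister bookkeeping used by the leak
// detector above; entity and handle ids are plain integers here.
#[derive(Default)]
struct LeakDetector {
    next_handle_id: usize,
    live: HashMap<usize, Vec<usize>>, // entity id -> ids of live handles
}

impl LeakDetector {
    fn handle_created(&mut self, entity_id: usize) -> usize {
        let handle_id = self.next_handle_id;
        self.next_handle_id += 1;
        self.live.entry(entity_id).or_default().push(handle_id);
        handle_id
    }

    fn handle_dropped(&mut self, entity_id: usize, handle_id: usize) {
        if let Some(handles) = self.live.get_mut(&entity_id) {
            handles.retain(|id| *id != handle_id);
            if handles.is_empty() {
                self.live.remove(&entity_id);
            }
        }
    }

    fn detect(&self) {
        assert!(self.live.is_empty(), "detected leaked handles");
    }
}

fn main() {
    let mut detector = LeakDetector::default();
    let handle_id = detector.handle_created(7);
    detector.handle_dropped(7, handle_id);
    detector.detect(); // passes; skipping handle_dropped would panic here
}
```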
+
#[derive(Default)]
struct RefCounts {
entity_counts: HashMap<usize, usize>,
@@ -3701,6 +3889,9 @@ struct RefCounts {
dropped_models: HashSet<usize>,
dropped_views: HashSet<(usize, usize)>,
dropped_element_states: HashSet<ElementStateId>,
+
+ #[cfg(any(test, feature = "test-support"))]
+ leak_detector: Arc<Mutex<LeakDetector>>,
}
struct ElementStateRefCount {
@@ -3881,13 +4072,11 @@ mod tests {
let handle_1 = cx.add_model(|_| Model::default());
let handle_2 = cx.add_model(|_| Model::default());
- let handle_2b = handle_2.clone();
-
handle_1.update(cx, |_, c| {
- c.subscribe(&handle_2, move |model: &mut Model, _, event, c| {
+ c.subscribe(&handle_2, move |model: &mut Model, emitter, event, c| {
model.events.push(*event);
- c.subscribe(&handle_2b, |model, _, event, _| {
+ c.subscribe(&emitter, |model, _, event, _| {
model.events.push(*event * 2);
})
.detach();
@@ -3916,12 +4105,11 @@ mod tests {
let handle_1 = cx.add_model(|_| Model::default());
let handle_2 = cx.add_model(|_| Model::default());
- let handle_2b = handle_2.clone();
handle_1.update(cx, |_, c| {
c.observe(&handle_2, move |model, observed, c| {
model.events.push(observed.read(c).count);
- c.observe(&handle_2b, |model, observed, c| {
+ c.observe(&observed, |model, observed, c| {
model.events.push(observed.read(c).count * 2);
})
.detach();
@@ -4155,14 +4343,13 @@ mod tests {
let (window_id, handle_1) = cx.add_window(Default::default(), |_| View::default());
let handle_2 = cx.add_view(window_id, |_| View::default());
- let handle_2b = handle_2.clone();
let handle_3 = cx.add_model(|_| Model);
handle_1.update(cx, |_, c| {
- c.subscribe(&handle_2, move |me, _, event, c| {
+ c.subscribe(&handle_2, move |me, emitter, event, c| {
me.events.push(*event);
- c.subscribe(&handle_2b, |me, _, event, _| {
+ c.subscribe(&emitter, |me, _, event, _| {
me.events.push(*event * 2);
})
.detach();
@@ -4605,7 +4792,7 @@ mod tests {
}
#[crate::test(self)]
- async fn test_model_condition(mut cx: TestAppContext) {
+ async fn test_model_condition(cx: &mut TestAppContext) {
struct Counter(usize);
impl super::Entity for Counter {
@@ -4625,23 +4812,23 @@ mod tests {
let condition2 = model.condition(&cx, |model, _| model.0 == 3);
smol::pin!(condition1, condition2);
- model.update(&mut cx, |model, cx| model.inc(cx));
+ model.update(cx, |model, cx| model.inc(cx));
assert_eq!(poll_once(&mut condition1).await, None);
assert_eq!(poll_once(&mut condition2).await, None);
- model.update(&mut cx, |model, cx| model.inc(cx));
+ model.update(cx, |model, cx| model.inc(cx));
assert_eq!(poll_once(&mut condition1).await, Some(()));
assert_eq!(poll_once(&mut condition2).await, None);
- model.update(&mut cx, |model, cx| model.inc(cx));
+ model.update(cx, |model, cx| model.inc(cx));
assert_eq!(poll_once(&mut condition2).await, Some(()));
- model.update(&mut cx, |_, cx| cx.notify());
+ model.update(cx, |_, cx| cx.notify());
}
#[crate::test(self)]
#[should_panic]
- async fn test_model_condition_timeout(mut cx: TestAppContext) {
+ async fn test_model_condition_timeout(cx: &mut TestAppContext) {
struct Model;
impl super::Entity for Model {
@@ -4654,7 +4841,7 @@ mod tests {
#[crate::test(self)]
#[should_panic(expected = "model dropped with pending condition")]
- async fn test_model_condition_panic_on_drop(mut cx: TestAppContext) {
+ async fn test_model_condition_panic_on_drop(cx: &mut TestAppContext) {
struct Model;
impl super::Entity for Model {
@@ -4668,7 +4855,7 @@ mod tests {
}
#[crate::test(self)]
- async fn test_view_condition(mut cx: TestAppContext) {
+ async fn test_view_condition(cx: &mut TestAppContext) {
struct Counter(usize);
impl super::Entity for Counter {
@@ -4698,22 +4885,22 @@ mod tests {
let condition2 = view.condition(&cx, |view, _| view.0 == 3);
smol::pin!(condition1, condition2);
- view.update(&mut cx, |view, cx| view.inc(cx));
+ view.update(cx, |view, cx| view.inc(cx));
assert_eq!(poll_once(&mut condition1).await, None);
assert_eq!(poll_once(&mut condition2).await, None);
- view.update(&mut cx, |view, cx| view.inc(cx));
+ view.update(cx, |view, cx| view.inc(cx));
assert_eq!(poll_once(&mut condition1).await, Some(()));
assert_eq!(poll_once(&mut condition2).await, None);
- view.update(&mut cx, |view, cx| view.inc(cx));
+ view.update(cx, |view, cx| view.inc(cx));
assert_eq!(poll_once(&mut condition2).await, Some(()));
- view.update(&mut cx, |_, cx| cx.notify());
+ view.update(cx, |_, cx| cx.notify());
}
#[crate::test(self)]
#[should_panic]
- async fn test_view_condition_timeout(mut cx: TestAppContext) {
+ async fn test_view_condition_timeout(cx: &mut TestAppContext) {
struct View;
impl super::Entity for View {
@@ -4736,7 +4923,7 @@ mod tests {
#[crate::test(self)]
#[should_panic(expected = "view dropped with pending condition")]
- async fn test_view_condition_panic_on_drop(mut cx: TestAppContext) {
+ async fn test_view_condition_panic_on_drop(cx: &mut TestAppContext) {
struct View;
impl super::Entity for View {
@@ -1,28 +1,18 @@
use anyhow::{anyhow, Result};
use async_task::Runnable;
-use backtrace::{Backtrace, BacktraceFmt, BytesOrWideString};
-use collections::HashMap;
-use parking_lot::Mutex;
-use postage::{barrier, prelude::Stream as _};
-use rand::prelude::*;
-use smol::{channel, future::yield_now, prelude::*, Executor, Timer};
+use smol::{channel, prelude::*, Executor, Timer};
use std::{
any::Any,
- fmt::{self, Debug, Display},
+ fmt::{self, Display},
marker::PhantomData,
mem,
- ops::RangeInclusive,
pin::Pin,
rc::Rc,
- sync::{
- atomic::{AtomicBool, Ordering::SeqCst},
- Arc,
- },
+ sync::Arc,
task::{Context, Poll},
thread,
- time::{Duration, Instant},
+ time::Duration,
};
-use waker_fn::waker_fn;
use crate::{
platform::{self, Dispatcher},
@@ -34,6 +24,7 @@ pub enum Foreground {
dispatcher: Arc<dyn platform::Dispatcher>,
_not_send_or_sync: PhantomData<Rc<()>>,
},
+ #[cfg(any(test, feature = "test-support"))]
Deterministic {
cx_id: usize,
executor: Arc<Deterministic>,
@@ -41,9 +32,8 @@ pub enum Foreground {
}
pub enum Background {
- Deterministic {
- executor: Arc<Deterministic>,
- },
+ #[cfg(any(test, feature = "test-support"))]
+ Deterministic { executor: Arc<Deterministic> },
Production {
executor: Arc<smol::Executor<'static>>,
_stop: channel::Sender<()>,
@@ -70,39 +60,47 @@ pub enum Task<T> {
unsafe impl<T: Send> Send for Task<T> {}
+#[cfg(any(test, feature = "test-support"))]
struct DeterministicState {
- rng: StdRng,
+ rng: rand::prelude::StdRng,
seed: u64,
- scheduled_from_foreground: HashMap<usize, Vec<ForegroundRunnable>>,
+ scheduled_from_foreground: collections::HashMap<usize, Vec<ForegroundRunnable>>,
scheduled_from_background: Vec<Runnable>,
forbid_parking: bool,
- block_on_ticks: RangeInclusive<usize>,
- now: Instant,
- pending_timers: Vec<(Instant, barrier::Sender)>,
- waiting_backtrace: Option<Backtrace>,
+ block_on_ticks: std::ops::RangeInclusive<usize>,
+ now: std::time::Instant,
+ next_timer_id: usize,
+ pending_timers: Vec<(usize, std::time::Instant, postage::barrier::Sender)>,
+ waiting_backtrace: Option<backtrace::Backtrace>,
}
+#[cfg(any(test, feature = "test-support"))]
struct ForegroundRunnable {
runnable: Runnable,
main: bool,
}
+#[cfg(any(test, feature = "test-support"))]
pub struct Deterministic {
- state: Arc<Mutex<DeterministicState>>,
- parker: Mutex<parking::Parker>,
+ state: Arc<parking_lot::Mutex<DeterministicState>>,
+ parker: parking_lot::Mutex<parking::Parker>,
}
+#[cfg(any(test, feature = "test-support"))]
impl Deterministic {
pub fn new(seed: u64) -> Arc<Self> {
+ use rand::prelude::*;
+
Arc::new(Self {
- state: Arc::new(Mutex::new(DeterministicState {
+ state: Arc::new(parking_lot::Mutex::new(DeterministicState {
rng: StdRng::seed_from_u64(seed),
seed,
scheduled_from_foreground: Default::default(),
scheduled_from_background: Default::default(),
forbid_parking: false,
block_on_ticks: 0..=1000,
- now: Instant::now(),
+ now: std::time::Instant::now(),
+ next_timer_id: Default::default(),
pending_timers: Default::default(),
waiting_backtrace: None,
})),
@@ -156,9 +154,32 @@ impl Deterministic {
task
}
- fn run(&self, cx_id: usize, main_future: AnyLocalFuture) -> Box<dyn Any> {
+ fn run<'a>(
+ &self,
+ cx_id: usize,
+ main_future: Pin<Box<dyn 'a + Future<Output = Box<dyn Any>>>>,
+ ) -> Box<dyn Any> {
+ use std::sync::atomic::{AtomicBool, Ordering::SeqCst};
+
let woken = Arc::new(AtomicBool::new(false));
- let mut main_task = self.spawn_from_foreground(cx_id, main_future, true);
+
+ let state = self.state.clone();
+ let unparker = self.parker.lock().unparker();
+ let (runnable, mut main_task) = unsafe {
+ async_task::spawn_unchecked(main_future, move |runnable| {
+ let mut state = state.lock();
+ state
+ .scheduled_from_foreground
+ .entry(cx_id)
+ .or_default()
+ .push(ForegroundRunnable {
+ runnable,
+ main: true,
+ });
+ unparker.unpark();
+ })
+ };
+ runnable.schedule();
loop {
if let Some(result) = self.run_internal(woken.clone(), Some(&mut main_task)) {
@@ -174,18 +195,22 @@ impl Deterministic {
}
}
- fn run_until_parked(&self) {
+ pub fn run_until_parked(&self) {
+ use std::sync::atomic::AtomicBool;
let woken = Arc::new(AtomicBool::new(false));
self.run_internal(woken, None);
}
fn run_internal(
&self,
- woken: Arc<AtomicBool>,
+ woken: Arc<std::sync::atomic::AtomicBool>,
mut main_task: Option<&mut AnyLocalTask>,
) -> Option<Box<dyn Any>> {
+ use rand::prelude::*;
+ use std::sync::atomic::Ordering::SeqCst;
+
let unparker = self.parker.lock().unparker();
- let waker = waker_fn(move || {
+ let waker = waker_fn::waker_fn(move || {
woken.store(true, SeqCst);
unparker.unpark();
});
@@ -197,6 +222,12 @@ impl Deterministic {
if state.scheduled_from_foreground.is_empty()
&& state.scheduled_from_background.is_empty()
{
+ if let Some(main_task) = main_task {
+ if let Poll::Ready(result) = main_task.poll(&mut cx) {
+ return Some(result);
+ }
+ }
+
return None;
}
@@ -240,8 +271,10 @@ impl Deterministic {
where
F: Unpin + Future<Output = T>,
{
+ use rand::prelude::*;
+
let unparker = self.parker.lock().unparker();
- let waker = waker_fn(move || {
+ let waker = waker_fn::waker_fn(move || {
unparker.unpark();
});
@@ -272,17 +305,30 @@ impl Deterministic {
None
}
+
+ pub fn advance_clock(&self, duration: Duration) {
+ let mut state = self.state.lock();
+ state.now += duration;
+ let now = state.now;
+ let mut pending_timers = mem::take(&mut state.pending_timers);
+ drop(state);
+
+ pending_timers.retain(|(_, wakeup, _)| *wakeup > now);
+ self.state.lock().pending_timers.extend(pending_timers);
+ }
}
+#[cfg(any(test, feature = "test-support"))]
impl DeterministicState {
fn will_park(&mut self) {
if self.forbid_parking {
let mut backtrace_message = String::new();
+ #[cfg(any(test, feature = "test-support"))]
if let Some(backtrace) = self.waiting_backtrace.as_mut() {
backtrace.resolve();
backtrace_message = format!(
"\nbacktrace of waiting future:\n{:?}",
- CwdBacktrace::new(backtrace)
+ util::CwdBacktrace(backtrace)
);
}
@@ -294,37 +340,6 @@ impl DeterministicState {
}
}
-struct CwdBacktrace<'a> {
- backtrace: &'a Backtrace,
-}
-
-impl<'a> CwdBacktrace<'a> {
- fn new(backtrace: &'a Backtrace) -> Self {
- Self { backtrace }
- }
-}
-
-impl<'a> Debug for CwdBacktrace<'a> {
- fn fmt(&self, f: &mut fmt::Formatter<'_>) -> std::fmt::Result {
- let cwd = std::env::current_dir().unwrap();
- let mut print_path = |fmt: &mut fmt::Formatter<'_>, path: BytesOrWideString<'_>| {
- fmt::Display::fmt(&path, fmt)
- };
- let mut fmt = BacktraceFmt::new(f, backtrace::PrintFmt::Full, &mut print_path);
- for frame in self.backtrace.frames() {
- let mut formatted_frame = fmt.frame();
- if frame
- .symbols()
- .iter()
- .any(|s| s.filename().map_or(false, |f| f.starts_with(&cwd)))
- {
- formatted_frame.backtrace_frame(frame)?;
- }
- }
- fmt.finish()
- }
-}
-
impl Foreground {
pub fn platform(dispatcher: Arc<dyn platform::Dispatcher>) -> Result<Self> {
if dispatcher.is_main_thread() {
@@ -340,6 +355,7 @@ impl Foreground {
pub fn spawn<T: 'static>(&self, future: impl Future<Output = T> + 'static) -> Task<T> {
let future = any_local_future(future);
let any_task = match self {
+ #[cfg(any(test, feature = "test-support"))]
Self::Deterministic { cx_id, executor } => {
executor.spawn_from_foreground(*cx_id, future, false)
}
@@ -361,15 +377,17 @@ impl Foreground {
Task::local(any_task)
}
- pub fn run<T: 'static>(&self, future: impl 'static + Future<Output = T>) -> T {
- let future = any_local_future(future);
- let any_value = match self {
+ #[cfg(any(test, feature = "test-support"))]
+ pub fn run<T: 'static>(&self, future: impl Future<Output = T>) -> T {
+ let future = async move { Box::new(future.await) as Box<dyn Any> }.boxed_local();
+ let result = match self {
Self::Deterministic { cx_id, executor } => executor.run(*cx_id, future),
Self::Platform { .. } => panic!("you can't call run on a platform foreground executor"),
};
- *any_value.downcast().unwrap()
+ *result.downcast().unwrap()
}
+ #[cfg(any(test, feature = "test-support"))]
pub fn run_until_parked(&self) {
match self {
Self::Deterministic { executor, .. } => executor.run_until_parked(),
@@ -377,6 +395,7 @@ impl Foreground {
}
}
+ #[cfg(any(test, feature = "test-support"))]
pub fn parking_forbidden(&self) -> bool {
match self {
Self::Deterministic { executor, .. } => executor.state.lock().forbid_parking,
@@ -384,15 +403,18 @@ impl Foreground {
}
}
+ #[cfg(any(test, feature = "test-support"))]
pub fn start_waiting(&self) {
match self {
Self::Deterministic { executor, .. } => {
- executor.state.lock().waiting_backtrace = Some(Backtrace::new_unresolved());
+ executor.state.lock().waiting_backtrace =
+ Some(backtrace::Backtrace::new_unresolved());
}
_ => panic!("this method can only be called on a deterministic executor"),
}
}
+ #[cfg(any(test, feature = "test-support"))]
pub fn finish_waiting(&self) {
match self {
Self::Deterministic { executor, .. } => {
@@ -402,7 +424,10 @@ impl Foreground {
}
}
+ #[cfg(any(test, feature = "test-support"))]
pub fn forbid_parking(&self) {
+ use rand::prelude::*;
+
match self {
Self::Deterministic { executor, .. } => {
let mut state = executor.state.lock();
@@ -415,13 +440,36 @@ impl Foreground {
pub async fn timer(&self, duration: Duration) {
match self {
+ #[cfg(any(test, feature = "test-support"))]
Self::Deterministic { executor, .. } => {
- let (tx, mut rx) = barrier::channel();
+ use postage::prelude::Stream as _;
+
+ let (tx, mut rx) = postage::barrier::channel();
+ let timer_id;
{
let mut state = executor.state.lock();
let wakeup_at = state.now + duration;
- state.pending_timers.push((wakeup_at, tx));
+ timer_id = util::post_inc(&mut state.next_timer_id);
+ state.pending_timers.push((timer_id, wakeup_at, tx));
+ }
+
+ struct DropTimer<'a>(usize, &'a Foreground);
+ impl<'a> Drop for DropTimer<'a> {
+ fn drop(&mut self) {
+ match self.1 {
+ Foreground::Deterministic { executor, .. } => {
+ executor
+ .state
+ .lock()
+ .pending_timers
+ .retain(|(timer_id, _, _)| *timer_id != self.0);
+ }
+ _ => unreachable!(),
+ }
+ }
}
+
+ let _guard = DropTimer(timer_id, self);
rx.recv().await;
}
_ => {
@@ -430,25 +478,19 @@ impl Foreground {
}
}
+ #[cfg(any(test, feature = "test-support"))]
pub fn advance_clock(&self, duration: Duration) {
match self {
Self::Deterministic { executor, .. } => {
executor.run_until_parked();
-
- let mut state = executor.state.lock();
- state.now += duration;
- let now = state.now;
- let mut pending_timers = mem::take(&mut state.pending_timers);
- drop(state);
-
- pending_timers.retain(|(wakeup, _)| *wakeup > now);
- executor.state.lock().pending_timers.extend(pending_timers);
+ executor.advance_clock(duration);
}
_ => panic!("this method can only be called on a deterministic executor"),
}
}
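
The timer changes above rely on the wake-by-drop behaviour of a postage barrier: `timer()` parks a future on `rx.recv()`, and `advance_clock` simply retains the timers whose wakeup time is still in the future, so dropping the expired entries (and their senders) is what wakes the waiting futures. A tiny sketch of that behaviour, assuming the `postage` and `smol` crates already used in this diff and their documented barrier semantics (the receiver resolves once the sender is dropped):

```rust
use postage::{barrier, prelude::Stream as _};

fn main() {
    let (tx, mut rx) = barrier::channel();

    smol::block_on(async move {
        // Dropping the sender releases the barrier, so the pending recv()
        // completes; this is how an expired timer is "fired" above.
        drop(tx);
        rx.recv().await;
        println!("timer fired");
    });
}
```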
- pub fn set_block_on_ticks(&self, range: RangeInclusive<usize>) {
+ #[cfg(any(test, feature = "test-support"))]
+ pub fn set_block_on_ticks(&self, range: std::ops::RangeInclusive<usize>) {
match self {
Self::Deterministic { executor, .. } => executor.state.lock().block_on_ticks = range,
_ => panic!("this method can only be called on a deterministic executor"),
@@ -488,6 +530,7 @@ impl Background {
let future = any_future(future);
let any_task = match self {
Self::Production { executor, .. } => executor.spawn(future),
+ #[cfg(any(test, feature = "test-support"))]
Self::Deterministic { executor } => executor.spawn(future),
};
Task::send(any_task)
@@ -500,6 +543,7 @@ impl Background {
smol::pin!(future);
match self {
Self::Production { .. } => smol::block_on(&mut future),
+ #[cfg(any(test, feature = "test-support"))]
Self::Deterministic { executor, .. } => {
executor.block(&mut future, usize::MAX).unwrap()
}
@@ -519,7 +563,9 @@ impl Background {
if !timeout.is_zero() {
let output = match self {
Self::Production { .. } => smol::block_on(util::timeout(timeout, &mut future)).ok(),
+ #[cfg(any(test, feature = "test-support"))]
Self::Deterministic { executor, .. } => {
+ use rand::prelude::*;
let max_ticks = {
let mut state = executor.state.lock();
let range = state.block_on_ticks.clone();
@@ -554,7 +600,11 @@ impl Background {
}
}
+ #[cfg(any(test, feature = "test-support"))]
pub async fn simulate_random_delay(&self) {
+ use rand::prelude::*;
+ use smol::future::yield_now;
+
match self {
Self::Deterministic { executor, .. } => {
if executor.state.lock().rng.gen_bool(0.2) {
@@ -562,6 +612,9 @@ impl Background {
for _ in 0..yields {
yield_now().await;
}
+
+ let delay = Duration::from_millis(executor.state.lock().rng.gen_range(0..100));
+ executor.advance_clock(delay);
}
}
_ => panic!("this method can only be called on a deterministic executor"),
@@ -39,6 +39,7 @@ pub struct Window {
pub(crate) last_prompt: Cell<Option<oneshot::Sender<usize>>>,
}
+#[cfg(any(test, feature = "test-support"))]
impl ForegroundPlatform {
pub(crate) fn simulate_new_path_selection(
&self,
@@ -1,3 +1,10 @@
+use crate::{
+ executor, platform, Entity, FontCache, Handle, LeakDetector, MutableAppContext, Platform,
+ Subscription, TestAppContext,
+};
+use futures::StreamExt;
+use parking_lot::Mutex;
+use smol::channel;
use std::{
panic::{self, RefUnwindSafe},
rc::Rc,
@@ -7,14 +14,6 @@ use std::{
},
};
-use futures::StreamExt;
-use smol::channel;
-
-use crate::{
- executor, platform, Entity, FontCache, Handle, MutableAppContext, Platform, Subscription,
- TestAppContext,
-};
-
#[cfg(test)]
#[ctor::ctor]
fn init_logger() {
@@ -23,6 +22,9 @@ fn init_logger() {
}
}
+// #[global_allocator]
+// static ALLOC: dhat::Alloc = dhat::Alloc;
+
pub fn run_test(
mut num_iterations: u64,
mut starting_seed: u64,
@@ -36,6 +38,8 @@ pub fn run_test(
bool,
)),
) {
+ // let _profiler = dhat::Profiler::new_heap();
+
let is_randomized = num_iterations > 1;
if is_randomized {
if let Ok(value) = std::env::var("SEED") {
@@ -65,24 +69,31 @@ pub fn run_test(
}
let deterministic = executor::Deterministic::new(seed);
+ let leak_detector = Arc::new(Mutex::new(LeakDetector::default()));
let mut cx = TestAppContext::new(
foreground_platform.clone(),
platform.clone(),
deterministic.build_foreground(usize::MAX),
deterministic.build_background(),
font_cache.clone(),
+ leak_detector.clone(),
0,
);
cx.update(|cx| {
test_fn(
cx,
foreground_platform.clone(),
- deterministic,
+ deterministic.clone(),
seed,
is_last_iteration,
- )
+ );
});
+ cx.update(|cx| cx.remove_all_windows());
+ deterministic.run_until_parked();
+ cx.update(|_| {}); // flush effects
+
+ leak_detector.lock().detect();
if is_last_iteration {
break;
}
@@ -18,3 +18,31 @@ where
let future = async move { Ok(f.await) };
timer.race(future).await
}
+
+#[cfg(any(test, feature = "test-support"))]
+pub struct CwdBacktrace<'a>(pub &'a backtrace::Backtrace);
+
+#[cfg(any(test, feature = "test-support"))]
+impl<'a> std::fmt::Debug for CwdBacktrace<'a> {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ use backtrace::{BacktraceFmt, BytesOrWideString};
+
+ let cwd = std::env::current_dir().unwrap();
+ let cwd = cwd.parent().unwrap();
+ let mut print_path = |fmt: &mut std::fmt::Formatter<'_>, path: BytesOrWideString<'_>| {
+ std::fmt::Display::fmt(&path, fmt)
+ };
+ let mut fmt = BacktraceFmt::new(f, backtrace::PrintFmt::Full, &mut print_path);
+ for frame in self.0.frames() {
+ let mut formatted_frame = fmt.frame();
+ if frame
+ .symbols()
+ .iter()
+ .any(|s| s.filename().map_or(false, |f| f.starts_with(&cwd)))
+ {
+ formatted_frame.backtrace_frame(frame)?;
+ }
+ }
+ fmt.finish()
+ }
+}
@@ -65,25 +65,14 @@ pub fn test(args: TokenStream, function: TokenStream) -> TokenStream {
let mut outer_fn: ItemFn = if inner_fn.sig.asyncness.is_some() {
// Pass to the test function the number of app contexts that it needs,
// based on its parameter list.
+ let mut cx_vars = proc_macro2::TokenStream::new();
+ let mut cx_teardowns = proc_macro2::TokenStream::new();
let mut inner_fn_args = proc_macro2::TokenStream::new();
for (ix, arg) in inner_fn.sig.inputs.iter().enumerate() {
if let FnArg::Typed(arg) = arg {
if let Type::Path(ty) = &*arg.ty {
let last_segment = ty.path.segments.last();
match last_segment.map(|s| s.ident.to_string()).as_deref() {
- Some("TestAppContext") => {
- let first_entity_id = ix * 100_000;
- inner_fn_args.extend(quote!(
- #namespace::TestAppContext::new(
- foreground_platform.clone(),
- cx.platform().clone(),
- deterministic.build_foreground(#ix),
- deterministic.build_background(),
- cx.font_cache().clone(),
- #first_entity_id,
- ),
- ));
- }
Some("StdRng") => {
inner_fn_args.extend(quote!(rand::SeedableRng::seed_from_u64(seed),));
}
@@ -97,6 +86,47 @@ pub fn test(args: TokenStream, function: TokenStream) -> TokenStream {
)
}
}
+ } else if let Type::Reference(ty) = &*arg.ty {
+ match &*ty.elem {
+ Type::Path(ty) => {
+ let last_segment = ty.path.segments.last();
+ match last_segment.map(|s| s.ident.to_string()).as_deref() {
+ Some("TestAppContext") => {
+ let first_entity_id = ix * 100_000;
+ let cx_varname = format_ident!("cx_{}", ix);
+ cx_vars.extend(quote!(
+ let mut #cx_varname = #namespace::TestAppContext::new(
+ foreground_platform.clone(),
+ cx.platform().clone(),
+ deterministic.build_foreground(#ix),
+ deterministic.build_background(),
+ cx.font_cache().clone(),
+ cx.leak_detector(),
+ #first_entity_id,
+ );
+ ));
+ cx_teardowns.extend(quote!(
+ #cx_varname.update(|cx| cx.remove_all_windows());
+ deterministic.run_until_parked();
+ #cx_varname.update(|_| {}); // flush effects
+ ));
+ inner_fn_args.extend(quote!(&mut #cx_varname,));
+ }
+ _ => {
+ return TokenStream::from(
+ syn::Error::new_spanned(arg, "invalid argument")
+ .into_compile_error(),
+ )
+ }
+ }
+ }
+ _ => {
+ return TokenStream::from(
+ syn::Error::new_spanned(arg, "invalid argument")
+ .into_compile_error(),
+ )
+ }
+ }
} else {
return TokenStream::from(
syn::Error::new_spanned(arg, "invalid argument").into_compile_error(),
@@ -119,7 +149,9 @@ pub fn test(args: TokenStream, function: TokenStream) -> TokenStream {
#starting_seed as u64,
#max_retries,
&mut |cx, foreground_platform, deterministic, seed, is_last_iteration| {
- cx.foreground().run(#inner_fn_name(#inner_fn_args))
+ #cx_vars
+ cx.foreground().run(#inner_fn_name(#inner_fn_args));
+ #cx_teardowns
}
);
}
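
The macro change above moves `TestAppContext` construction out of the argument list and into per-argument setup and teardown code, and it now recognizes the context parameter through a `Type::Reference` rather than a bare `Type::Path`. A small, self-contained illustration of that `syn` matching, using a made-up test signature (only the `syn` API with its "full" feature is assumed):

```rust
use syn::{parse_quote, FnArg, ItemFn, Type};

fn main() {
    // Hypothetical test signature, mirroring the new `cx: &mut TestAppContext` style.
    let item: ItemFn = parse_quote! {
        async fn my_test(cx: &mut TestAppContext) {}
    };

    for arg in &item.sig.inputs {
        if let FnArg::Typed(arg) = arg {
            // Look through the reference to the underlying path type,
            // as the updated macro does before emitting setup/teardown code.
            if let Type::Reference(ty) = &*arg.ty {
                if let Type::Path(ty) = &*ty.elem {
                    let last_segment = ty.path.segments.last().unwrap();
                    println!("reference argument of type {}", last_segment.ident);
                }
            }
        }
    }
}
```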
@@ -125,23 +125,23 @@ fn test_edit_events(cx: &mut gpui::MutableAppContext) {
}
#[gpui::test]
-async fn test_apply_diff(mut cx: gpui::TestAppContext) {
+async fn test_apply_diff(cx: &mut gpui::TestAppContext) {
let text = "a\nbb\nccc\ndddd\neeeee\nffffff\n";
let buffer = cx.add_model(|cx| Buffer::new(0, text, cx));
let text = "a\nccc\ndddd\nffffff\n";
- let diff = buffer.read_with(&cx, |b, cx| b.diff(text.into(), cx)).await;
- buffer.update(&mut cx, |b, cx| b.apply_diff(diff, cx));
+ let diff = buffer.read_with(cx, |b, cx| b.diff(text.into(), cx)).await;
+ buffer.update(cx, |b, cx| b.apply_diff(diff, cx));
cx.read(|cx| assert_eq!(buffer.read(cx).text(), text));
let text = "a\n1\n\nccc\ndd2dd\nffffff\n";
- let diff = buffer.read_with(&cx, |b, cx| b.diff(text.into(), cx)).await;
- buffer.update(&mut cx, |b, cx| b.apply_diff(diff, cx));
+ let diff = buffer.read_with(cx, |b, cx| b.diff(text.into(), cx)).await;
+ buffer.update(cx, |b, cx| b.apply_diff(diff, cx));
cx.read(|cx| assert_eq!(buffer.read(cx).text(), text));
}
#[gpui::test]
-async fn test_reparse(mut cx: gpui::TestAppContext) {
+async fn test_reparse(cx: &mut gpui::TestAppContext) {
let text = "fn a() {}";
let buffer =
cx.add_model(|cx| Buffer::new(0, text, cx).with_language(Arc::new(rust_lang()), cx));
@@ -159,13 +159,13 @@ async fn test_reparse(mut cx: gpui::TestAppContext) {
)
);
- buffer.update(&mut cx, |buffer, _| {
+ buffer.update(cx, |buffer, _| {
buffer.set_sync_parse_timeout(Duration::ZERO)
});
// Perform some edits (add parameter and variable reference)
// Parsing doesn't begin until the transaction is complete
- buffer.update(&mut cx, |buf, cx| {
+ buffer.update(cx, |buf, cx| {
buf.start_transaction();
let offset = buf.text().find(")").unwrap();
@@ -196,19 +196,19 @@ async fn test_reparse(mut cx: gpui::TestAppContext) {
// * turn identifier into a field expression
// * turn field expression into a method call
// * add a turbofish to the method call
- buffer.update(&mut cx, |buf, cx| {
+ buffer.update(cx, |buf, cx| {
let offset = buf.text().find(";").unwrap();
buf.edit(vec![offset..offset], ".e", cx);
assert_eq!(buf.text(), "fn a(b: C) { d.e; }");
assert!(buf.is_parsing());
});
- buffer.update(&mut cx, |buf, cx| {
+ buffer.update(cx, |buf, cx| {
let offset = buf.text().find(";").unwrap();
buf.edit(vec![offset..offset], "(f)", cx);
assert_eq!(buf.text(), "fn a(b: C) { d.e(f); }");
assert!(buf.is_parsing());
});
- buffer.update(&mut cx, |buf, cx| {
+ buffer.update(cx, |buf, cx| {
let offset = buf.text().find("(f)").unwrap();
buf.edit(vec![offset..offset], "::<G>", cx);
assert_eq!(buf.text(), "fn a(b: C) { d.e::<G>(f); }");
@@ -230,7 +230,7 @@ async fn test_reparse(mut cx: gpui::TestAppContext) {
)
);
- buffer.update(&mut cx, |buf, cx| {
+ buffer.update(cx, |buf, cx| {
buf.undo(cx);
assert_eq!(buf.text(), "fn a() {}");
assert!(buf.is_parsing());
@@ -247,7 +247,7 @@ async fn test_reparse(mut cx: gpui::TestAppContext) {
)
);
- buffer.update(&mut cx, |buf, cx| {
+ buffer.update(cx, |buf, cx| {
buf.redo(cx);
assert_eq!(buf.text(), "fn a(b: C) { d.e::<G>(f); }");
assert!(buf.is_parsing());
@@ -276,7 +276,7 @@ async fn test_reparse(mut cx: gpui::TestAppContext) {
}
#[gpui::test]
-async fn test_outline(mut cx: gpui::TestAppContext) {
+async fn test_outline(cx: &mut gpui::TestAppContext) {
let language = Arc::new(
rust_lang()
.with_outline_query(
@@ -336,7 +336,7 @@ async fn test_outline(mut cx: gpui::TestAppContext) {
let buffer = cx.add_model(|cx| Buffer::new(0, text, cx).with_language(language, cx));
let outline = buffer
- .read_with(&cx, |buffer, _| buffer.snapshot().outline(None))
+ .read_with(cx, |buffer, _| buffer.snapshot().outline(None))
.unwrap();
assert_eq!(
@@ -553,7 +553,7 @@ fn test_autoindent_adjusts_lines_when_only_text_changes(cx: &mut MutableAppConte
}
#[gpui::test]
-async fn test_diagnostics(mut cx: gpui::TestAppContext) {
+async fn test_diagnostics(cx: &mut gpui::TestAppContext) {
let (language_server, mut fake) = cx.update(lsp::LanguageServer::fake);
let mut rust_lang = rust_lang();
rust_lang.config.language_server = Some(LanguageServerConfig {
@@ -579,13 +579,13 @@ async fn test_diagnostics(mut cx: gpui::TestAppContext) {
.await;
// Edit the buffer, moving the content down
- buffer.update(&mut cx, |buffer, cx| buffer.edit([0..0], "\n\n", cx));
+ buffer.update(cx, |buffer, cx| buffer.edit([0..0], "\n\n", cx));
let change_notification_1 = fake
.receive_notification::<lsp::notification::DidChangeTextDocument>()
.await;
assert!(change_notification_1.text_document.version > open_notification.text_document.version);
- buffer.update(&mut cx, |buffer, cx| {
+ buffer.update(cx, |buffer, cx| {
// Receive diagnostics for an earlier version of the buffer.
buffer
.update_diagnostics(
@@ -760,7 +760,7 @@ async fn test_diagnostics(mut cx: gpui::TestAppContext) {
// Keep editing the buffer and ensure disk-based diagnostics get translated according to the
// changes since the last save.
- buffer.update(&mut cx, |buffer, cx| {
+ buffer.update(cx, |buffer, cx| {
buffer.edit(Some(Point::new(2, 0)..Point::new(2, 0)), " ", cx);
buffer.edit(Some(Point::new(2, 8)..Point::new(2, 10)), "(x: usize)", cx);
});
@@ -771,7 +771,7 @@ async fn test_diagnostics(mut cx: gpui::TestAppContext) {
change_notification_2.text_document.version > change_notification_1.text_document.version
);
- buffer.update(&mut cx, |buffer, cx| {
+ buffer.update(cx, |buffer, cx| {
buffer
.update_diagnostics(
vec![
@@ -836,7 +836,7 @@ async fn test_diagnostics(mut cx: gpui::TestAppContext) {
}
#[gpui::test]
-async fn test_edits_from_lsp_with_past_version(mut cx: gpui::TestAppContext) {
+async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
let (language_server, mut fake) = cx.update(lsp::LanguageServer::fake);
let text = "
@@ -865,7 +865,7 @@ async fn test_edits_from_lsp_with_past_version(mut cx: gpui::TestAppContext) {
.version;
// Simulate editing the buffer after the language server computes some edits.
- buffer.update(&mut cx, |buffer, cx| {
+ buffer.update(cx, |buffer, cx| {
buffer.edit(
[Point::new(0, 0)..Point::new(0, 0)],
"// above first function\n",
@@ -902,7 +902,7 @@ async fn test_edits_from_lsp_with_past_version(mut cx: gpui::TestAppContext) {
});
let edits = buffer
- .update(&mut cx, |buffer, cx| {
+ .update(cx, |buffer, cx| {
buffer.edits_from_lsp(
vec![
// replace body of first function
@@ -937,7 +937,7 @@ async fn test_edits_from_lsp_with_past_version(mut cx: gpui::TestAppContext) {
.await
.unwrap();
- buffer.update(&mut cx, |buffer, cx| {
+ buffer.update(cx, |buffer, cx| {
for (range, new_text) in edits {
buffer.edit([range], new_text, cx);
}
@@ -962,7 +962,7 @@ async fn test_edits_from_lsp_with_past_version(mut cx: gpui::TestAppContext) {
}
#[gpui::test]
-async fn test_edits_from_lsp_with_edits_on_adjacent_lines(mut cx: gpui::TestAppContext) {
+async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
let text = "
use a::b;
use a::c;
@@ -979,7 +979,7 @@ async fn test_edits_from_lsp_with_edits_on_adjacent_lines(mut cx: gpui::TestAppC
// Simulate the language server sending us a small edit in the form of a very large diff.
// Rust-analyzer does this when performing a merge-imports code action.
let edits = buffer
- .update(&mut cx, |buffer, cx| {
+ .update(cx, |buffer, cx| {
buffer.edits_from_lsp(
[
// Replace the first use statement without editing the semicolon.
@@ -1015,7 +1015,7 @@ async fn test_edits_from_lsp_with_edits_on_adjacent_lines(mut cx: gpui::TestAppC
.await
.unwrap();
- buffer.update(&mut cx, |buffer, cx| {
+ buffer.update(cx, |buffer, cx| {
let edits = edits
.into_iter()
.map(|(range, text)| {
@@ -1053,7 +1053,7 @@ async fn test_edits_from_lsp_with_edits_on_adjacent_lines(mut cx: gpui::TestAppC
}
#[gpui::test]
-async fn test_empty_diagnostic_ranges(mut cx: gpui::TestAppContext) {
+async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
cx.add_model(|cx| {
let text = concat!(
"let one = ;\n", //
@@ -10,10 +10,11 @@ path = "src/lsp.rs"
test-support = ["async-pipe"]
[dependencies]
+collections = { path = "../collections" }
gpui = { path = "../gpui" }
util = { path = "../util" }
anyhow = "1.0"
-async-pipe = { git = "https://github.com/routerify/async-pipe-rs", rev = "feeb77e83142a9ff837d0767652ae41bfc5d8e47", optional = true }
+async-pipe = { git = "https://github.com/zed-industries/async-pipe-rs", rev = "82d00a04211cf4e1236029aa03e6b6ce2a74c553", optional = true }
futures = "0.3"
log = "0.4"
lsp-types = "0.91"
@@ -26,7 +27,7 @@ smol = "1.2"
[dev-dependencies]
gpui = { path = "../gpui", features = ["test-support"] }
util = { path = "../util", features = ["test-support"] }
-async-pipe = { git = "https://github.com/routerify/async-pipe-rs", rev = "feeb77e83142a9ff837d0767652ae41bfc5d8e47" }
+async-pipe = { git = "https://github.com/zed-industries/async-pipe-rs", rev = "82d00a04211cf4e1236029aa03e6b6ce2a74c553" }
ctor = "0.1"
env_logger = "0.8"
unindent = "0.1.7"
@@ -1,8 +1,9 @@
use anyhow::{anyhow, Context, Result};
-use futures::{io::BufWriter, AsyncRead, AsyncWrite};
+use collections::HashMap;
+use futures::{channel::oneshot, io::BufWriter, AsyncRead, AsyncWrite};
use gpui::{executor, Task};
use parking_lot::{Mutex, RwLock};
-use postage::{barrier, oneshot, prelude::Stream, sink::Sink, watch};
+use postage::{barrier, prelude::Stream, watch};
use serde::{Deserialize, Serialize};
use serde_json::{json, value::RawValue, Value};
use smol::{
@@ -11,7 +12,6 @@ use smol::{
process::Command,
};
use std::{
- collections::HashMap,
future::Future,
io::Write,
str::FromStr,
@@ -128,13 +128,15 @@ impl LanguageServer {
let mut stdin = BufWriter::new(stdin);
let mut stdout = BufReader::new(stdout);
let (outbound_tx, outbound_rx) = channel::unbounded::<Vec<u8>>();
- let notification_handlers = Arc::new(RwLock::new(HashMap::<_, NotificationHandler>::new()));
- let response_handlers = Arc::new(Mutex::new(HashMap::<_, ResponseHandler>::new()));
+ let notification_handlers =
+ Arc::new(RwLock::new(HashMap::<_, NotificationHandler>::default()));
+ let response_handlers = Arc::new(Mutex::new(HashMap::<_, ResponseHandler>::default()));
let input_task = executor.spawn(
{
let notification_handlers = notification_handlers.clone();
let response_handlers = response_handlers.clone();
async move {
+ let _clear_response_handlers = ClearResponseHandlers(response_handlers.clone());
let mut buffer = Vec::new();
loop {
buffer.clear();
@@ -188,8 +190,10 @@ impl LanguageServer {
.log_err(),
);
let (output_done_tx, output_done_rx) = barrier::channel();
- let output_task = executor.spawn(
+ let output_task = executor.spawn({
+ let response_handlers = response_handlers.clone();
async move {
+ let _clear_response_handlers = ClearResponseHandlers(response_handlers);
let mut content_len_buffer = Vec::new();
while let Ok(message) = outbound_rx.recv().await {
content_len_buffer.clear();
@@ -203,8 +207,8 @@ impl LanguageServer {
drop(output_done_tx);
Ok(())
}
- .log_err(),
- );
+ .log_err()
+ });
let (initialized_tx, initialized_rx) = barrier::channel();
let (mut capabilities_tx, capabilities_rx) = watch::channel();
@@ -323,9 +327,12 @@ impl LanguageServer {
outbound_tx.close();
Some(
async move {
+ log::debug!("language server shutdown started");
shutdown_request.await?;
+ response_handlers.lock().clear();
exit?;
output_done.recv().await;
+ log::debug!("language server shutdown finished");
drop(tasks);
Ok(())
}
@@ -403,9 +410,13 @@ impl LanguageServer {
params,
})
.unwrap();
- let mut response_handlers = response_handlers.lock();
- let (mut tx, mut rx) = oneshot::channel();
- response_handlers.insert(
+
+ let send = outbound_tx
+ .try_send(message)
+ .context("failed to write to language server's stdin");
+
+ let (tx, rx) = oneshot::channel();
+ response_handlers.lock().insert(
id,
Box::new(move |result| {
let response = match result {
@@ -414,16 +425,13 @@ impl LanguageServer {
}
Err(error) => Err(anyhow!("{}", error.message)),
};
- let _ = tx.try_send(response);
+ let _ = tx.send(response);
}),
);
- let send = outbound_tx
- .try_send(message)
- .context("failed to write to language server's stdin");
async move {
send?;
- rx.recv().await.unwrap()
+ rx.await?
}
}
@@ -476,18 +484,23 @@ impl Drop for Subscription {
#[cfg(any(test, feature = "test-support"))]
pub struct FakeLanguageServer {
- handlers: Arc<
- Mutex<
- HashMap<
- &'static str,
- Box<dyn Send + FnMut(usize, &[u8], gpui::AsyncAppContext) -> Vec<u8>>,
- >,
- >,
- >,
+ handlers: FakeLanguageServerHandlers,
outgoing_tx: futures::channel::mpsc::UnboundedSender<Vec<u8>>,
incoming_rx: futures::channel::mpsc::UnboundedReceiver<Vec<u8>>,
+ _input_task: Task<Result<()>>,
+ _output_task: Task<Result<()>>,
}
+#[cfg(any(test, feature = "test-support"))]
+type FakeLanguageServerHandlers = Arc<
+ Mutex<
+ HashMap<
+ &'static str,
+ Box<dyn Send + FnMut(usize, &[u8], gpui::AsyncAppContext) -> Vec<u8>>,
+ >,
+ >,
+>;
+
#[cfg(any(test, feature = "test-support"))]
impl LanguageServer {
pub fn fake(cx: &mut gpui::MutableAppContext) -> (Arc<Self>, FakeLanguageServer) {
@@ -533,59 +546,69 @@ impl FakeLanguageServer {
let (incoming_tx, incoming_rx) = futures::channel::mpsc::unbounded();
let (outgoing_tx, mut outgoing_rx) = futures::channel::mpsc::unbounded();
- let this = Self {
- outgoing_tx: outgoing_tx.clone(),
- incoming_rx,
- handlers: Default::default(),
- };
+ let handlers = FakeLanguageServerHandlers::default();
- // Receive incoming messages
- let handlers = this.handlers.clone();
- cx.spawn(|cx| async move {
- let mut buffer = Vec::new();
- let mut stdin = smol::io::BufReader::new(stdin);
- while Self::receive(&mut stdin, &mut buffer).await.is_ok() {
- cx.background().simulate_random_delay().await;
- if let Ok(request) = serde_json::from_slice::<AnyRequest>(&buffer) {
- assert_eq!(request.jsonrpc, JSON_RPC_VERSION);
-
- if let Some(handler) = handlers.lock().get_mut(request.method) {
- let response =
- handler(request.id, request.params.get().as_bytes(), cx.clone());
- log::debug!("handled lsp request. method:{}", request.method);
- outgoing_tx.unbounded_send(response)?;
- } else {
- log::debug!("unhandled lsp request. method:{}", request.method);
- outgoing_tx.unbounded_send(
- serde_json::to_vec(&AnyResponse {
+ let input_task = cx.spawn(|cx| {
+ let handlers = handlers.clone();
+ let outgoing_tx = outgoing_tx.clone();
+ async move {
+ let mut buffer = Vec::new();
+ let mut stdin = smol::io::BufReader::new(stdin);
+ while Self::receive(&mut stdin, &mut buffer).await.is_ok() {
+ cx.background().simulate_random_delay().await;
+ if let Ok(request) = serde_json::from_slice::<AnyRequest>(&buffer) {
+ assert_eq!(request.jsonrpc, JSON_RPC_VERSION);
+
+ let response;
+ if let Some(handler) = handlers.lock().get_mut(request.method) {
+ response =
+ handler(request.id, request.params.get().as_bytes(), cx.clone());
+ log::debug!("handled lsp request. method:{}", request.method);
+ } else {
+ response = serde_json::to_vec(&AnyResponse {
id: request.id,
error: Some(Error {
message: "no handler".to_string(),
}),
result: None,
})
- .unwrap(),
- )?;
+ .unwrap();
+ log::debug!("unhandled lsp request. method:{}", request.method);
+ }
+ outgoing_tx.unbounded_send(response)?;
+ } else {
+ incoming_tx.unbounded_send(buffer.clone())?;
}
- } else {
- incoming_tx.unbounded_send(buffer.clone())?;
}
+ Ok::<_, anyhow::Error>(())
}
- Ok::<_, anyhow::Error>(())
- })
- .detach();
-
- // Send outgoing messages
- cx.background()
- .spawn(async move {
- let mut stdout = smol::io::BufWriter::new(stdout);
- while let Some(notification) = outgoing_rx.next().await {
- Self::send(&mut stdout, &notification).await;
- }
- })
- .detach();
+ });
- this
+ let output_task = cx.background().spawn(async move {
+ let mut stdout = smol::io::BufWriter::new(stdout);
+ while let Some(message) = outgoing_rx.next().await {
+ stdout
+ .write_all(CONTENT_LEN_HEADER.as_bytes())
+ .await
+ .unwrap();
+ stdout
+ .write_all((format!("{}", message.len())).as_bytes())
+ .await
+ .unwrap();
+ stdout.write_all("\r\n\r\n".as_bytes()).await.unwrap();
+ stdout.write_all(&message).await.unwrap();
+ stdout.flush().await.unwrap();
+ }
+ Ok(())
+ });
+
+ Self {
+ outgoing_tx,
+ incoming_rx,
+ handlers,
+ _input_task: input_task,
+ _output_task: output_task,
+ }
}
pub async fn notify<T: notification::Notification>(&mut self, params: T::Params) {
@@ -665,20 +688,6 @@ impl FakeLanguageServer {
.await;
}
- async fn send(stdout: &mut smol::io::BufWriter<async_pipe::PipeWriter>, message: &[u8]) {
- stdout
- .write_all(CONTENT_LEN_HEADER.as_bytes())
- .await
- .unwrap();
- stdout
- .write_all((format!("{}", message.len())).as_bytes())
- .await
- .unwrap();
- stdout.write_all("\r\n\r\n".as_bytes()).await.unwrap();
- stdout.write_all(&message).await.unwrap();
- stdout.flush().await.unwrap();
- }
-
async fn receive(
stdin: &mut smol::io::BufReader<async_pipe::PipeReader>,
buffer: &mut Vec<u8>,
@@ -689,7 +698,7 @@ impl FakeLanguageServer {
let message_len: usize = std::str::from_utf8(buffer)
.unwrap()
.strip_prefix(CONTENT_LEN_HEADER)
- .unwrap()
+ .ok_or_else(|| anyhow!("invalid content length header"))?
.trim_end()
.parse()
.unwrap();
@@ -699,6 +708,14 @@ impl FakeLanguageServer {
}
}
+struct ClearResponseHandlers(Arc<Mutex<HashMap<usize, ResponseHandler>>>);
+
+impl Drop for ClearResponseHandlers {
+ fn drop(&mut self) {
+ self.0.lock().clear();
+ }
+}
+
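The `ClearResponseHandlers` guard above is a small RAII helper: when either I/O task of the language server finishes (or is dropped), the guard clears the pending response handlers, so callers waiting on their oneshot receivers observe the channel closing instead of hanging forever. A generic sketch of the same drop-guard idea, with the handler map simplified to boxed closures:

```rust
use std::collections::HashMap;
use std::sync::{Arc, Mutex};

// Guard that empties a shared handler map when it goes out of scope.
struct ClearOnDrop(Arc<Mutex<HashMap<usize, Box<dyn FnOnce() + Send>>>>);

impl Drop for ClearOnDrop {
    fn drop(&mut self) {
        self.0.lock().unwrap().clear();
    }
}

fn main() {
    let handlers: Arc<Mutex<HashMap<usize, Box<dyn FnOnce() + Send>>>> =
        Arc::new(Mutex::new(HashMap::new()));
    handlers.lock().unwrap().insert(1, Box::new(|| {}));

    {
        let _guard = ClearOnDrop(handlers.clone());
        // ... a task's main loop would run here ...
    } // guard dropped: handlers cleared even if the loop exited early

    assert!(handlers.lock().unwrap().is_empty());
}
```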
#[cfg(test)]
mod tests {
use super::*;
@@ -712,7 +729,7 @@ mod tests {
}
#[gpui::test]
- async fn test_fake(mut cx: TestAppContext) {
+ async fn test_fake(cx: &mut TestAppContext) {
let (server, mut fake) = cx.update(LanguageServer::fake);
let (message_tx, message_rx) = channel::unbounded();
@@ -225,7 +225,7 @@ struct FakeFsEntry {
struct FakeFsState {
entries: std::collections::BTreeMap<PathBuf, FakeFsEntry>,
next_inode: u64,
- events_tx: postage::broadcast::Sender<Vec<fsevent::Event>>,
+ event_txs: Vec<smol::channel::Sender<Vec<fsevent::Event>>>,
}
#[cfg(any(test, feature = "test-support"))]
@@ -248,8 +248,6 @@ impl FakeFsState {
I: IntoIterator<Item = T>,
T: Into<PathBuf>,
{
- use postage::prelude::Sink as _;
-
let events = paths
.into_iter()
.map(|path| fsevent::Event {
@@ -257,9 +255,12 @@ impl FakeFsState {
flags: fsevent::StreamFlags::empty(),
path: path.into(),
})
- .collect();
+ .collect::<Vec<_>>();
- let _ = self.events_tx.send(events).await;
+ self.event_txs.retain(|tx| {
+ let _ = tx.try_send(events.clone());
+ !tx.is_closed()
+ });
}
}
@@ -267,13 +268,12 @@ impl FakeFsState {
pub struct FakeFs {
// Use an unfair lock to ensure tests are deterministic.
state: futures::lock::Mutex<FakeFsState>,
- executor: std::sync::Arc<gpui::executor::Background>,
+ executor: std::sync::Weak<gpui::executor::Background>,
}
#[cfg(any(test, feature = "test-support"))]
impl FakeFs {
pub fn new(executor: std::sync::Arc<gpui::executor::Background>) -> std::sync::Arc<Self> {
- let (events_tx, _) = postage::broadcast::channel(2048);
let mut entries = std::collections::BTreeMap::new();
entries.insert(
Path::new("/").to_path_buf(),
@@ -288,11 +288,11 @@ impl FakeFs {
},
);
std::sync::Arc::new(Self {
- executor,
+ executor: std::sync::Arc::downgrade(&executor),
state: futures::lock::Mutex::new(FakeFsState {
entries,
next_inode: 1,
- events_tx,
+ event_txs: Default::default(),
}),
})
}
@@ -375,13 +375,21 @@ impl FakeFs {
}
.boxed()
}
+
+ async fn simulate_random_delay(&self) {
+ self.executor
+ .upgrade()
+ .expect("excecutor has been dropped")
+ .simulate_random_delay()
+ .await;
+ }
}
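
`FakeFs` now holds the background executor as a `Weak` reference, so the fake filesystem can no longer keep the test executor alive past teardown; every simulated delay upgrades the reference and fails loudly if the executor is already gone. A minimal sketch of that upgrade-or-panic pattern, with a stand-in type in place of the real executor:

```rust
use std::sync::{Arc, Weak};

// Stand-in for a fake resource that must not keep the executor alive.
struct Fake {
    executor: Weak<String>, // placeholder for Arc<gpui::executor::Background>
}

impl Fake {
    fn simulate_random_delay(&self) {
        let executor = self
            .executor
            .upgrade()
            .expect("executor has been dropped");
        // A real implementation would await the executor's random delay here.
        println!("delaying on {executor}");
    }
}

fn main() {
    let executor = Arc::new(String::from("background executor"));
    let fake = Fake { executor: Arc::downgrade(&executor) };
    fake.simulate_random_delay();
    drop(executor);
    // fake.simulate_random_delay(); // would panic now: the executor is gone
}
```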
#[cfg(any(test, feature = "test-support"))]
#[async_trait::async_trait]
impl Fs for FakeFs {
async fn create_dir(&self, path: &Path) -> Result<()> {
- self.executor.simulate_random_delay().await;
+ self.simulate_random_delay().await;
let state = &mut *self.state.lock().await;
let path = normalize_path(path);
let mut ancestor_path = PathBuf::new();
@@ -418,7 +426,7 @@ impl Fs for FakeFs {
}
async fn create_file(&self, path: &Path, options: CreateOptions) -> Result<()> {
- self.executor.simulate_random_delay().await;
+ self.simulate_random_delay().await;
let mut state = self.state.lock().await;
let path = normalize_path(path);
state.validate_path(&path)?;
@@ -546,7 +554,7 @@ impl Fs for FakeFs {
async fn load(&self, path: &Path) -> Result<String> {
let path = normalize_path(path);
- self.executor.simulate_random_delay().await;
+ self.simulate_random_delay().await;
let state = self.state.lock().await;
let text = state
.entries
@@ -557,7 +565,7 @@ impl Fs for FakeFs {
}
async fn save(&self, path: &Path, text: &Rope) -> Result<()> {
- self.executor.simulate_random_delay().await;
+ self.simulate_random_delay().await;
let mut state = self.state.lock().await;
let path = normalize_path(path);
state.validate_path(&path)?;
@@ -589,13 +597,13 @@ impl Fs for FakeFs {
}
async fn canonicalize(&self, path: &Path) -> Result<PathBuf> {
- self.executor.simulate_random_delay().await;
+ self.simulate_random_delay().await;
Ok(normalize_path(path))
}
async fn is_file(&self, path: &Path) -> bool {
let path = normalize_path(path);
- self.executor.simulate_random_delay().await;
+ self.simulate_random_delay().await;
let state = self.state.lock().await;
state
.entries
@@ -604,7 +612,7 @@ impl Fs for FakeFs {
}
async fn metadata(&self, path: &Path) -> Result<Option<Metadata>> {
- self.executor.simulate_random_delay().await;
+ self.simulate_random_delay().await;
let state = self.state.lock().await;
let path = normalize_path(path);
Ok(state.entries.get(&path).map(|entry| entry.metadata.clone()))
@@ -615,7 +623,7 @@ impl Fs for FakeFs {
abs_path: &Path,
) -> Result<Pin<Box<dyn Send + Stream<Item = Result<PathBuf>>>>> {
use futures::{future, stream};
- self.executor.simulate_random_delay().await;
+ self.simulate_random_delay().await;
let state = self.state.lock().await;
let abs_path = normalize_path(abs_path);
Ok(Box::pin(stream::iter(state.entries.clone()).filter_map(
@@ -634,9 +642,10 @@ impl Fs for FakeFs {
path: &Path,
_: Duration,
) -> Pin<Box<dyn Send + Stream<Item = Vec<fsevent::Event>>>> {
- let state = self.state.lock().await;
- self.executor.simulate_random_delay().await;
- let rx = state.events_tx.subscribe();
+ let mut state = self.state.lock().await;
+ self.simulate_random_delay().await;
+ let (tx, rx) = smol::channel::unbounded();
+ state.event_txs.push(tx);
let path = path.to_path_buf();
Box::pin(futures::StreamExt::filter(rx, move |events| {
let result = events.iter().any(|event| event.path.starts_with(&path));
@@ -3603,7 +3603,7 @@ mod tests {
use worktree::WorktreeHandle as _;
#[gpui::test]
- async fn test_populate_and_search(mut cx: gpui::TestAppContext) {
+ async fn test_populate_and_search(cx: &mut gpui::TestAppContext) {
let dir = temp_tree(json!({
"root": {
"apple": "",
@@ -3627,10 +3627,10 @@ mod tests {
)
.unwrap();
- let project = Project::test(Arc::new(RealFs), &mut cx);
+ let project = Project::test(Arc::new(RealFs), cx);
let (tree, _) = project
- .update(&mut cx, |project, cx| {
+ .update(cx, |project, cx| {
project.find_or_create_local_worktree(&root_link_path, false, cx)
})
.await
@@ -3649,7 +3649,7 @@ mod tests {
let cancel_flag = Default::default();
let results = project
- .read_with(&cx, |project, cx| {
+ .read_with(cx, |project, cx| {
project.match_paths("bna", false, false, 10, &cancel_flag, cx)
})
.await;
@@ -3666,7 +3666,7 @@ mod tests {
}
#[gpui::test]
- async fn test_language_server_diagnostics(mut cx: gpui::TestAppContext) {
+ async fn test_language_server_diagnostics(cx: &mut gpui::TestAppContext) {
let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
let progress_token = language_server_config
.disk_based_diagnostics_progress_token
@@ -3693,31 +3693,31 @@ mod tests {
)
.await;
- let project = Project::test(fs, &mut cx);
- project.update(&mut cx, |project, _| {
+ let project = Project::test(fs, cx);
+ project.update(cx, |project, _| {
Arc::get_mut(&mut project.languages).unwrap().add(language);
});
let (tree, _) = project
- .update(&mut cx, |project, cx| {
+ .update(cx, |project, cx| {
project.find_or_create_local_worktree("/dir", false, cx)
})
.await
.unwrap();
- let worktree_id = tree.read_with(&cx, |tree, _| tree.id());
+ let worktree_id = tree.read_with(cx, |tree, _| tree.id());
cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
.await;
// Cause worktree to start the fake language server
let _buffer = project
- .update(&mut cx, |project, cx| {
+ .update(cx, |project, cx| {
project.open_buffer((worktree_id, Path::new("b.rs")), cx)
})
.await
.unwrap();
- let mut events = subscribe(&project, &mut cx);
+ let mut events = subscribe(&project, cx);
let mut fake_server = fake_servers.next().await.unwrap();
fake_server.start_progress(&progress_token).await;
@@ -3759,11 +3759,11 @@ mod tests {
);
let buffer = project
- .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
+ .update(cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
.await
.unwrap();
- buffer.read_with(&cx, |buffer, _| {
+ buffer.read_with(cx, |buffer, _| {
let snapshot = buffer.snapshot();
let diagnostics = snapshot
.diagnostics_in_range::<_, Point>(0..buffer.len())
@@ -3785,7 +3785,7 @@ mod tests {
}
#[gpui::test]
- async fn test_search_worktree_without_files(mut cx: gpui::TestAppContext) {
+ async fn test_search_worktree_without_files(cx: &mut gpui::TestAppContext) {
let dir = temp_tree(json!({
"root": {
"dir1": {},
@@ -3795,9 +3795,9 @@ mod tests {
}
}));
- let project = Project::test(Arc::new(RealFs), &mut cx);
+ let project = Project::test(Arc::new(RealFs), cx);
let (tree, _) = project
- .update(&mut cx, |project, cx| {
+ .update(cx, |project, cx| {
project.find_or_create_local_worktree(&dir.path(), false, cx)
})
.await
@@ -3808,7 +3808,7 @@ mod tests {
let cancel_flag = Default::default();
let results = project
- .read_with(&cx, |project, cx| {
+ .read_with(cx, |project, cx| {
project.match_paths("dir", false, false, 10, &cancel_flag, cx)
})
.await;
@@ -3817,7 +3817,7 @@ mod tests {
}
#[gpui::test]
- async fn test_definition(mut cx: gpui::TestAppContext) {
+ async fn test_definition(cx: &mut gpui::TestAppContext) {
let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
let language = Arc::new(Language::new(
LanguageConfig {
@@ -3839,23 +3839,23 @@ mod tests {
)
.await;
- let project = Project::test(fs, &mut cx);
- project.update(&mut cx, |project, _| {
+ let project = Project::test(fs, cx);
+ project.update(cx, |project, _| {
Arc::get_mut(&mut project.languages).unwrap().add(language);
});
let (tree, _) = project
- .update(&mut cx, |project, cx| {
+ .update(cx, |project, cx| {
project.find_or_create_local_worktree("/dir/b.rs", false, cx)
})
.await
.unwrap();
- let worktree_id = tree.read_with(&cx, |tree, _| tree.id());
+ let worktree_id = tree.read_with(cx, |tree, _| tree.id());
cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
.await;
let buffer = project
- .update(&mut cx, |project, cx| {
+ .update(cx, |project, cx| {
project.open_buffer(
ProjectPath {
worktree_id,
@@ -3883,7 +3883,7 @@ mod tests {
});
let mut definitions = project
- .update(&mut cx, |project, cx| project.definition(&buffer, 22, cx))
+ .update(cx, |project, cx| project.definition(&buffer, 22, cx))
.await
.unwrap();
@@ -3934,7 +3934,7 @@ mod tests {
}
#[gpui::test]
- async fn test_save_file(mut cx: gpui::TestAppContext) {
+ async fn test_save_file(cx: &mut gpui::TestAppContext) {
let fs = FakeFs::new(cx.background());
fs.insert_tree(
"/dir",
@@ -3944,22 +3944,22 @@ mod tests {
)
.await;
- let project = Project::test(fs.clone(), &mut cx);
+ let project = Project::test(fs.clone(), cx);
let worktree_id = project
- .update(&mut cx, |p, cx| {
+ .update(cx, |p, cx| {
p.find_or_create_local_worktree("/dir", false, cx)
})
.await
.unwrap()
.0
- .read_with(&cx, |tree, _| tree.id());
+ .read_with(cx, |tree, _| tree.id());
let buffer = project
- .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
+ .update(cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
.await
.unwrap();
buffer
- .update(&mut cx, |buffer, cx| {
+ .update(cx, |buffer, cx| {
assert_eq!(buffer.text(), "the old contents");
buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
buffer.save(cx)
@@ -3968,11 +3968,11 @@ mod tests {
.unwrap();
let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
- assert_eq!(new_text, buffer.read_with(&cx, |buffer, _| buffer.text()));
+ assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
}
#[gpui::test]
- async fn test_save_in_single_file_worktree(mut cx: gpui::TestAppContext) {
+ async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
let fs = FakeFs::new(cx.background());
fs.insert_tree(
"/dir",
@@ -3982,22 +3982,22 @@ mod tests {
)
.await;
- let project = Project::test(fs.clone(), &mut cx);
+ let project = Project::test(fs.clone(), cx);
let worktree_id = project
- .update(&mut cx, |p, cx| {
+ .update(cx, |p, cx| {
p.find_or_create_local_worktree("/dir/file1", false, cx)
})
.await
.unwrap()
.0
- .read_with(&cx, |tree, _| tree.id());
+ .read_with(cx, |tree, _| tree.id());
let buffer = project
- .update(&mut cx, |p, cx| p.open_buffer((worktree_id, ""), cx))
+ .update(cx, |p, cx| p.open_buffer((worktree_id, ""), cx))
.await
.unwrap();
buffer
- .update(&mut cx, |buffer, cx| {
+ .update(cx, |buffer, cx| {
buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
buffer.save(cx)
})
@@ -4005,11 +4005,11 @@ mod tests {
.unwrap();
let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
- assert_eq!(new_text, buffer.read_with(&cx, |buffer, _| buffer.text()));
+ assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
}
#[gpui::test(retries = 5)]
- async fn test_rescan_and_remote_updates(mut cx: gpui::TestAppContext) {
+ async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
let dir = temp_tree(json!({
"a": {
"file1": "",
@@ -4024,16 +4024,16 @@ mod tests {
}
}));
- let project = Project::test(Arc::new(RealFs), &mut cx);
- let rpc = project.read_with(&cx, |p, _| p.client.clone());
+ let project = Project::test(Arc::new(RealFs), cx);
+ let rpc = project.read_with(cx, |p, _| p.client.clone());
let (tree, _) = project
- .update(&mut cx, |p, cx| {
+ .update(cx, |p, cx| {
p.find_or_create_local_worktree(dir.path(), false, cx)
})
.await
.unwrap();
- let worktree_id = tree.read_with(&cx, |tree, _| tree.id());
+ let worktree_id = tree.read_with(cx, |tree, _| tree.id());
let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
let buffer = project.update(cx, |p, cx| p.open_buffer((worktree_id, path), cx));
@@ -4047,10 +4047,10 @@ mod tests {
})
};
- let buffer2 = buffer_for_path("a/file2", &mut cx).await;
- let buffer3 = buffer_for_path("a/file3", &mut cx).await;
- let buffer4 = buffer_for_path("b/c/file4", &mut cx).await;
- let buffer5 = buffer_for_path("b/c/file5", &mut cx).await;
+ let buffer2 = buffer_for_path("a/file2", cx).await;
+ let buffer3 = buffer_for_path("a/file3", cx).await;
+ let buffer4 = buffer_for_path("b/c/file4", cx).await;
+ let buffer5 = buffer_for_path("b/c/file5", cx).await;
let file2_id = id_for_path("a/file2", &cx);
let file3_id = id_for_path("a/file3", &cx);
@@ -4061,7 +4061,7 @@ mod tests {
.await;
// Create a remote copy of this worktree.
- let initial_snapshot = tree.read_with(&cx, |tree, _| tree.as_local().unwrap().snapshot());
+ let initial_snapshot = tree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
let (remote, load_task) = cx.update(|cx| {
Worktree::remote(
1,
@@ -4136,7 +4136,7 @@ mod tests {
// Update the remote worktree. Check that it becomes consistent with the
// local worktree.
- remote.update(&mut cx, |remote, cx| {
+ remote.update(cx, |remote, cx| {
let update_message = tree.read(cx).as_local().unwrap().snapshot().build_update(
&initial_snapshot,
1,
@@ -4161,7 +4161,7 @@ mod tests {
}
#[gpui::test]
- async fn test_buffer_deduping(mut cx: gpui::TestAppContext) {
+ async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
let fs = FakeFs::new(cx.background());
fs.insert_tree(
"/the-dir",
@@ -4172,18 +4172,18 @@ mod tests {
)
.await;
- let project = Project::test(fs.clone(), &mut cx);
+ let project = Project::test(fs.clone(), cx);
let worktree_id = project
- .update(&mut cx, |p, cx| {
+ .update(cx, |p, cx| {
p.find_or_create_local_worktree("/the-dir", false, cx)
})
.await
.unwrap()
.0
- .read_with(&cx, |tree, _| tree.id());
+ .read_with(cx, |tree, _| tree.id());
// Spawn multiple tasks to open paths, repeating some paths.
- let (buffer_a_1, buffer_b, buffer_a_2) = project.update(&mut cx, |p, cx| {
+ let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
(
p.open_buffer((worktree_id, "a.txt"), cx),
p.open_buffer((worktree_id, "b.txt"), cx),
@@ -4194,8 +4194,8 @@ mod tests {
let buffer_a_1 = buffer_a_1.await.unwrap();
let buffer_a_2 = buffer_a_2.await.unwrap();
let buffer_b = buffer_b.await.unwrap();
- assert_eq!(buffer_a_1.read_with(&cx, |b, _| b.text()), "a-contents");
- assert_eq!(buffer_b.read_with(&cx, |b, _| b.text()), "b-contents");
+ assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
+ assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
// There is only one buffer per path.
let buffer_a_id = buffer_a_1.id();
@@ -4204,7 +4204,7 @@ mod tests {
// Open the same path again while it is still open.
drop(buffer_a_1);
let buffer_a_3 = project
- .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx))
+ .update(cx, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx))
.await
.unwrap();
@@ -4213,7 +4213,7 @@ mod tests {
}
#[gpui::test]
- async fn test_buffer_is_dirty(mut cx: gpui::TestAppContext) {
+ async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
use std::fs;
let dir = temp_tree(json!({
@@ -4222,28 +4222,28 @@ mod tests {
"file3": "ghi",
}));
- let project = Project::test(Arc::new(RealFs), &mut cx);
+ let project = Project::test(Arc::new(RealFs), cx);
let (worktree, _) = project
- .update(&mut cx, |p, cx| {
+ .update(cx, |p, cx| {
p.find_or_create_local_worktree(dir.path(), false, cx)
})
.await
.unwrap();
- let worktree_id = worktree.read_with(&cx, |worktree, _| worktree.id());
+ let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
worktree.flush_fs_events(&cx).await;
worktree
- .read_with(&cx, |t, _| t.as_local().unwrap().scan_complete())
+ .read_with(cx, |t, _| t.as_local().unwrap().scan_complete())
.await;
let buffer1 = project
- .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
+ .update(cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
.await
.unwrap();
let events = Rc::new(RefCell::new(Vec::new()));
// initially, the buffer isn't dirty.
- buffer1.update(&mut cx, |buffer, cx| {
+ buffer1.update(cx, |buffer, cx| {
cx.subscribe(&buffer1, {
let events = events.clone();
move |_, _, event, _| events.borrow_mut().push(event.clone())
@@ -4257,7 +4257,7 @@ mod tests {
});
// after the first edit, the buffer is dirty, and emits a dirtied event.
- buffer1.update(&mut cx, |buffer, cx| {
+ buffer1.update(cx, |buffer, cx| {
assert!(buffer.text() == "ac");
assert!(buffer.is_dirty());
assert_eq!(
@@ -4269,7 +4269,7 @@ mod tests {
});
// after saving, the buffer is not dirty, and emits a saved event.
- buffer1.update(&mut cx, |buffer, cx| {
+ buffer1.update(cx, |buffer, cx| {
assert!(!buffer.is_dirty());
assert_eq!(*events.borrow(), &[language::Event::Saved]);
events.borrow_mut().clear();
@@ -4279,7 +4279,7 @@ mod tests {
});
// after editing again, the buffer is dirty, and emits another dirty event.
- buffer1.update(&mut cx, |buffer, cx| {
+ buffer1.update(cx, |buffer, cx| {
assert!(buffer.text() == "aBDc");
assert!(buffer.is_dirty());
assert_eq!(
@@ -4304,10 +4304,10 @@ mod tests {
// When a file is deleted, the buffer is considered dirty.
let events = Rc::new(RefCell::new(Vec::new()));
let buffer2 = project
- .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "file2"), cx))
+ .update(cx, |p, cx| p.open_buffer((worktree_id, "file2"), cx))
.await
.unwrap();
- buffer2.update(&mut cx, |_, cx| {
+ buffer2.update(cx, |_, cx| {
cx.subscribe(&buffer2, {
let events = events.clone();
move |_, _, event, _| events.borrow_mut().push(event.clone())
@@ -4325,10 +4325,10 @@ mod tests {
// When a file is already dirty when deleted, we don't emit a Dirtied event.
let events = Rc::new(RefCell::new(Vec::new()));
let buffer3 = project
- .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "file3"), cx))
+ .update(cx, |p, cx| p.open_buffer((worktree_id, "file3"), cx))
.await
.unwrap();
- buffer3.update(&mut cx, |_, cx| {
+ buffer3.update(cx, |_, cx| {
cx.subscribe(&buffer3, {
let events = events.clone();
move |_, _, event, _| events.borrow_mut().push(event.clone())
@@ -4337,7 +4337,7 @@ mod tests {
});
worktree.flush_fs_events(&cx).await;
- buffer3.update(&mut cx, |buffer, cx| {
+ buffer3.update(cx, |buffer, cx| {
buffer.edit(Some(0..0), "x", cx);
});
events.borrow_mut().clear();
@@ -4350,30 +4350,28 @@ mod tests {
}
#[gpui::test]
- async fn test_buffer_file_changes_on_disk(mut cx: gpui::TestAppContext) {
+ async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
use std::fs;
let initial_contents = "aaa\nbbbbb\nc\n";
let dir = temp_tree(json!({ "the-file": initial_contents }));
- let project = Project::test(Arc::new(RealFs), &mut cx);
+ let project = Project::test(Arc::new(RealFs), cx);
let (worktree, _) = project
- .update(&mut cx, |p, cx| {
+ .update(cx, |p, cx| {
p.find_or_create_local_worktree(dir.path(), false, cx)
})
.await
.unwrap();
- let worktree_id = worktree.read_with(&cx, |tree, _| tree.id());
+ let worktree_id = worktree.read_with(cx, |tree, _| tree.id());
worktree
- .read_with(&cx, |t, _| t.as_local().unwrap().scan_complete())
+ .read_with(cx, |t, _| t.as_local().unwrap().scan_complete())
.await;
let abs_path = dir.path().join("the-file");
let buffer = project
- .update(&mut cx, |p, cx| {
- p.open_buffer((worktree_id, "the-file"), cx)
- })
+ .update(cx, |p, cx| p.open_buffer((worktree_id, "the-file"), cx))
.await
.unwrap();
@@ -4397,7 +4395,7 @@ mod tests {
// Change the file on disk, adding two new lines of text, and removing
// one line.
- buffer.read_with(&cx, |buffer, _| {
+ buffer.read_with(cx, |buffer, _| {
assert!(!buffer.is_dirty());
assert!(!buffer.has_conflict());
});
@@ -4411,7 +4409,7 @@ mod tests {
.condition(&cx, |buffer, _| buffer.text() == new_contents)
.await;
- buffer.update(&mut cx, |buffer, _| {
+ buffer.update(cx, |buffer, _| {
assert_eq!(buffer.text(), new_contents);
assert!(!buffer.is_dirty());
assert!(!buffer.has_conflict());
@@ -4433,7 +4431,7 @@ mod tests {
});
// Modify the buffer
- buffer.update(&mut cx, |buffer, cx| {
+ buffer.update(cx, |buffer, cx| {
buffer.edit(vec![0..0], " ", cx);
assert!(buffer.is_dirty());
assert!(!buffer.has_conflict());
@@ -4450,7 +4448,7 @@ mod tests {
}
#[gpui::test]
- async fn test_grouped_diagnostics(mut cx: gpui::TestAppContext) {
+ async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
let fs = FakeFs::new(cx.background());
fs.insert_tree(
"/the-dir",
@@ -4467,17 +4465,17 @@ mod tests {
)
.await;
- let project = Project::test(fs.clone(), &mut cx);
+ let project = Project::test(fs.clone(), cx);
let (worktree, _) = project
- .update(&mut cx, |p, cx| {
+ .update(cx, |p, cx| {
p.find_or_create_local_worktree("/the-dir", false, cx)
})
.await
.unwrap();
- let worktree_id = worktree.read_with(&cx, |tree, _| tree.id());
+ let worktree_id = worktree.read_with(cx, |tree, _| tree.id());
let buffer = project
- .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
+ .update(cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
.await
.unwrap();
@@ -4582,11 +4580,11 @@ mod tests {
};
project
- .update(&mut cx, |p, cx| {
+ .update(cx, |p, cx| {
p.update_diagnostics(message, &Default::default(), cx)
})
.unwrap();
- let buffer = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
+ let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());
assert_eq!(
buffer
@@ -4709,7 +4707,7 @@ mod tests {
}
#[gpui::test]
- async fn test_rename(mut cx: gpui::TestAppContext) {
+ async fn test_rename(cx: &mut gpui::TestAppContext) {
let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
let language = Arc::new(Language::new(
LanguageConfig {
@@ -4731,23 +4729,23 @@ mod tests {
)
.await;
- let project = Project::test(fs.clone(), &mut cx);
- project.update(&mut cx, |project, _| {
+ let project = Project::test(fs.clone(), cx);
+ project.update(cx, |project, _| {
Arc::get_mut(&mut project.languages).unwrap().add(language);
});
let (tree, _) = project
- .update(&mut cx, |project, cx| {
+ .update(cx, |project, cx| {
project.find_or_create_local_worktree("/dir", false, cx)
})
.await
.unwrap();
- let worktree_id = tree.read_with(&cx, |tree, _| tree.id());
+ let worktree_id = tree.read_with(cx, |tree, _| tree.id());
cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
.await;
let buffer = project
- .update(&mut cx, |project, cx| {
+ .update(cx, |project, cx| {
project.open_buffer((worktree_id, Path::new("one.rs")), cx)
})
.await
@@ -4755,7 +4753,7 @@ mod tests {
let mut fake_server = fake_servers.next().await.unwrap();
- let response = project.update(&mut cx, |project, cx| {
+ let response = project.update(cx, |project, cx| {
project.prepare_rename(buffer.clone(), 7, cx)
});
fake_server
@@ -4771,10 +4769,10 @@ mod tests {
.await
.unwrap();
let range = response.await.unwrap().unwrap();
- let range = buffer.read_with(&cx, |buffer, _| range.to_offset(buffer));
+ let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
assert_eq!(range, 6..9);
- let response = project.update(&mut cx, |project, cx| {
+ let response = project.update(cx, |project, cx| {
project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
});
fake_server
@@ -4837,7 +4835,7 @@ mod tests {
.remove_entry(&buffer)
.unwrap()
.0
- .read_with(&cx, |buffer, _| buffer.text()),
+ .read_with(cx, |buffer, _| buffer.text()),
"const THREE: usize = 1;"
);
assert_eq!(
@@ -4845,13 +4843,13 @@ mod tests {
.into_keys()
.next()
.unwrap()
- .read_with(&cx, |buffer, _| buffer.text()),
+ .read_with(cx, |buffer, _| buffer.text()),
"const TWO: usize = one::THREE + one::THREE;"
);
}
#[gpui::test]
- async fn test_search(mut cx: gpui::TestAppContext) {
+ async fn test_search(cx: &mut gpui::TestAppContext) {
let fs = FakeFs::new(cx.background());
fs.insert_tree(
"/dir",
@@ -4863,19 +4861,19 @@ mod tests {
}),
)
.await;
- let project = Project::test(fs.clone(), &mut cx);
+ let project = Project::test(fs.clone(), cx);
let (tree, _) = project
- .update(&mut cx, |project, cx| {
+ .update(cx, |project, cx| {
project.find_or_create_local_worktree("/dir", false, cx)
})
.await
.unwrap();
- let worktree_id = tree.read_with(&cx, |tree, _| tree.id());
+ let worktree_id = tree.read_with(cx, |tree, _| tree.id());
cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
.await;
assert_eq!(
- search(&project, SearchQuery::text("TWO", false, true), &mut cx)
+ search(&project, SearchQuery::text("TWO", false, true), cx)
.await
.unwrap(),
HashMap::from_iter([
@@ -4885,17 +4883,17 @@ mod tests {
);
let buffer_4 = project
- .update(&mut cx, |project, cx| {
+ .update(cx, |project, cx| {
project.open_buffer((worktree_id, "four.rs"), cx)
})
.await
.unwrap();
- buffer_4.update(&mut cx, |buffer, cx| {
+ buffer_4.update(cx, |buffer, cx| {
buffer.edit([20..28, 31..43], "two::TWO", cx);
});
assert_eq!(
- search(&project, SearchQuery::text("TWO", false, true), &mut cx)
+ search(&project, SearchQuery::text("TWO", false, true), cx)
.await
.unwrap(),
HashMap::from_iter([
@@ -384,16 +384,19 @@ impl Worktree {
worktree.snapshot = worktree.background_snapshot.lock().clone();
if worktree.is_scanning() {
if worktree.poll_task.is_none() {
- worktree.poll_task = Some(cx.spawn(|this, mut cx| async move {
+ worktree.poll_task = Some(cx.spawn_weak(|this, mut cx| async move {
if is_fake_fs {
- smol::future::yield_now().await;
+ #[cfg(any(test, feature = "test-support"))]
+ cx.background().simulate_random_delay().await;
} else {
smol::Timer::after(Duration::from_millis(100)).await;
}
- this.update(&mut cx, |this, cx| {
- this.as_local_mut().unwrap().poll_task = None;
- this.poll_snapshot(cx);
- })
+ if let Some(this) = this.upgrade(&cx) {
+ this.update(&mut cx, |this, cx| {
+ this.as_local_mut().unwrap().poll_task = None;
+ this.poll_snapshot(cx);
+ });
+ }
}));
}
} else {
@@ -2441,7 +2444,7 @@ mod tests {
use util::test::temp_tree;
#[gpui::test]
- async fn test_traversal(cx: gpui::TestAppContext) {
+ async fn test_traversal(cx: &mut gpui::TestAppContext) {
let fs = FakeFs::new(cx.background());
fs.insert_tree(
"/root",
@@ -2470,7 +2473,7 @@ mod tests {
cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
.await;
- tree.read_with(&cx, |tree, _| {
+ tree.read_with(cx, |tree, _| {
assert_eq!(
tree.entries(false)
.map(|entry| entry.path.as_ref())
@@ -2486,7 +2489,7 @@ mod tests {
}
#[gpui::test]
- async fn test_rescan_with_gitignore(cx: gpui::TestAppContext) {
+ async fn test_rescan_with_gitignore(cx: &mut gpui::TestAppContext) {
let dir = temp_tree(json!({
".git": {},
".gitignore": "ignored-dir\n",
@@ -589,7 +589,9 @@ mod tests {
use workspace::WorkspaceParams;
#[gpui::test]
- async fn test_visible_list(mut cx: gpui::TestAppContext) {
+ async fn test_visible_list(cx: &mut gpui::TestAppContext) {
+ cx.foreground().forbid_parking();
+
let params = cx.update(WorkspaceParams::test);
let settings = params.settings.clone();
let fs = params.fs.as_fake();
@@ -639,28 +641,28 @@ mod tests {
)
});
let (root1, _) = project
- .update(&mut cx, |project, cx| {
+ .update(cx, |project, cx| {
project.find_or_create_local_worktree("/root1", false, cx)
})
.await
.unwrap();
root1
- .read_with(&cx, |t, _| t.as_local().unwrap().scan_complete())
+ .read_with(cx, |t, _| t.as_local().unwrap().scan_complete())
.await;
let (root2, _) = project
- .update(&mut cx, |project, cx| {
+ .update(cx, |project, cx| {
project.find_or_create_local_worktree("/root2", false, cx)
})
.await
.unwrap();
root2
- .read_with(&cx, |t, _| t.as_local().unwrap().scan_complete())
+ .read_with(cx, |t, _| t.as_local().unwrap().scan_complete())
.await;
let (_, workspace) = cx.add_window(|cx| Workspace::new(&params, cx));
- let panel = workspace.update(&mut cx, |_, cx| ProjectPanel::new(project, settings, cx));
+ let panel = workspace.update(cx, |_, cx| ProjectPanel::new(project, settings, cx));
assert_eq!(
- visible_entry_details(&panel, 0..50, &mut cx),
+ visible_entry_details(&panel, 0..50, cx),
&[
EntryDetails {
filename: "root1".to_string(),
@@ -721,9 +723,9 @@ mod tests {
],
);
- toggle_expand_dir(&panel, "root1/b", &mut cx);
+ toggle_expand_dir(&panel, "root1/b", cx);
assert_eq!(
- visible_entry_details(&panel, 0..50, &mut cx),
+ visible_entry_details(&panel, 0..50, cx),
&[
EntryDetails {
filename: "root1".to_string(),
@@ -799,7 +801,7 @@ mod tests {
);
assert_eq!(
- visible_entry_details(&panel, 5..8, &mut cx),
+ visible_entry_details(&panel, 5..8, cx),
[
EntryDetails {
filename: "4".to_string(),
@@ -59,18 +59,21 @@ impl Connection {
) {
use futures::channel::mpsc;
use io::{Error, ErrorKind};
+ use std::sync::Arc;
let (tx, rx) = mpsc::unbounded::<WebSocketMessage>();
let tx = tx
.sink_map_err(|e| WebSocketError::from(Error::new(ErrorKind::Other, e)))
.with({
+ let executor = Arc::downgrade(&executor);
let kill_rx = kill_rx.clone();
- let executor = executor.clone();
move |msg| {
let kill_rx = kill_rx.clone();
let executor = executor.clone();
Box::pin(async move {
- executor.simulate_random_delay().await;
+ if let Some(executor) = executor.upgrade() {
+ executor.simulate_random_delay().await;
+ }
if kill_rx.borrow().is_none() {
Ok(msg)
} else {
@@ -80,9 +83,11 @@ impl Connection {
}
});
let rx = rx.then(move |msg| {
- let executor = executor.clone();
+ let executor = Arc::downgrade(&executor);
Box::pin(async move {
- executor.simulate_random_delay().await;
+ if let Some(executor) = executor.upgrade() {
+ executor.simulate_random_delay().await;
+ }
msg
})
});
@@ -1,8 +1,7 @@
use super::proto::{self, AnyTypedEnvelope, EnvelopedMessage, MessageStream, RequestMessage};
use super::Connection;
use anyhow::{anyhow, Context, Result};
-use futures::stream::BoxStream;
-use futures::{FutureExt as _, StreamExt};
+use futures::{channel::oneshot, stream::BoxStream, FutureExt as _, StreamExt};
use parking_lot::{Mutex, RwLock};
use postage::{
barrier, mpsc,
@@ -92,7 +91,7 @@ pub struct ConnectionState {
outgoing_tx: futures::channel::mpsc::UnboundedSender<proto::Envelope>,
next_message_id: Arc<AtomicU32>,
response_channels:
- Arc<Mutex<Option<HashMap<u32, mpsc::Sender<(proto::Envelope, barrier::Sender)>>>>>,
+ Arc<Mutex<Option<HashMap<u32, oneshot::Sender<(proto::Envelope, barrier::Sender)>>>>>,
}
const WRITE_TIMEOUT: Duration = Duration::from_secs(10);
@@ -177,18 +176,14 @@ impl Peer {
async move {
if let Some(responding_to) = incoming.responding_to {
let channel = response_channels.lock().as_mut()?.remove(&responding_to);
- if let Some(mut tx) = channel {
+ if let Some(tx) = channel {
let mut requester_resumed = barrier::channel();
- if let Err(error) = tx.send((incoming, requester_resumed.0)).await {
+ if let Err(error) = tx.send((incoming, requester_resumed.0)) {
log::debug!(
"received RPC but request future was dropped {:?}",
- error.0 .0
+ error.0
);
}
- // Drop response channel before awaiting on the barrier. This allows the
- // barrier to get dropped even if the request's future is dropped before it
- // has a chance to observe the response.
- drop(tx);
requester_resumed.1.recv().await;
} else {
log::warn!("received RPC response to unknown request {}", responding_to);
@@ -239,7 +234,7 @@ impl Peer {
receiver_id: ConnectionId,
request: T,
) -> impl Future<Output = Result<T::Response>> {
- let (tx, mut rx) = mpsc::channel(1);
+ let (tx, rx) = oneshot::channel();
let send = self.connection_state(receiver_id).and_then(|connection| {
let message_id = connection.next_message_id.fetch_add(1, SeqCst);
connection
@@ -260,10 +255,7 @@ impl Peer {
});
async move {
send?;
- let (response, _barrier) = rx
- .recv()
- .await
- .ok_or_else(|| anyhow!("connection was closed"))?;
+ let (response, _barrier) = rx.await.map_err(|_| anyhow!("connection was closed"))?;
if let Some(proto::envelope::Payload::Error(error)) = &response.payload {
Err(anyhow!("RPC request failed - {}", error.message))
} else {
@@ -347,7 +339,7 @@ mod tests {
use gpui::TestAppContext;
#[gpui::test(iterations = 50)]
- async fn test_request_response(cx: TestAppContext) {
+ async fn test_request_response(cx: &mut TestAppContext) {
let executor = cx.foreground();
// create 2 clients connected to 1 server
@@ -441,7 +433,7 @@ mod tests {
}
#[gpui::test(iterations = 50)]
- async fn test_order_of_response_and_incoming(cx: TestAppContext) {
+ async fn test_order_of_response_and_incoming(cx: &mut TestAppContext) {
let executor = cx.foreground();
let server = Peer::new();
let client = Peer::new();
@@ -539,7 +531,7 @@ mod tests {
}
#[gpui::test(iterations = 50)]
- async fn test_dropping_request_before_completion(cx: TestAppContext) {
+ async fn test_dropping_request_before_completion(cx: &mut TestAppContext) {
let executor = cx.foreground();
let server = Peer::new();
let client = Peer::new();
@@ -651,7 +643,7 @@ mod tests {
}
#[gpui::test(iterations = 50)]
- async fn test_disconnect(cx: TestAppContext) {
+ async fn test_disconnect(cx: &mut TestAppContext) {
let executor = cx.foreground();
let (client_conn, mut server_conn, _) = Connection::in_memory(cx.background());
@@ -686,7 +678,7 @@ mod tests {
}
#[gpui::test(iterations = 50)]
- async fn test_io_error(cx: TestAppContext) {
+ async fn test_io_error(cx: &mut TestAppContext) {
let executor = cx.foreground();
let (client_conn, mut server_conn, _) = Connection::in_memory(cx.background());
@@ -520,7 +520,7 @@ mod tests {
use unindent::Unindent as _;
#[gpui::test]
- async fn test_search_simple(mut cx: TestAppContext) {
+ async fn test_search_simple(cx: &mut TestAppContext) {
let fonts = cx.font_cache();
let mut theme = gpui::fonts::with_font_cache(fonts.clone(), || theme::Theme::default());
theme.search.match_background = Color::red();
@@ -551,11 +551,11 @@ mod tests {
// Search for a string that appears with different casing.
// By default, search is case-insensitive.
- search_bar.update(&mut cx, |search_bar, cx| {
+ search_bar.update(cx, |search_bar, cx| {
search_bar.set_query("us", cx);
});
editor.next_notification(&cx).await;
- editor.update(&mut cx, |editor, cx| {
+ editor.update(cx, |editor, cx| {
assert_eq!(
editor.all_highlighted_ranges(cx),
&[
@@ -572,11 +572,11 @@ mod tests {
});
// Switch to a case sensitive search.
- search_bar.update(&mut cx, |search_bar, cx| {
+ search_bar.update(cx, |search_bar, cx| {
search_bar.toggle_search_option(&ToggleSearchOption(SearchOption::CaseSensitive), cx);
});
editor.next_notification(&cx).await;
- editor.update(&mut cx, |editor, cx| {
+ editor.update(cx, |editor, cx| {
assert_eq!(
editor.all_highlighted_ranges(cx),
&[(
@@ -588,11 +588,11 @@ mod tests {
// Search for a string that appears both as a whole word and
// within other words. By default, all results are found.
- search_bar.update(&mut cx, |search_bar, cx| {
+ search_bar.update(cx, |search_bar, cx| {
search_bar.set_query("or", cx);
});
editor.next_notification(&cx).await;
- editor.update(&mut cx, |editor, cx| {
+ editor.update(cx, |editor, cx| {
assert_eq!(
editor.all_highlighted_ranges(cx),
&[
@@ -629,11 +629,11 @@ mod tests {
});
// Switch to a whole word search.
- search_bar.update(&mut cx, |search_bar, cx| {
+ search_bar.update(cx, |search_bar, cx| {
search_bar.toggle_search_option(&ToggleSearchOption(SearchOption::WholeWord), cx);
});
editor.next_notification(&cx).await;
- editor.update(&mut cx, |editor, cx| {
+ editor.update(cx, |editor, cx| {
assert_eq!(
editor.all_highlighted_ranges(cx),
&[
@@ -653,10 +653,10 @@ mod tests {
);
});
- editor.update(&mut cx, |editor, cx| {
+ editor.update(cx, |editor, cx| {
editor.select_display_ranges(&[DisplayPoint::new(0, 0)..DisplayPoint::new(0, 0)], cx);
});
- search_bar.update(&mut cx, |search_bar, cx| {
+ search_bar.update(cx, |search_bar, cx| {
assert_eq!(search_bar.active_match_index, Some(0));
search_bar.select_match(&SelectMatch(Direction::Next), cx);
assert_eq!(
@@ -664,82 +664,82 @@ mod tests {
[DisplayPoint::new(0, 41)..DisplayPoint::new(0, 43)]
);
});
- search_bar.read_with(&cx, |search_bar, _| {
+ search_bar.read_with(cx, |search_bar, _| {
assert_eq!(search_bar.active_match_index, Some(0));
});
- search_bar.update(&mut cx, |search_bar, cx| {
+ search_bar.update(cx, |search_bar, cx| {
search_bar.select_match(&SelectMatch(Direction::Next), cx);
assert_eq!(
editor.update(cx, |editor, cx| editor.selected_display_ranges(cx)),
[DisplayPoint::new(3, 11)..DisplayPoint::new(3, 13)]
);
});
- search_bar.read_with(&cx, |search_bar, _| {
+ search_bar.read_with(cx, |search_bar, _| {
assert_eq!(search_bar.active_match_index, Some(1));
});
- search_bar.update(&mut cx, |search_bar, cx| {
+ search_bar.update(cx, |search_bar, cx| {
search_bar.select_match(&SelectMatch(Direction::Next), cx);
assert_eq!(
editor.update(cx, |editor, cx| editor.selected_display_ranges(cx)),
[DisplayPoint::new(3, 56)..DisplayPoint::new(3, 58)]
);
});
- search_bar.read_with(&cx, |search_bar, _| {
+ search_bar.read_with(cx, |search_bar, _| {
assert_eq!(search_bar.active_match_index, Some(2));
});
- search_bar.update(&mut cx, |search_bar, cx| {
+ search_bar.update(cx, |search_bar, cx| {
search_bar.select_match(&SelectMatch(Direction::Next), cx);
assert_eq!(
editor.update(cx, |editor, cx| editor.selected_display_ranges(cx)),
[DisplayPoint::new(0, 41)..DisplayPoint::new(0, 43)]
);
});
- search_bar.read_with(&cx, |search_bar, _| {
+ search_bar.read_with(cx, |search_bar, _| {
assert_eq!(search_bar.active_match_index, Some(0));
});
- search_bar.update(&mut cx, |search_bar, cx| {
+ search_bar.update(cx, |search_bar, cx| {
search_bar.select_match(&SelectMatch(Direction::Prev), cx);
assert_eq!(
editor.update(cx, |editor, cx| editor.selected_display_ranges(cx)),
[DisplayPoint::new(3, 56)..DisplayPoint::new(3, 58)]
);
});
- search_bar.read_with(&cx, |search_bar, _| {
+ search_bar.read_with(cx, |search_bar, _| {
assert_eq!(search_bar.active_match_index, Some(2));
});
- search_bar.update(&mut cx, |search_bar, cx| {
+ search_bar.update(cx, |search_bar, cx| {
search_bar.select_match(&SelectMatch(Direction::Prev), cx);
assert_eq!(
editor.update(cx, |editor, cx| editor.selected_display_ranges(cx)),
[DisplayPoint::new(3, 11)..DisplayPoint::new(3, 13)]
);
});
- search_bar.read_with(&cx, |search_bar, _| {
+ search_bar.read_with(cx, |search_bar, _| {
assert_eq!(search_bar.active_match_index, Some(1));
});
- search_bar.update(&mut cx, |search_bar, cx| {
+ search_bar.update(cx, |search_bar, cx| {
search_bar.select_match(&SelectMatch(Direction::Prev), cx);
assert_eq!(
editor.update(cx, |editor, cx| editor.selected_display_ranges(cx)),
[DisplayPoint::new(0, 41)..DisplayPoint::new(0, 43)]
);
});
- search_bar.read_with(&cx, |search_bar, _| {
+ search_bar.read_with(cx, |search_bar, _| {
assert_eq!(search_bar.active_match_index, Some(0));
});
// Park the cursor in between matches and ensure that going to the previous match selects
// the closest match to the left.
- editor.update(&mut cx, |editor, cx| {
+ editor.update(cx, |editor, cx| {
editor.select_display_ranges(&[DisplayPoint::new(1, 0)..DisplayPoint::new(1, 0)], cx);
});
- search_bar.update(&mut cx, |search_bar, cx| {
+ search_bar.update(cx, |search_bar, cx| {
assert_eq!(search_bar.active_match_index, Some(1));
search_bar.select_match(&SelectMatch(Direction::Prev), cx);
assert_eq!(
@@ -747,16 +747,16 @@ mod tests {
[DisplayPoint::new(0, 41)..DisplayPoint::new(0, 43)]
);
});
- search_bar.read_with(&cx, |search_bar, _| {
+ search_bar.read_with(cx, |search_bar, _| {
assert_eq!(search_bar.active_match_index, Some(0));
});
// Park the cursor in between matches and ensure that going to the next match selects the
// closest match to the right.
- editor.update(&mut cx, |editor, cx| {
+ editor.update(cx, |editor, cx| {
editor.select_display_ranges(&[DisplayPoint::new(1, 0)..DisplayPoint::new(1, 0)], cx);
});
- search_bar.update(&mut cx, |search_bar, cx| {
+ search_bar.update(cx, |search_bar, cx| {
assert_eq!(search_bar.active_match_index, Some(1));
search_bar.select_match(&SelectMatch(Direction::Next), cx);
assert_eq!(
@@ -764,16 +764,16 @@ mod tests {
[DisplayPoint::new(3, 11)..DisplayPoint::new(3, 13)]
);
});
- search_bar.read_with(&cx, |search_bar, _| {
+ search_bar.read_with(cx, |search_bar, _| {
assert_eq!(search_bar.active_match_index, Some(1));
});
// Park the cursor after the last match and ensure that going to the previous match selects
// the last match.
- editor.update(&mut cx, |editor, cx| {
+ editor.update(cx, |editor, cx| {
editor.select_display_ranges(&[DisplayPoint::new(3, 60)..DisplayPoint::new(3, 60)], cx);
});
- search_bar.update(&mut cx, |search_bar, cx| {
+ search_bar.update(cx, |search_bar, cx| {
assert_eq!(search_bar.active_match_index, Some(2));
search_bar.select_match(&SelectMatch(Direction::Prev), cx);
assert_eq!(
@@ -781,16 +781,16 @@ mod tests {
[DisplayPoint::new(3, 56)..DisplayPoint::new(3, 58)]
);
});
- search_bar.read_with(&cx, |search_bar, _| {
+ search_bar.read_with(cx, |search_bar, _| {
assert_eq!(search_bar.active_match_index, Some(2));
});
// Park the cursor after the last match and ensure that going to the next match selects the
// first match.
- editor.update(&mut cx, |editor, cx| {
+ editor.update(cx, |editor, cx| {
editor.select_display_ranges(&[DisplayPoint::new(3, 60)..DisplayPoint::new(3, 60)], cx);
});
- search_bar.update(&mut cx, |search_bar, cx| {
+ search_bar.update(cx, |search_bar, cx| {
assert_eq!(search_bar.active_match_index, Some(2));
search_bar.select_match(&SelectMatch(Direction::Next), cx);
assert_eq!(
@@ -798,16 +798,16 @@ mod tests {
[DisplayPoint::new(0, 41)..DisplayPoint::new(0, 43)]
);
});
- search_bar.read_with(&cx, |search_bar, _| {
+ search_bar.read_with(cx, |search_bar, _| {
assert_eq!(search_bar.active_match_index, Some(0));
});
// Park the cursor before the first match and ensure that going to the previous match
// selects the last match.
- editor.update(&mut cx, |editor, cx| {
+ editor.update(cx, |editor, cx| {
editor.select_display_ranges(&[DisplayPoint::new(0, 0)..DisplayPoint::new(0, 0)], cx);
});
- search_bar.update(&mut cx, |search_bar, cx| {
+ search_bar.update(cx, |search_bar, cx| {
assert_eq!(search_bar.active_match_index, Some(0));
search_bar.select_match(&SelectMatch(Direction::Prev), cx);
assert_eq!(
@@ -815,7 +815,7 @@ mod tests {
[DisplayPoint::new(3, 56)..DisplayPoint::new(3, 58)]
);
});
- search_bar.read_with(&cx, |search_bar, _| {
+ search_bar.read_with(cx, |search_bar, _| {
assert_eq!(search_bar.active_match_index, Some(2));
});
}
@@ -714,7 +714,7 @@ mod tests {
use std::sync::Arc;
#[gpui::test]
- async fn test_project_search(mut cx: TestAppContext) {
+ async fn test_project_search(cx: &mut TestAppContext) {
let fonts = cx.font_cache();
let mut theme = gpui::fonts::with_font_cache(fonts.clone(), || theme::Theme::default());
theme.search.match_background = Color::red();
@@ -732,9 +732,9 @@ mod tests {
}),
)
.await;
- let project = Project::test(fs.clone(), &mut cx);
+ let project = Project::test(fs.clone(), cx);
let (tree, _) = project
- .update(&mut cx, |project, cx| {
+ .update(cx, |project, cx| {
project.find_or_create_local_worktree("/dir", false, cx)
})
.await
@@ -747,14 +747,14 @@ mod tests {
ProjectSearchView::new(search.clone(), None, settings, cx)
});
- search_view.update(&mut cx, |search_view, cx| {
+ search_view.update(cx, |search_view, cx| {
search_view
.query_editor
.update(cx, |query_editor, cx| query_editor.set_text("TWO", cx));
search_view.search(&Search, cx);
});
search_view.next_notification(&cx).await;
- search_view.update(&mut cx, |search_view, cx| {
+ search_view.update(cx, |search_view, cx| {
assert_eq!(
search_view
.results_editor
@@ -791,7 +791,7 @@ mod tests {
search_view.select_match(&SelectMatch(Direction::Next), cx);
});
- search_view.update(&mut cx, |search_view, cx| {
+ search_view.update(cx, |search_view, cx| {
assert_eq!(search_view.active_match_index, Some(1));
assert_eq!(
search_view
@@ -802,7 +802,7 @@ mod tests {
search_view.select_match(&SelectMatch(Direction::Next), cx);
});
- search_view.update(&mut cx, |search_view, cx| {
+ search_view.update(cx, |search_view, cx| {
assert_eq!(search_view.active_match_index, Some(2));
assert_eq!(
search_view
@@ -813,7 +813,7 @@ mod tests {
search_view.select_match(&SelectMatch(Direction::Next), cx);
});
- search_view.update(&mut cx, |search_view, cx| {
+ search_view.update(cx, |search_view, cx| {
assert_eq!(search_view.active_match_index, Some(0));
assert_eq!(
search_view
@@ -824,7 +824,7 @@ mod tests {
search_view.select_match(&SelectMatch(Direction::Prev), cx);
});
- search_view.update(&mut cx, |search_view, cx| {
+ search_view.update(cx, |search_view, cx| {
assert_eq!(search_view.active_match_index, Some(2));
assert_eq!(
search_view
@@ -835,7 +835,7 @@ mod tests {
search_view.select_match(&SelectMatch(Direction::Prev), cx);
});
- search_view.update(&mut cx, |search_view, cx| {
+ search_view.update(cx, |search_view, cx| {
assert_eq!(search_view.active_match_index, Some(1));
assert_eq!(
search_view
@@ -58,7 +58,12 @@ features = ["runtime-async-std-rustls", "postgres", "time", "uuid"]
collections = { path = "../collections", features = ["test-support"] }
gpui = { path = "../gpui", features = ["test-support"] }
rpc = { path = "../rpc", features = ["test-support"] }
-zed = { path = "../zed", features = ["test-support"] }
+client = { path = "../client", features = ["test-support"] }
+editor = { path = "../editor", features = ["test-support"] }
+language = { path = "../language", features = ["test-support"] }
+lsp = { path = "../lsp", features = ["test-support"] }
+project = { path = "../project", features = ["test-support"] }
+workspace = { path = "../workspace", features = ["test-support"] }
ctor = "0.1"
env_logger = "0.8"
util = { path = "../util" }
@@ -627,7 +627,7 @@ pub mod tests {
use util::post_inc;
#[gpui::test]
- async fn test_get_users_by_ids(cx: TestAppContext) {
+ async fn test_get_users_by_ids(cx: &mut TestAppContext) {
for test_db in [TestDb::postgres(), TestDb::fake(cx.background())] {
let db = test_db.db();
@@ -667,7 +667,7 @@ pub mod tests {
}
#[gpui::test]
- async fn test_recent_channel_messages(cx: TestAppContext) {
+ async fn test_recent_channel_messages(cx: &mut TestAppContext) {
for test_db in [TestDb::postgres(), TestDb::fake(cx.background())] {
let db = test_db.db();
let user = db.create_user("user", false).await.unwrap();
@@ -703,7 +703,7 @@ pub mod tests {
}
#[gpui::test]
- async fn test_channel_message_nonces(cx: TestAppContext) {
+ async fn test_channel_message_nonces(cx: &mut TestAppContext) {
for test_db in [TestDb::postgres(), TestDb::fake(cx.background())] {
let db = test_db.db();
let user = db.create_user("user", false).await.unwrap();
@@ -1112,7 +1112,6 @@ pub mod tests {
.take(count)
.cloned()
.collect::<Vec<_>>();
- dbg!(count, before_id, &messages);
messages.sort_unstable_by_key(|message| message.id);
Ok(messages)
}
@@ -989,10 +989,28 @@ mod tests {
github, AppState, Config,
};
use ::rpc::Peer;
+ use client::{
+ self, test::FakeHttpClient, Channel, ChannelDetails, ChannelList, Client, Credentials,
+ EstablishConnectionError, UserStore,
+ };
use collections::BTreeMap;
+ use editor::{
+ self, ConfirmCodeAction, ConfirmCompletion, ConfirmRename, Editor, Input, MultiBuffer,
+ Redo, Rename, ToOffset, ToggleCodeActions, Undo,
+ };
use gpui::{executor, ModelHandle, TestAppContext};
+ use language::{
+ tree_sitter_rust, AnchorRangeExt, Diagnostic, DiagnosticEntry, Language, LanguageConfig,
+ LanguageRegistry, LanguageServerConfig, Point, ToLspPosition,
+ };
+ use lsp;
use parking_lot::Mutex;
use postage::{sink::Sink, watch};
+ use project::{
+ fs::{FakeFs, Fs as _},
+ search::SearchQuery,
+ DiagnosticSummary, Project, ProjectPath,
+ };
use rand::prelude::*;
use rpc::PeerId;
use serde_json::json;
@@ -1009,24 +1027,7 @@ mod tests {
},
time::Duration,
};
- use zed::{
- client::{
- self, test::FakeHttpClient, Channel, ChannelDetails, ChannelList, Client, Credentials,
- EstablishConnectionError, UserStore,
- },
- editor::{
- self, ConfirmCodeAction, ConfirmCompletion, ConfirmRename, Editor, Input, MultiBuffer,
- Redo, Rename, ToOffset, ToggleCodeActions, Undo,
- },
- fs::{FakeFs, Fs as _},
- language::{
- tree_sitter_rust, AnchorRangeExt, Diagnostic, DiagnosticEntry, Language,
- LanguageConfig, LanguageRegistry, LanguageServerConfig, Point, ToLspPosition,
- },
- lsp,
- project::{search::SearchQuery, DiagnosticSummary, Project, ProjectPath},
- workspace::{Settings, Workspace, WorkspaceParams},
- };
+ use workspace::{Settings, Workspace, WorkspaceParams};
#[cfg(test)]
#[ctor::ctor]
@@ -1037,7 +1038,7 @@ mod tests {
}
#[gpui::test(iterations = 10)]
- async fn test_share_project(mut cx_a: TestAppContext, mut cx_b: TestAppContext) {
+ async fn test_share_project(cx_a: &mut TestAppContext, cx_b: &mut TestAppContext) {
let (window_b, _) = cx_b.add_window(|_| EmptyView);
let lang_registry = Arc::new(LanguageRegistry::new());
let fs = FakeFs::new(cx_a.background());
@@ -1045,8 +1046,8 @@ mod tests {
// Connect to a server as 2 clients.
let mut server = TestServer::start(cx_a.foreground(), cx_a.background()).await;
- let client_a = server.create_client(&mut cx_a, "user_a").await;
- let client_b = server.create_client(&mut cx_b, "user_b").await;
+ let client_a = server.create_client(cx_a, "user_a").await;
+ let client_b = server.create_client(cx_b, "user_b").await;
// Share a project as client A
fs.insert_tree(
@@ -1068,20 +1069,17 @@ mod tests {
)
});
let (worktree_a, _) = project_a
- .update(&mut cx_a, |p, cx| {
+ .update(cx_a, |p, cx| {
p.find_or_create_local_worktree("/a", false, cx)
})
.await
.unwrap();
- let worktree_id = worktree_a.read_with(&cx_a, |tree, _| tree.id());
+ let worktree_id = worktree_a.read_with(cx_a, |tree, _| tree.id());
worktree_a
- .read_with(&cx_a, |tree, _| tree.as_local().unwrap().scan_complete())
+ .read_with(cx_a, |tree, _| tree.as_local().unwrap().scan_complete())
.await;
- let project_id = project_a.update(&mut cx_a, |p, _| p.next_remote_id()).await;
- project_a
- .update(&mut cx_a, |p, cx| p.share(cx))
- .await
- .unwrap();
+ let project_id = project_a.update(cx_a, |p, _| p.next_remote_id()).await;
+ project_a.update(cx_a, |p, cx| p.share(cx)).await.unwrap();
// Join that project as client B
let project_b = Project::remote(
@@ -1095,7 +1093,7 @@ mod tests {
.await
.unwrap();
- let replica_id_b = project_b.read_with(&cx_b, |project, _| {
+ let replica_id_b = project_b.read_with(cx_b, |project, _| {
assert_eq!(
project
.collaborators()
@@ -1120,18 +1118,18 @@ mod tests {
// Open the same file as client B and client A.
let buffer_b = project_b
- .update(&mut cx_b, |p, cx| p.open_buffer((worktree_id, "b.txt"), cx))
+ .update(cx_b, |p, cx| p.open_buffer((worktree_id, "b.txt"), cx))
.await
.unwrap();
let buffer_b = cx_b.add_model(|cx| MultiBuffer::singleton(buffer_b, cx));
- buffer_b.read_with(&cx_b, |buf, cx| {
+ buffer_b.read_with(cx_b, |buf, cx| {
assert_eq!(buf.read(cx).text(), "b-contents")
});
- project_a.read_with(&cx_a, |project, cx| {
+ project_a.read_with(cx_a, |project, cx| {
assert!(project.has_open_buffer((worktree_id, "b.txt"), cx))
});
let buffer_a = project_a
- .update(&mut cx_a, |p, cx| p.open_buffer((worktree_id, "b.txt"), cx))
+ .update(cx_a, |p, cx| p.open_buffer((worktree_id, "b.txt"), cx))
.await
.unwrap();
@@ -1151,7 +1149,7 @@ mod tests {
// .await;
// Edit the buffer as client B and see that edit as client A.
- editor_b.update(&mut cx_b, |editor, cx| {
+ editor_b.update(cx_b, |editor, cx| {
editor.handle_input(&Input("ok, ".into()), cx)
});
buffer_a
@@ -1173,15 +1171,15 @@ mod tests {
}
#[gpui::test(iterations = 10)]
- async fn test_unshare_project(mut cx_a: TestAppContext, mut cx_b: TestAppContext) {
+ async fn test_unshare_project(cx_a: &mut TestAppContext, cx_b: &mut TestAppContext) {
let lang_registry = Arc::new(LanguageRegistry::new());
let fs = FakeFs::new(cx_a.background());
cx_a.foreground().forbid_parking();
// Connect to a server as 2 clients.
let mut server = TestServer::start(cx_a.foreground(), cx_a.background()).await;
- let client_a = server.create_client(&mut cx_a, "user_a").await;
- let client_b = server.create_client(&mut cx_b, "user_b").await;
+ let client_a = server.create_client(cx_a, "user_a").await;
+ let client_b = server.create_client(cx_b, "user_b").await;
// Share a project as client A
fs.insert_tree(
@@ -1203,21 +1201,18 @@ mod tests {
)
});
let (worktree_a, _) = project_a
- .update(&mut cx_a, |p, cx| {
+ .update(cx_a, |p, cx| {
p.find_or_create_local_worktree("/a", false, cx)
})
.await
.unwrap();
worktree_a
- .read_with(&cx_a, |tree, _| tree.as_local().unwrap().scan_complete())
+ .read_with(cx_a, |tree, _| tree.as_local().unwrap().scan_complete())
.await;
- let project_id = project_a.update(&mut cx_a, |p, _| p.next_remote_id()).await;
- let worktree_id = worktree_a.read_with(&cx_a, |tree, _| tree.id());
- project_a
- .update(&mut cx_a, |p, cx| p.share(cx))
- .await
- .unwrap();
- assert!(worktree_a.read_with(&cx_a, |tree, _| tree.as_local().unwrap().is_shared()));
+ let project_id = project_a.update(cx_a, |p, _| p.next_remote_id()).await;
+ let worktree_id = worktree_a.read_with(cx_a, |tree, _| tree.id());
+ project_a.update(cx_a, |p, cx| p.share(cx)).await.unwrap();
+ assert!(worktree_a.read_with(cx_a, |tree, _| tree.as_local().unwrap().is_shared()));
// Join that project as client B
let project_b = Project::remote(
@@ -1231,29 +1226,31 @@ mod tests {
.await
.unwrap();
project_b
- .update(&mut cx_b, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx))
+ .update(cx_b, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx))
.await
.unwrap();
// Unshare the project as client A
project_a
- .update(&mut cx_a, |project, cx| project.unshare(cx))
+ .update(cx_a, |project, cx| project.unshare(cx))
.await
.unwrap();
project_b
- .condition(&mut cx_b, |project, _| project.is_read_only())
+ .condition(cx_b, |project, _| project.is_read_only())
.await;
- assert!(worktree_a.read_with(&cx_a, |tree, _| !tree.as_local().unwrap().is_shared()));
- drop(project_b);
+ assert!(worktree_a.read_with(cx_a, |tree, _| !tree.as_local().unwrap().is_shared()));
+ cx_b.update(|_| {
+ drop(project_b);
+ });
// Share the project again and ensure guests can still join.
project_a
- .update(&mut cx_a, |project, cx| project.share(cx))
+ .update(cx_a, |project, cx| project.share(cx))
.await
.unwrap();
- assert!(worktree_a.read_with(&cx_a, |tree, _| tree.as_local().unwrap().is_shared()));
+ assert!(worktree_a.read_with(cx_a, |tree, _| tree.as_local().unwrap().is_shared()));
- let project_c = Project::remote(
+ let project_b2 = Project::remote(
project_id,
client_b.clone(),
client_b.user_store.clone(),
@@ -1263,17 +1260,17 @@ mod tests {
)
.await
.unwrap();
- project_c
- .update(&mut cx_b, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx))
+ project_b2
+ .update(cx_b, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx))
.await
.unwrap();
}
#[gpui::test(iterations = 10)]
async fn test_propagate_saves_and_fs_changes(
- mut cx_a: TestAppContext,
- mut cx_b: TestAppContext,
- mut cx_c: TestAppContext,
+ cx_a: &mut TestAppContext,
+ cx_b: &mut TestAppContext,
+ cx_c: &mut TestAppContext,
) {
let lang_registry = Arc::new(LanguageRegistry::new());
let fs = FakeFs::new(cx_a.background());
@@ -1281,9 +1278,9 @@ mod tests {
// Connect to a server as 3 clients.
let mut server = TestServer::start(cx_a.foreground(), cx_a.background()).await;
- let client_a = server.create_client(&mut cx_a, "user_a").await;
- let client_b = server.create_client(&mut cx_b, "user_b").await;
- let client_c = server.create_client(&mut cx_c, "user_c").await;
+ let client_a = server.create_client(cx_a, "user_a").await;
+ let client_b = server.create_client(cx_b, "user_b").await;
+ let client_c = server.create_client(cx_c, "user_c").await;
// Share a worktree as client A.
fs.insert_tree(
@@ -1305,20 +1302,17 @@ mod tests {
)
});
let (worktree_a, _) = project_a
- .update(&mut cx_a, |p, cx| {
+ .update(cx_a, |p, cx| {
p.find_or_create_local_worktree("/a", false, cx)
})
.await
.unwrap();
worktree_a
- .read_with(&cx_a, |tree, _| tree.as_local().unwrap().scan_complete())
+ .read_with(cx_a, |tree, _| tree.as_local().unwrap().scan_complete())
.await;
- let project_id = project_a.update(&mut cx_a, |p, _| p.next_remote_id()).await;
- let worktree_id = worktree_a.read_with(&cx_a, |tree, _| tree.id());
- project_a
- .update(&mut cx_a, |p, cx| p.share(cx))
- .await
- .unwrap();
+ let project_id = project_a.update(cx_a, |p, _| p.next_remote_id()).await;
+ let worktree_id = worktree_a.read_with(cx_a, |tree, _| tree.id());
+ project_a.update(cx_a, |p, cx| p.share(cx)).await.unwrap();
// Join that worktree as clients B and C.
let project_b = Project::remote(
@@ -1341,56 +1335,56 @@ mod tests {
)
.await
.unwrap();
- let worktree_b = project_b.read_with(&cx_b, |p, cx| p.worktrees(cx).next().unwrap());
- let worktree_c = project_c.read_with(&cx_c, |p, cx| p.worktrees(cx).next().unwrap());
+ let worktree_b = project_b.read_with(cx_b, |p, cx| p.worktrees(cx).next().unwrap());
+ let worktree_c = project_c.read_with(cx_c, |p, cx| p.worktrees(cx).next().unwrap());
// Open and edit a buffer as both guests B and C.
let buffer_b = project_b
- .update(&mut cx_b, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
+ .update(cx_b, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
.await
.unwrap();
let buffer_c = project_c
- .update(&mut cx_c, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
+ .update(cx_c, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
.await
.unwrap();
- buffer_b.update(&mut cx_b, |buf, cx| buf.edit([0..0], "i-am-b, ", cx));
- buffer_c.update(&mut cx_c, |buf, cx| buf.edit([0..0], "i-am-c, ", cx));
+ buffer_b.update(cx_b, |buf, cx| buf.edit([0..0], "i-am-b, ", cx));
+ buffer_c.update(cx_c, |buf, cx| buf.edit([0..0], "i-am-c, ", cx));
// Open and edit that buffer as the host.
let buffer_a = project_a
- .update(&mut cx_a, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
+ .update(cx_a, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
.await
.unwrap();
buffer_a
- .condition(&mut cx_a, |buf, _| buf.text() == "i-am-c, i-am-b, ")
+ .condition(cx_a, |buf, _| buf.text() == "i-am-c, i-am-b, ")
.await;
- buffer_a.update(&mut cx_a, |buf, cx| {
+ buffer_a.update(cx_a, |buf, cx| {
buf.edit([buf.len()..buf.len()], "i-am-a", cx)
});
// Wait for edits to propagate
buffer_a
- .condition(&mut cx_a, |buf, _| buf.text() == "i-am-c, i-am-b, i-am-a")
+ .condition(cx_a, |buf, _| buf.text() == "i-am-c, i-am-b, i-am-a")
.await;
buffer_b
- .condition(&mut cx_b, |buf, _| buf.text() == "i-am-c, i-am-b, i-am-a")
+ .condition(cx_b, |buf, _| buf.text() == "i-am-c, i-am-b, i-am-a")
.await;
buffer_c
- .condition(&mut cx_c, |buf, _| buf.text() == "i-am-c, i-am-b, i-am-a")
+ .condition(cx_c, |buf, _| buf.text() == "i-am-c, i-am-b, i-am-a")
.await;
// Edit the buffer as the host and concurrently save as guest B.
- let save_b = buffer_b.update(&mut cx_b, |buf, cx| buf.save(cx));
- buffer_a.update(&mut cx_a, |buf, cx| buf.edit([0..0], "hi-a, ", cx));
+ let save_b = buffer_b.update(cx_b, |buf, cx| buf.save(cx));
+ buffer_a.update(cx_a, |buf, cx| buf.edit([0..0], "hi-a, ", cx));
save_b.await.unwrap();
assert_eq!(
fs.load("/a/file1".as_ref()).await.unwrap(),
"hi-a, i-am-c, i-am-b, i-am-a"
);
- buffer_a.read_with(&cx_a, |buf, _| assert!(!buf.is_dirty()));
- buffer_b.read_with(&cx_b, |buf, _| assert!(!buf.is_dirty()));
- buffer_c.condition(&cx_c, |buf, _| !buf.is_dirty()).await;
+ buffer_a.read_with(cx_a, |buf, _| assert!(!buf.is_dirty()));
+ buffer_b.read_with(cx_b, |buf, _| assert!(!buf.is_dirty()));
+ buffer_c.condition(cx_c, |buf, _| !buf.is_dirty()).await;
// Make changes on host's file system, see those changes on guest worktrees.
fs.rename(
@@ -1450,15 +1444,15 @@ mod tests {
}
#[gpui::test(iterations = 10)]
- async fn test_buffer_conflict_after_save(mut cx_a: TestAppContext, mut cx_b: TestAppContext) {
+ async fn test_buffer_conflict_after_save(cx_a: &mut TestAppContext, cx_b: &mut TestAppContext) {
cx_a.foreground().forbid_parking();
let lang_registry = Arc::new(LanguageRegistry::new());
let fs = FakeFs::new(cx_a.background());
// Connect to a server as 2 clients.
let mut server = TestServer::start(cx_a.foreground(), cx_a.background()).await;
- let client_a = server.create_client(&mut cx_a, "user_a").await;
- let client_b = server.create_client(&mut cx_b, "user_b").await;
+ let client_a = server.create_client(cx_a, "user_a").await;
+ let client_b = server.create_client(cx_b, "user_b").await;
// Share a project as client A
fs.insert_tree(
@@ -1480,20 +1474,17 @@ mod tests {
)
});
let (worktree_a, _) = project_a
- .update(&mut cx_a, |p, cx| {
+ .update(cx_a, |p, cx| {
p.find_or_create_local_worktree("/dir", false, cx)
})
.await
.unwrap();
worktree_a
- .read_with(&cx_a, |tree, _| tree.as_local().unwrap().scan_complete())
+ .read_with(cx_a, |tree, _| tree.as_local().unwrap().scan_complete())
.await;
- let project_id = project_a.update(&mut cx_a, |p, _| p.next_remote_id()).await;
- let worktree_id = worktree_a.read_with(&cx_a, |tree, _| tree.id());
- project_a
- .update(&mut cx_a, |p, cx| p.share(cx))
- .await
- .unwrap();
+ let project_id = project_a.update(cx_a, |p, _| p.next_remote_id()).await;
+ let worktree_id = worktree_a.read_with(cx_a, |tree, _| tree.id());
+ project_a.update(cx_a, |p, cx| p.share(cx)).await.unwrap();
// Join that project as client B
let project_b = Project::remote(
@@ -1509,44 +1500,41 @@ mod tests {
// Open a buffer as client B
let buffer_b = project_b
- .update(&mut cx_b, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx))
+ .update(cx_b, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx))
.await
.unwrap();
- buffer_b.update(&mut cx_b, |buf, cx| buf.edit([0..0], "world ", cx));
- buffer_b.read_with(&cx_b, |buf, _| {
+ buffer_b.update(cx_b, |buf, cx| buf.edit([0..0], "world ", cx));
+ buffer_b.read_with(cx_b, |buf, _| {
assert!(buf.is_dirty());
assert!(!buf.has_conflict());
});
- buffer_b
- .update(&mut cx_b, |buf, cx| buf.save(cx))
- .await
- .unwrap();
+ buffer_b.update(cx_b, |buf, cx| buf.save(cx)).await.unwrap();
buffer_b
.condition(&cx_b, |buffer_b, _| !buffer_b.is_dirty())
.await;
- buffer_b.read_with(&cx_b, |buf, _| {
+ buffer_b.read_with(cx_b, |buf, _| {
assert!(!buf.has_conflict());
});
- buffer_b.update(&mut cx_b, |buf, cx| buf.edit([0..0], "hello ", cx));
- buffer_b.read_with(&cx_b, |buf, _| {
+ buffer_b.update(cx_b, |buf, cx| buf.edit([0..0], "hello ", cx));
+ buffer_b.read_with(cx_b, |buf, _| {
assert!(buf.is_dirty());
assert!(!buf.has_conflict());
});
}
#[gpui::test(iterations = 10)]
- async fn test_buffer_reloading(mut cx_a: TestAppContext, mut cx_b: TestAppContext) {
+ async fn test_buffer_reloading(cx_a: &mut TestAppContext, cx_b: &mut TestAppContext) {
cx_a.foreground().forbid_parking();
let lang_registry = Arc::new(LanguageRegistry::new());
let fs = FakeFs::new(cx_a.background());
// Connect to a server as 2 clients.
let mut server = TestServer::start(cx_a.foreground(), cx_a.background()).await;
- let client_a = server.create_client(&mut cx_a, "user_a").await;
- let client_b = server.create_client(&mut cx_b, "user_b").await;
+ let client_a = server.create_client(cx_a, "user_a").await;
+ let client_b = server.create_client(cx_b, "user_b").await;
// Share a project as client A
fs.insert_tree(
@@ -1568,20 +1556,17 @@ mod tests {
)
});
let (worktree_a, _) = project_a
- .update(&mut cx_a, |p, cx| {
+ .update(cx_a, |p, cx| {
p.find_or_create_local_worktree("/dir", false, cx)
})
.await
.unwrap();
worktree_a
- .read_with(&cx_a, |tree, _| tree.as_local().unwrap().scan_complete())
+ .read_with(cx_a, |tree, _| tree.as_local().unwrap().scan_complete())
.await;
- let project_id = project_a.update(&mut cx_a, |p, _| p.next_remote_id()).await;
- let worktree_id = worktree_a.read_with(&cx_a, |tree, _| tree.id());
- project_a
- .update(&mut cx_a, |p, cx| p.share(cx))
- .await
- .unwrap();
+ let project_id = project_a.update(cx_a, |p, _| p.next_remote_id()).await;
+ let worktree_id = worktree_a.read_with(cx_a, |tree, _| tree.id());
+ project_a.update(cx_a, |p, cx| p.share(cx)).await.unwrap();
// Join that project as client B
let project_b = Project::remote(
@@ -1594,14 +1579,14 @@ mod tests {
)
.await
.unwrap();
- let _worktree_b = project_b.update(&mut cx_b, |p, cx| p.worktrees(cx).next().unwrap());
+ let _worktree_b = project_b.update(cx_b, |p, cx| p.worktrees(cx).next().unwrap());
// Open a buffer as client B
let buffer_b = project_b
- .update(&mut cx_b, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx))
+ .update(cx_b, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx))
.await
.unwrap();
- buffer_b.read_with(&cx_b, |buf, _| {
+ buffer_b.read_with(cx_b, |buf, _| {
assert!(!buf.is_dirty());
assert!(!buf.has_conflict());
});
@@ -1614,15 +1599,15 @@ mod tests {
buf.text() == "new contents" && !buf.is_dirty()
})
.await;
- buffer_b.read_with(&cx_b, |buf, _| {
+ buffer_b.read_with(cx_b, |buf, _| {
assert!(!buf.has_conflict());
});
}
#[gpui::test(iterations = 10)]
async fn test_editing_while_guest_opens_buffer(
- mut cx_a: TestAppContext,
- mut cx_b: TestAppContext,
+ cx_a: &mut TestAppContext,
+ cx_b: &mut TestAppContext,
) {
cx_a.foreground().forbid_parking();
let lang_registry = Arc::new(LanguageRegistry::new());
@@ -1630,8 +1615,8 @@ mod tests {
// Connect to a server as 2 clients.
let mut server = TestServer::start(cx_a.foreground(), cx_a.background()).await;
- let client_a = server.create_client(&mut cx_a, "user_a").await;
- let client_b = server.create_client(&mut cx_b, "user_b").await;
+ let client_a = server.create_client(cx_a, "user_a").await;
+ let client_b = server.create_client(cx_b, "user_b").await;
// Share a project as client A
fs.insert_tree(
@@ -1652,20 +1637,17 @@ mod tests {
)
});
let (worktree_a, _) = project_a
- .update(&mut cx_a, |p, cx| {
+ .update(cx_a, |p, cx| {
p.find_or_create_local_worktree("/dir", false, cx)
})
.await
.unwrap();
worktree_a
- .read_with(&cx_a, |tree, _| tree.as_local().unwrap().scan_complete())
+ .read_with(cx_a, |tree, _| tree.as_local().unwrap().scan_complete())
.await;
- let project_id = project_a.update(&mut cx_a, |p, _| p.next_remote_id()).await;
- let worktree_id = worktree_a.read_with(&cx_a, |tree, _| tree.id());
- project_a
- .update(&mut cx_a, |p, cx| p.share(cx))
- .await
- .unwrap();
+ let project_id = project_a.update(cx_a, |p, _| p.next_remote_id()).await;
+ let worktree_id = worktree_a.read_with(cx_a, |tree, _| tree.id());
+ project_a.update(cx_a, |p, cx| p.share(cx)).await.unwrap();
// Join that project as client B
let project_b = Project::remote(
@@ -1681,30 +1663,30 @@ mod tests {
// Open a buffer as client A
let buffer_a = project_a
- .update(&mut cx_a, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx))
+ .update(cx_a, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx))
.await
.unwrap();
// Start opening the same buffer as client B
let buffer_b = cx_b
.background()
- .spawn(project_b.update(&mut cx_b, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx)));
+ .spawn(project_b.update(cx_b, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx)));
// Edit the buffer as client A while client B is still opening it.
cx_b.background().simulate_random_delay().await;
- buffer_a.update(&mut cx_a, |buf, cx| buf.edit([0..0], "X", cx));
+ buffer_a.update(cx_a, |buf, cx| buf.edit([0..0], "X", cx));
cx_b.background().simulate_random_delay().await;
- buffer_a.update(&mut cx_a, |buf, cx| buf.edit([1..1], "Y", cx));
+ buffer_a.update(cx_a, |buf, cx| buf.edit([1..1], "Y", cx));
- let text = buffer_a.read_with(&cx_a, |buf, _| buf.text());
+ let text = buffer_a.read_with(cx_a, |buf, _| buf.text());
let buffer_b = buffer_b.await.unwrap();
buffer_b.condition(&cx_b, |buf, _| buf.text() == text).await;
}
#[gpui::test(iterations = 10)]
async fn test_leaving_worktree_while_opening_buffer(
- mut cx_a: TestAppContext,
- mut cx_b: TestAppContext,
+ cx_a: &mut TestAppContext,
+ cx_b: &mut TestAppContext,
) {
cx_a.foreground().forbid_parking();
let lang_registry = Arc::new(LanguageRegistry::new());
@@ -1712,8 +1694,8 @@ mod tests {
// Connect to a server as 2 clients.
let mut server = TestServer::start(cx_a.foreground(), cx_a.background()).await;
- let client_a = server.create_client(&mut cx_a, "user_a").await;
- let client_b = server.create_client(&mut cx_b, "user_b").await;
+ let client_a = server.create_client(cx_a, "user_a").await;
+ let client_b = server.create_client(cx_b, "user_b").await;
// Share a project as client A
fs.insert_tree(
@@ -1734,20 +1716,17 @@ mod tests {
)
});
let (worktree_a, _) = project_a
- .update(&mut cx_a, |p, cx| {
+ .update(cx_a, |p, cx| {
p.find_or_create_local_worktree("/dir", false, cx)
})
.await
.unwrap();
worktree_a
- .read_with(&cx_a, |tree, _| tree.as_local().unwrap().scan_complete())
+ .read_with(cx_a, |tree, _| tree.as_local().unwrap().scan_complete())
.await;
- let project_id = project_a.update(&mut cx_a, |p, _| p.next_remote_id()).await;
- let worktree_id = worktree_a.read_with(&cx_a, |tree, _| tree.id());
- project_a
- .update(&mut cx_a, |p, cx| p.share(cx))
- .await
- .unwrap();
+ let project_id = project_a.update(cx_a, |p, _| p.next_remote_id()).await;
+ let worktree_id = worktree_a.read_with(cx_a, |tree, _| tree.id());
+ project_a.update(cx_a, |p, cx| p.share(cx)).await.unwrap();
// Join that project as client B
let project_b = Project::remote(
@@ -1769,7 +1748,7 @@ mod tests {
// Begin opening a buffer as client B, but leave the project before the open completes.
let buffer_b = cx_b
.background()
- .spawn(project_b.update(&mut cx_b, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx)));
+ .spawn(project_b.update(cx_b, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx)));
cx_b.update(|_| drop(project_b));
drop(buffer_b);
@@ -1780,15 +1759,15 @@ mod tests {
}
#[gpui::test(iterations = 10)]
- async fn test_peer_disconnection(mut cx_a: TestAppContext, mut cx_b: TestAppContext) {
+ async fn test_peer_disconnection(cx_a: &mut TestAppContext, cx_b: &mut TestAppContext) {
cx_a.foreground().forbid_parking();
let lang_registry = Arc::new(LanguageRegistry::new());
let fs = FakeFs::new(cx_a.background());
// Connect to a server as 2 clients.
let mut server = TestServer::start(cx_a.foreground(), cx_a.background()).await;
- let client_a = server.create_client(&mut cx_a, "user_a").await;
- let client_b = server.create_client(&mut cx_b, "user_b").await;
+ let client_a = server.create_client(cx_a, "user_a").await;
+ let client_b = server.create_client(cx_b, "user_b").await;
// Share a project as client A
fs.insert_tree(
@@ -1810,19 +1789,19 @@ mod tests {
)
});
let (worktree_a, _) = project_a
- .update(&mut cx_a, |p, cx| {
+ .update(cx_a, |p, cx| {
p.find_or_create_local_worktree("/a", false, cx)
})
.await
.unwrap();
worktree_a
- .read_with(&cx_a, |tree, _| tree.as_local().unwrap().scan_complete())
+ .read_with(cx_a, |tree, _| tree.as_local().unwrap().scan_complete())
.await;
let project_id = project_a
- .update(&mut cx_a, |project, _| project.next_remote_id())
+ .update(cx_a, |project, _| project.next_remote_id())
.await;
project_a
- .update(&mut cx_a, |project, cx| project.share(cx))
+ .update(cx_a, |project, cx| project.share(cx))
.await
.unwrap();
@@ -1852,8 +1831,8 @@ mod tests {
#[gpui::test(iterations = 10)]
async fn test_collaborating_with_diagnostics(
- mut cx_a: TestAppContext,
- mut cx_b: TestAppContext,
+ cx_a: &mut TestAppContext,
+ cx_b: &mut TestAppContext,
) {
cx_a.foreground().forbid_parking();
let mut lang_registry = Arc::new(LanguageRegistry::new());
@@ -1875,8 +1854,8 @@ mod tests {
// Connect to a server as 2 clients.
let mut server = TestServer::start(cx_a.foreground(), cx_a.background()).await;
- let client_a = server.create_client(&mut cx_a, "user_a").await;
- let client_b = server.create_client(&mut cx_b, "user_b").await;
+ let client_a = server.create_client(cx_a, "user_a").await;
+ let client_b = server.create_client(cx_b, "user_b").await;
// Share a project as client A
fs.insert_tree(
@@ -1898,25 +1877,22 @@ mod tests {
)
});
let (worktree_a, _) = project_a
- .update(&mut cx_a, |p, cx| {
+ .update(cx_a, |p, cx| {
p.find_or_create_local_worktree("/a", false, cx)
})
.await
.unwrap();
worktree_a
- .read_with(&cx_a, |tree, _| tree.as_local().unwrap().scan_complete())
+ .read_with(cx_a, |tree, _| tree.as_local().unwrap().scan_complete())
.await;
- let project_id = project_a.update(&mut cx_a, |p, _| p.next_remote_id()).await;
- let worktree_id = worktree_a.read_with(&cx_a, |tree, _| tree.id());
- project_a
- .update(&mut cx_a, |p, cx| p.share(cx))
- .await
- .unwrap();
+ let project_id = project_a.update(cx_a, |p, _| p.next_remote_id()).await;
+ let worktree_id = worktree_a.read_with(cx_a, |tree, _| tree.id());
+ project_a.update(cx_a, |p, cx| p.share(cx)).await.unwrap();
// Cause the language server to start.
let _ = cx_a
.background()
- .spawn(project_a.update(&mut cx_a, |project, cx| {
+ .spawn(project_a.update(cx_a, |project, cx| {
project.open_buffer(
ProjectPath {
worktree_id,
@@ -1972,7 +1948,7 @@ mod tests {
.await
.unwrap();
- project_b.read_with(&cx_b, |project, cx| {
+ project_b.read_with(cx_b, |project, cx| {
assert_eq!(
project.diagnostic_summaries(cx).collect::<Vec<_>>(),
&[(
@@ -2035,11 +2011,11 @@ mod tests {
// Open the file with the errors on client B. They should be present.
let buffer_b = cx_b
.background()
- .spawn(project_b.update(&mut cx_b, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx)))
+ .spawn(project_b.update(cx_b, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx)))
.await
.unwrap();
- buffer_b.read_with(&cx_b, |buffer, _| {
+ buffer_b.read_with(cx_b, |buffer, _| {
assert_eq!(
buffer
.snapshot()
@@ -2074,8 +2050,8 @@ mod tests {
#[gpui::test(iterations = 10)]
async fn test_collaborating_with_completion(
- mut cx_a: TestAppContext,
- mut cx_b: TestAppContext,
+ cx_a: &mut TestAppContext,
+ cx_b: &mut TestAppContext,
) {
cx_a.foreground().forbid_parking();
let mut lang_registry = Arc::new(LanguageRegistry::new());
@@ -2104,8 +2080,8 @@ mod tests {
// Connect to a server as 2 clients.
let mut server = TestServer::start(cx_a.foreground(), cx_a.background()).await;
- let client_a = server.create_client(&mut cx_a, "user_a").await;
- let client_b = server.create_client(&mut cx_b, "user_b").await;
+ let client_a = server.create_client(cx_a, "user_a").await;
+ let client_b = server.create_client(cx_b, "user_b").await;
// Share a project as client A
fs.insert_tree(
@@ -2127,20 +2103,17 @@ mod tests {
)
});
let (worktree_a, _) = project_a
- .update(&mut cx_a, |p, cx| {
+ .update(cx_a, |p, cx| {
p.find_or_create_local_worktree("/a", false, cx)
})
.await
.unwrap();
worktree_a
- .read_with(&cx_a, |tree, _| tree.as_local().unwrap().scan_complete())
+ .read_with(cx_a, |tree, _| tree.as_local().unwrap().scan_complete())
.await;
- let project_id = project_a.update(&mut cx_a, |p, _| p.next_remote_id()).await;
- let worktree_id = worktree_a.read_with(&cx_a, |tree, _| tree.id());
- project_a
- .update(&mut cx_a, |p, cx| p.share(cx))
- .await
- .unwrap();
+ let project_id = project_a.update(cx_a, |p, _| p.next_remote_id()).await;
+ let worktree_id = worktree_a.read_with(cx_a, |tree, _| tree.id());
+ project_a.update(cx_a, |p, cx| p.share(cx)).await.unwrap();
// Join the worktree as client B.
let project_b = Project::remote(
@@ -2156,9 +2129,7 @@ mod tests {
// Open a file in an editor as the guest.
let buffer_b = project_b
- .update(&mut cx_b, |p, cx| {
- p.open_buffer((worktree_id, "main.rs"), cx)
- })
+ .update(cx_b, |p, cx| p.open_buffer((worktree_id, "main.rs"), cx))
.await
.unwrap();
let (window_b, _) = cx_b.add_window(|_| EmptyView);
@@ -2177,7 +2148,7 @@ mod tests {
.await;
// Type a completion trigger character as the guest.
- editor_b.update(&mut cx_b, |editor, cx| {
+ editor_b.update(cx_b, |editor, cx| {
editor.select_ranges([13..13], None, cx);
editor.handle_input(&Input(".".into()), cx);
cx.focus(&editor_b);
@@ -2233,9 +2204,7 @@ mod tests {
// Open the buffer on the host.
let buffer_a = project_a
- .update(&mut cx_a, |p, cx| {
- p.open_buffer((worktree_id, "main.rs"), cx)
- })
+ .update(cx_a, |p, cx| p.open_buffer((worktree_id, "main.rs"), cx))
.await
.unwrap();
buffer_a
@@ -2246,7 +2215,7 @@ mod tests {
editor_b
.condition(&cx_b, |editor, _| editor.context_menu_visible())
.await;
- editor_b.update(&mut cx_b, |editor, cx| {
+ editor_b.update(cx_b, |editor, cx| {
editor.confirm_completion(&ConfirmCompletion(Some(0)), cx);
assert_eq!(editor.text(cx), "fn main() { a.first_method() }");
});
@@ -2290,7 +2259,7 @@ mod tests {
}
#[gpui::test(iterations = 10)]
- async fn test_formatting_buffer(mut cx_a: TestAppContext, mut cx_b: TestAppContext) {
+ async fn test_formatting_buffer(cx_a: &mut TestAppContext, cx_b: &mut TestAppContext) {
cx_a.foreground().forbid_parking();
let mut lang_registry = Arc::new(LanguageRegistry::new());
let fs = FakeFs::new(cx_a.background());
@@ -2311,8 +2280,8 @@ mod tests {
// Connect to a server as 2 clients.
let mut server = TestServer::start(cx_a.foreground(), cx_a.background()).await;
- let client_a = server.create_client(&mut cx_a, "user_a").await;
- let client_b = server.create_client(&mut cx_b, "user_b").await;
+ let client_a = server.create_client(cx_a, "user_a").await;
+ let client_b = server.create_client(cx_b, "user_b").await;
// Share a project as client A
fs.insert_tree(
@@ -2333,20 +2302,17 @@ mod tests {
)
});
let (worktree_a, _) = project_a
- .update(&mut cx_a, |p, cx| {
+ .update(cx_a, |p, cx| {
p.find_or_create_local_worktree("/a", false, cx)
})
.await
.unwrap();
worktree_a
- .read_with(&cx_a, |tree, _| tree.as_local().unwrap().scan_complete())
+ .read_with(cx_a, |tree, _| tree.as_local().unwrap().scan_complete())
.await;
- let project_id = project_a.update(&mut cx_a, |p, _| p.next_remote_id()).await;
- let worktree_id = worktree_a.read_with(&cx_a, |tree, _| tree.id());
- project_a
- .update(&mut cx_a, |p, cx| p.share(cx))
- .await
- .unwrap();
+ let project_id = project_a.update(cx_a, |p, _| p.next_remote_id()).await;
+ let worktree_id = worktree_a.read_with(cx_a, |tree, _| tree.id());
+ project_a.update(cx_a, |p, cx| p.share(cx)).await.unwrap();
// Join the worktree as client B.
let project_b = Project::remote(
@@ -2362,11 +2328,11 @@ mod tests {
let buffer_b = cx_b
.background()
- .spawn(project_b.update(&mut cx_b, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx)))
+ .spawn(project_b.update(cx_b, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx)))
.await
.unwrap();
- let format = project_b.update(&mut cx_b, |project, cx| {
+ let format = project_b.update(cx_b, |project, cx| {
project.format(HashSet::from_iter([buffer_b.clone()]), true, cx)
});
@@ -2386,13 +2352,13 @@ mod tests {
format.await.unwrap();
assert_eq!(
- buffer_b.read_with(&cx_b, |buffer, _| buffer.text()),
+ buffer_b.read_with(cx_b, |buffer, _| buffer.text()),
"let honey = two"
);
}
#[gpui::test(iterations = 10)]
- async fn test_definition(mut cx_a: TestAppContext, mut cx_b: TestAppContext) {
+ async fn test_definition(cx_a: &mut TestAppContext, cx_b: &mut TestAppContext) {
cx_a.foreground().forbid_parking();
let mut lang_registry = Arc::new(LanguageRegistry::new());
let fs = FakeFs::new(cx_a.background());
@@ -2428,8 +2394,8 @@ mod tests {
// Connect to a server as 2 clients.
let mut server = TestServer::start(cx_a.foreground(), cx_a.background()).await;
- let client_a = server.create_client(&mut cx_a, "user_a").await;
- let client_b = server.create_client(&mut cx_b, "user_b").await;
+ let client_a = server.create_client(cx_a, "user_a").await;
+ let client_b = server.create_client(cx_b, "user_b").await;
// Share a project as client A
let project_a = cx_a.update(|cx| {
@@ -2442,20 +2408,17 @@ mod tests {
)
});
let (worktree_a, _) = project_a
- .update(&mut cx_a, |p, cx| {
+ .update(cx_a, |p, cx| {
p.find_or_create_local_worktree("/root-1", false, cx)
})
.await
.unwrap();
worktree_a
- .read_with(&cx_a, |tree, _| tree.as_local().unwrap().scan_complete())
+ .read_with(cx_a, |tree, _| tree.as_local().unwrap().scan_complete())
.await;
- let project_id = project_a.update(&mut cx_a, |p, _| p.next_remote_id()).await;
- let worktree_id = worktree_a.read_with(&cx_a, |tree, _| tree.id());
- project_a
- .update(&mut cx_a, |p, cx| p.share(cx))
- .await
- .unwrap();
+ let project_id = project_a.update(cx_a, |p, _| p.next_remote_id()).await;
+ let worktree_id = worktree_a.read_with(cx_a, |tree, _| tree.id());
+ project_a.update(cx_a, |p, cx| p.share(cx)).await.unwrap();
// Join the worktree as client B.
let project_b = Project::remote(
@@ -2472,12 +2435,12 @@ mod tests {
// Open the file on client B.
let buffer_b = cx_b
.background()
- .spawn(project_b.update(&mut cx_b, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx)))
+ .spawn(project_b.update(cx_b, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx)))
.await
.unwrap();
// Request the definition of a symbol as the guest.
- let definitions_1 = project_b.update(&mut cx_b, |p, cx| p.definition(&buffer_b, 23, cx));
+ let definitions_1 = project_b.update(cx_b, |p, cx| p.definition(&buffer_b, 23, cx));
let mut fake_language_server = fake_language_servers.next().await.unwrap();
fake_language_server.handle_request::<lsp::request::GotoDefinition, _>(|_, _| {
@@ -2504,7 +2467,7 @@ mod tests {
// Try getting more definitions for the same buffer, ensuring the buffer gets reused from
// the previous call to `definition`.
- let definitions_2 = project_b.update(&mut cx_b, |p, cx| p.definition(&buffer_b, 33, cx));
+ let definitions_2 = project_b.update(cx_b, |p, cx| p.definition(&buffer_b, 33, cx));
fake_language_server.handle_request::<lsp::request::GotoDefinition, _>(|_, _| {
Some(lsp::GotoDefinitionResponse::Scalar(lsp::Location::new(
lsp::Url::from_file_path("/root-2/b.rs").unwrap(),
@@ -2530,7 +2493,7 @@ mod tests {
}
#[gpui::test(iterations = 10)]
- async fn test_references(mut cx_a: TestAppContext, mut cx_b: TestAppContext) {
+ async fn test_references(cx_a: &mut TestAppContext, cx_b: &mut TestAppContext) {
cx_a.foreground().forbid_parking();
let mut lang_registry = Arc::new(LanguageRegistry::new());
let fs = FakeFs::new(cx_a.background());
@@ -296,7 +296,23 @@ impl Store {
}
}
- Ok(e.remove())
+ let project = e.remove();
+
+ if let Some(host_connection) = self.connections.get_mut(&connection_id) {
+ host_connection.projects.remove(&project_id);
+ }
+
+ if let Some(share) = &project.share {
+ for guest_connection in share.guests.keys() {
+ if let Some(connection) = self.connections.get_mut(&guest_connection) {
+ connection.projects.remove(&project_id);
+ }
+ }
+ }
+
+ #[cfg(test)]
+ self.check_invariants();
+ Ok(project)
} else {
Err(anyhow!("no such project"))?
}
@@ -152,7 +152,7 @@ mod tests {
};
#[gpui::test]
- async fn test_open_paths_action(mut cx: TestAppContext) {
+ async fn test_open_paths_action(cx: &mut TestAppContext) {
let app_state = cx.update(test_app_state);
let dir = temp_tree(json!({
"a": {
@@ -186,7 +186,7 @@ mod tests {
.await;
assert_eq!(cx.window_ids().len(), 1);
let workspace_1 = cx.root_view::<Workspace>(cx.window_ids()[0]).unwrap();
- workspace_1.read_with(&cx, |workspace, cx| {
+ workspace_1.read_with(cx, |workspace, cx| {
assert_eq!(workspace.worktrees(cx).count(), 2)
});
@@ -205,7 +205,7 @@ mod tests {
}
#[gpui::test]
- async fn test_new_empty_workspace(mut cx: TestAppContext) {
+ async fn test_new_empty_workspace(cx: &mut TestAppContext) {
let app_state = cx.update(test_app_state);
cx.update(|cx| {
workspace::init(cx);
@@ -213,7 +213,7 @@ mod tests {
cx.dispatch_global_action(workspace::OpenNew(app_state.clone()));
let window_id = *cx.window_ids().first().unwrap();
let workspace = cx.root_view::<Workspace>(window_id).unwrap();
- let editor = workspace.update(&mut cx, |workspace, cx| {
+ let editor = workspace.update(cx, |workspace, cx| {
workspace
.active_item(cx)
.unwrap()
@@ -221,22 +221,22 @@ mod tests {
.unwrap()
});
- editor.update(&mut cx, |editor, cx| {
+ editor.update(cx, |editor, cx| {
assert!(editor.text(cx).is_empty());
});
- let save_task = workspace.update(&mut cx, |workspace, cx| workspace.save_active_item(cx));
+ let save_task = workspace.update(cx, |workspace, cx| workspace.save_active_item(cx));
app_state.fs.as_fake().insert_dir("/root").await;
cx.simulate_new_path_selection(|_| Some(PathBuf::from("/root/the-new-name")));
save_task.await.unwrap();
- editor.read_with(&cx, |editor, cx| {
+ editor.read_with(cx, |editor, cx| {
assert!(!editor.is_dirty(cx));
assert_eq!(editor.title(cx), "the-new-name");
});
}
#[gpui::test]
- async fn test_open_entry(mut cx: TestAppContext) {
+ async fn test_open_entry(cx: &mut TestAppContext) {
let app_state = cx.update(test_app_state);
app_state
.fs
@@ -256,7 +256,7 @@ mod tests {
        let (_, workspace) = cx.add_window(|cx| Workspace::new(&params, cx));
params
.project
- .update(&mut cx, |project, cx| {
+ .update(cx, |project, cx| {
project.find_or_create_local_worktree("/root", false, cx)
})
.await
@@ -271,7 +271,7 @@ mod tests {
// Open the first entry
let entry_1 = workspace
- .update(&mut cx, |w, cx| w.open_path(file1.clone(), cx))
+ .update(cx, |w, cx| w.open_path(file1.clone(), cx))
.await
.unwrap();
cx.read(|cx| {
@@ -285,7 +285,7 @@ mod tests {
// Open the second entry
workspace
- .update(&mut cx, |w, cx| w.open_path(file2.clone(), cx))
+ .update(cx, |w, cx| w.open_path(file2.clone(), cx))
.await
.unwrap();
cx.read(|cx| {
@@ -299,7 +299,7 @@ mod tests {
// Open the first entry again. The existing pane item is activated.
let entry_1b = workspace
- .update(&mut cx, |w, cx| w.open_path(file1.clone(), cx))
+ .update(cx, |w, cx| w.open_path(file1.clone(), cx))
.await
.unwrap();
assert_eq!(entry_1.id(), entry_1b.id());
@@ -315,14 +315,14 @@ mod tests {
// Split the pane with the first entry, then open the second entry again.
workspace
- .update(&mut cx, |w, cx| {
+ .update(cx, |w, cx| {
w.split_pane(w.active_pane().clone(), SplitDirection::Right, cx);
w.open_path(file2.clone(), cx)
})
.await
.unwrap();
- workspace.read_with(&cx, |w, cx| {
+ workspace.read_with(cx, |w, cx| {
assert_eq!(
w.active_pane()
.read(cx)
@@ -334,7 +334,7 @@ mod tests {
});
// Open the third entry twice concurrently. Only one pane item is added.
- let (t1, t2) = workspace.update(&mut cx, |w, cx| {
+ let (t1, t2) = workspace.update(cx, |w, cx| {
(
w.open_path(file3.clone(), cx),
w.open_path(file3.clone(), cx),
@@ -357,7 +357,7 @@ mod tests {
}
#[gpui::test]
- async fn test_open_paths(mut cx: TestAppContext) {
+ async fn test_open_paths(cx: &mut TestAppContext) {
let app_state = cx.update(test_app_state);
let fs = app_state.fs.as_fake();
fs.insert_dir("/dir1").await;
@@ -369,7 +369,7 @@ mod tests {
        let (_, workspace) = cx.add_window(|cx| Workspace::new(&params, cx));
params
.project
- .update(&mut cx, |project, cx| {
+ .update(cx, |project, cx| {
project.find_or_create_local_worktree("/dir1", false, cx)
})
.await
@@ -435,7 +435,7 @@ mod tests {
}
#[gpui::test]
- async fn test_save_conflicting_item(mut cx: TestAppContext) {
+ async fn test_save_conflicting_item(cx: &mut TestAppContext) {
let app_state = cx.update(test_app_state);
let fs = app_state.fs.as_fake();
fs.insert_tree("/root", json!({ "a.txt": "" })).await;
@@ -444,7 +444,7 @@ mod tests {
        let (window_id, workspace) = cx.add_window(|cx| Workspace::new(&params, cx));
params
.project
- .update(&mut cx, |project, cx| {
+ .update(cx, |project, cx| {
project.find_or_create_local_worktree("/root", false, cx)
})
.await
@@ -474,24 +474,24 @@ mod tests {
.await;
cx.read(|cx| assert!(editor.is_dirty(cx)));
- let save_task = workspace.update(&mut cx, |workspace, cx| workspace.save_active_item(cx));
+ let save_task = workspace.update(cx, |workspace, cx| workspace.save_active_item(cx));
cx.simulate_prompt_answer(window_id, 0);
save_task.await.unwrap();
- editor.read_with(&cx, |editor, cx| {
+ editor.read_with(cx, |editor, cx| {
assert!(!editor.is_dirty(cx));
assert!(!editor.has_conflict(cx));
});
}
#[gpui::test]
- async fn test_open_and_save_new_file(mut cx: TestAppContext) {
+ async fn test_open_and_save_new_file(cx: &mut TestAppContext) {
let app_state = cx.update(test_app_state);
app_state.fs.as_fake().insert_dir("/root").await;
let params = cx.update(|cx| WorkspaceParams::local(&app_state, cx));
        let (window_id, workspace) = cx.add_window(|cx| Workspace::new(&params, cx));
params
.project
- .update(&mut cx, |project, cx| {
+ .update(cx, |project, cx| {
project.find_or_create_local_worktree("/root", false, cx)
})
.await
@@ -500,7 +500,7 @@ mod tests {
// Create a new untitled buffer
cx.dispatch_action(window_id, vec![workspace.id()], OpenNew(app_state.clone()));
- let editor = workspace.read_with(&cx, |workspace, cx| {
+ let editor = workspace.read_with(cx, |workspace, cx| {
workspace
.active_item(cx)
.unwrap()
@@ -508,7 +508,7 @@ mod tests {
.unwrap()
});
- editor.update(&mut cx, |editor, cx| {
+ editor.update(cx, |editor, cx| {
assert!(!editor.is_dirty(cx));
assert_eq!(editor.title(cx), "untitled");
assert!(Arc::ptr_eq(
@@ -520,7 +520,7 @@ mod tests {
});
// Save the buffer. This prompts for a filename.
- let save_task = workspace.update(&mut cx, |workspace, cx| workspace.save_active_item(cx));
+ let save_task = workspace.update(cx, |workspace, cx| workspace.save_active_item(cx));
cx.simulate_new_path_selection(|parent_dir| {
assert_eq!(parent_dir, Path::new("/root"));
Some(parent_dir.join("the-new-name.rs"))
@@ -533,21 +533,21 @@ mod tests {
// When the save completes, the buffer's title is updated and the language is assigned based
// on the path.
save_task.await.unwrap();
- editor.read_with(&cx, |editor, cx| {
+ editor.read_with(cx, |editor, cx| {
assert!(!editor.is_dirty(cx));
assert_eq!(editor.title(cx), "the-new-name.rs");
assert_eq!(editor.language(cx).unwrap().name().as_ref(), "Rust");
});
// Edit the file and save it again. This time, there is no filename prompt.
- editor.update(&mut cx, |editor, cx| {
+ editor.update(cx, |editor, cx| {
editor.handle_input(&editor::Input(" there".into()), cx);
assert_eq!(editor.is_dirty(cx.as_ref()), true);
});
- let save_task = workspace.update(&mut cx, |workspace, cx| workspace.save_active_item(cx));
+ let save_task = workspace.update(cx, |workspace, cx| workspace.save_active_item(cx));
save_task.await.unwrap();
assert!(!cx.did_prompt_for_new_path());
- editor.read_with(&cx, |editor, cx| {
+ editor.read_with(cx, |editor, cx| {
assert!(!editor.is_dirty(cx));
assert_eq!(editor.title(cx), "the-new-name.rs")
});
@@ -556,7 +556,7 @@ mod tests {
// the same buffer.
cx.dispatch_action(window_id, vec![workspace.id()], OpenNew(app_state.clone()));
workspace
- .update(&mut cx, |workspace, cx| {
+ .update(cx, |workspace, cx| {
workspace.split_pane(workspace.active_pane().clone(), SplitDirection::Right, cx);
workspace.open_path(
ProjectPath {
@@ -568,7 +568,7 @@ mod tests {
})
.await
.unwrap();
- let editor2 = workspace.update(&mut cx, |workspace, cx| {
+ let editor2 = workspace.update(cx, |workspace, cx| {
workspace
.active_item(cx)
.unwrap()
@@ -584,7 +584,7 @@ mod tests {
}
#[gpui::test]
- async fn test_setting_language_when_saving_as_single_file_worktree(mut cx: TestAppContext) {
+ async fn test_setting_language_when_saving_as_single_file_worktree(cx: &mut TestAppContext) {
let app_state = cx.update(test_app_state);
app_state.fs.as_fake().insert_dir("/root").await;
let params = cx.update(|cx| WorkspaceParams::local(&app_state, cx));
@@ -592,7 +592,7 @@ mod tests {
// Create a new untitled buffer
cx.dispatch_action(window_id, vec![workspace.id()], OpenNew(app_state.clone()));
- let editor = workspace.read_with(&cx, |workspace, cx| {
+ let editor = workspace.read_with(cx, |workspace, cx| {
workspace
.active_item(cx)
.unwrap()
@@ -600,7 +600,7 @@ mod tests {
.unwrap()
});
- editor.update(&mut cx, |editor, cx| {
+ editor.update(cx, |editor, cx| {
assert!(Arc::ptr_eq(
editor.language(cx).unwrap(),
&language::PLAIN_TEXT
@@ -610,18 +610,18 @@ mod tests {
});
// Save the buffer. This prompts for a filename.
- let save_task = workspace.update(&mut cx, |workspace, cx| workspace.save_active_item(cx));
+ let save_task = workspace.update(cx, |workspace, cx| workspace.save_active_item(cx));
cx.simulate_new_path_selection(|_| Some(PathBuf::from("/root/the-new-name.rs")));
save_task.await.unwrap();
// The buffer is not dirty anymore and the language is assigned based on the path.
- editor.read_with(&cx, |editor, cx| {
+ editor.read_with(cx, |editor, cx| {
assert!(!editor.is_dirty(cx));
assert_eq!(editor.language(cx).unwrap().name().as_ref(), "Rust")
});
}
#[gpui::test]
- async fn test_pane_actions(mut cx: TestAppContext) {
+ async fn test_pane_actions(cx: &mut TestAppContext) {
cx.update(|cx| pane::init(cx));
let app_state = cx.update(test_app_state);
app_state
@@ -643,7 +643,7 @@ mod tests {
        let (window_id, workspace) = cx.add_window(|cx| Workspace::new(&params, cx));
params
.project
- .update(&mut cx, |project, cx| {
+ .update(cx, |project, cx| {
project.find_or_create_local_worktree("/root", false, cx)
})
.await
@@ -656,7 +656,7 @@ mod tests {
let pane_1 = cx.read(|cx| workspace.read(cx).active_pane().clone());
workspace
- .update(&mut cx, |w, cx| w.open_path(file1.clone(), cx))
+ .update(cx, |w, cx| w.open_path(file1.clone(), cx))
.await
.unwrap();
cx.read(|cx| {
@@ -686,7 +686,7 @@ mod tests {
}
#[gpui::test]
- async fn test_navigation(mut cx: TestAppContext) {
+ async fn test_navigation(cx: &mut TestAppContext) {
let app_state = cx.update(test_app_state);
app_state
.fs
@@ -706,7 +706,7 @@ mod tests {
        let (_, workspace) = cx.add_window(|cx| Workspace::new(&params, cx));
params
.project
- .update(&mut cx, |project, cx| {
+ .update(cx, |project, cx| {
project.find_or_create_local_worktree("/root", false, cx)
})
.await
@@ -719,110 +719,94 @@ mod tests {
let file3 = entries[2].clone();
let editor1 = workspace
- .update(&mut cx, |w, cx| w.open_path(file1.clone(), cx))
+ .update(cx, |w, cx| w.open_path(file1.clone(), cx))
.await
.unwrap()
.downcast::<Editor>()
.unwrap();
- editor1.update(&mut cx, |editor, cx| {
+ editor1.update(cx, |editor, cx| {
editor.select_display_ranges(&[DisplayPoint::new(10, 0)..DisplayPoint::new(10, 0)], cx);
});
let editor2 = workspace
- .update(&mut cx, |w, cx| w.open_path(file2.clone(), cx))
+ .update(cx, |w, cx| w.open_path(file2.clone(), cx))
.await
.unwrap()
.downcast::<Editor>()
.unwrap();
let editor3 = workspace
- .update(&mut cx, |w, cx| w.open_path(file3.clone(), cx))
+ .update(cx, |w, cx| w.open_path(file3.clone(), cx))
.await
.unwrap()
.downcast::<Editor>()
.unwrap();
- editor3.update(&mut cx, |editor, cx| {
+ editor3.update(cx, |editor, cx| {
editor.select_display_ranges(&[DisplayPoint::new(15, 0)..DisplayPoint::new(15, 0)], cx);
});
assert_eq!(
- active_location(&workspace, &mut cx),
+ active_location(&workspace, cx),
(file3.clone(), DisplayPoint::new(15, 0))
);
- workspace
- .update(&mut cx, |w, cx| Pane::go_back(w, cx))
- .await;
+ workspace.update(cx, |w, cx| Pane::go_back(w, cx)).await;
assert_eq!(
- active_location(&workspace, &mut cx),
+ active_location(&workspace, cx),
(file3.clone(), DisplayPoint::new(0, 0))
);
- workspace
- .update(&mut cx, |w, cx| Pane::go_back(w, cx))
- .await;
+ workspace.update(cx, |w, cx| Pane::go_back(w, cx)).await;
assert_eq!(
- active_location(&workspace, &mut cx),
+ active_location(&workspace, cx),
(file2.clone(), DisplayPoint::new(0, 0))
);
- workspace
- .update(&mut cx, |w, cx| Pane::go_back(w, cx))
- .await;
+ workspace.update(cx, |w, cx| Pane::go_back(w, cx)).await;
assert_eq!(
- active_location(&workspace, &mut cx),
+ active_location(&workspace, cx),
(file1.clone(), DisplayPoint::new(10, 0))
);
- workspace
- .update(&mut cx, |w, cx| Pane::go_back(w, cx))
- .await;
+ workspace.update(cx, |w, cx| Pane::go_back(w, cx)).await;
assert_eq!(
- active_location(&workspace, &mut cx),
+ active_location(&workspace, cx),
(file1.clone(), DisplayPoint::new(0, 0))
);
// Go back one more time and ensure we don't navigate past the first item in the history.
- workspace
- .update(&mut cx, |w, cx| Pane::go_back(w, cx))
- .await;
+ workspace.update(cx, |w, cx| Pane::go_back(w, cx)).await;
assert_eq!(
- active_location(&workspace, &mut cx),
+ active_location(&workspace, cx),
(file1.clone(), DisplayPoint::new(0, 0))
);
- workspace
- .update(&mut cx, |w, cx| Pane::go_forward(w, cx))
- .await;
+ workspace.update(cx, |w, cx| Pane::go_forward(w, cx)).await;
assert_eq!(
- active_location(&workspace, &mut cx),
+ active_location(&workspace, cx),
(file1.clone(), DisplayPoint::new(10, 0))
);
- workspace
- .update(&mut cx, |w, cx| Pane::go_forward(w, cx))
- .await;
+ workspace.update(cx, |w, cx| Pane::go_forward(w, cx)).await;
assert_eq!(
- active_location(&workspace, &mut cx),
+ active_location(&workspace, cx),
(file2.clone(), DisplayPoint::new(0, 0))
);
// Go forward to an item that has been closed, ensuring it gets re-opened at the same
// location.
- workspace.update(&mut cx, |workspace, cx| {
+ workspace.update(cx, |workspace, cx| {
workspace
.active_pane()
.update(cx, |pane, cx| pane.close_item(editor3.id(), cx));
drop(editor3);
});
- workspace
- .update(&mut cx, |w, cx| Pane::go_forward(w, cx))
- .await;
+ workspace.update(cx, |w, cx| Pane::go_forward(w, cx)).await;
assert_eq!(
- active_location(&workspace, &mut cx),
+ active_location(&workspace, cx),
(file3.clone(), DisplayPoint::new(0, 0))
);
// Go back to an item that has been closed and removed from disk, ensuring it gets skipped.
workspace
- .update(&mut cx, |workspace, cx| {
+ .update(cx, |workspace, cx| {
workspace
.active_pane()
.update(cx, |pane, cx| pane.close_item(editor2.id(), cx));
@@ -834,18 +818,14 @@ mod tests {
})
.await
.unwrap();
- workspace
- .update(&mut cx, |w, cx| Pane::go_back(w, cx))
- .await;
+ workspace.update(cx, |w, cx| Pane::go_back(w, cx)).await;
assert_eq!(
- active_location(&workspace, &mut cx),
+ active_location(&workspace, cx),
(file1.clone(), DisplayPoint::new(10, 0))
);
- workspace
- .update(&mut cx, |w, cx| Pane::go_forward(w, cx))
- .await;
+ workspace.update(cx, |w, cx| Pane::go_forward(w, cx)).await;
assert_eq!(
- active_location(&workspace, &mut cx),
+ active_location(&workspace, cx),
(file3.clone(), DisplayPoint::new(0, 0))
);