From 7d53e37672bbc5af5726f5452e2469134fd6de21 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Mon, 28 Feb 2022 18:19:30 -0800 Subject: [PATCH 01/45] Start work on detecting leaked handles in tests For now, just track models. Tests fail because we don't yet clear the app contexts at the right time. Co-Authored-By: Nathan Sobo --- crates/gpui/src/app.rs | 189 ++++++++++++++++++++------ crates/gpui/src/executor.rs | 37 +---- crates/gpui/src/test.rs | 20 +-- crates/gpui/src/util.rs | 27 +++- crates/gpui_macros/src/gpui_macros.rs | 1 + crates/server/src/rpc.rs | 11 +- 6 files changed, 196 insertions(+), 89 deletions(-) diff --git a/crates/gpui/src/app.rs b/crates/gpui/src/app.rs index dcfb00617d600a2db2feeb94c4aca99d1428b49f..ff2baac8d9b4e0032cca63175730548acc25089f 100644 --- a/crates/gpui/src/app.rs +++ b/crates/gpui/src/app.rs @@ -4,10 +4,11 @@ use crate::{ keymap::{self, Keystroke}, platform::{self, CursorStyle, Platform, PromptLevel, WindowOptions}, presenter::Presenter, - util::{post_inc, timeout}, + util::{post_inc, timeout, CwdBacktrace}, AssetCache, AssetSource, ClipboardItem, FontCache, PathPromptOptions, TextLayoutCache, }; use anyhow::{anyhow, Result}; +use backtrace::Backtrace; use keymap::MatchResult; use parking_lot::Mutex; use platform::Event; @@ -235,7 +236,6 @@ pub struct App(Rc>); #[derive(Clone)] pub struct AsyncAppContext(Rc>); -#[derive(Clone)] pub struct TestAppContext { cx: Rc>, foreground_platform: Rc, @@ -252,6 +252,7 @@ impl App { platform.clone(), foreground_platform.clone(), Arc::new(FontCache::new(platform.fonts())), + Default::default(), asset_source, )))); @@ -389,6 +390,7 @@ impl TestAppContext { foreground: Rc, background: Arc, font_cache: Arc, + leak_detector: Arc>, first_entity_id: usize, ) -> Self { let mut cx = MutableAppContext::new( @@ -397,6 +399,11 @@ impl TestAppContext { platform, foreground_platform.clone(), font_cache, + RefCounts { + #[cfg(feature = "test-support")] + leak_detector, + ..Default::default() + }, (), ); 
cx.next_entity_id = first_entity_id; @@ -551,6 +558,11 @@ impl TestAppContext { .expect("prompt was not called"); let _ = done_tx.try_send(answer); } + + #[cfg(feature = "test-support")] + pub fn leak_detector(&self) -> Arc> { + self.cx.borrow().leak_detector() + } } impl AsyncAppContext { @@ -758,8 +770,8 @@ impl MutableAppContext { platform: Arc, foreground_platform: Rc, font_cache: Arc, + ref_counts: RefCounts, asset_source: impl AssetSource, - // entity_drop_tx: ) -> Self { Self { weak_self: None, @@ -771,7 +783,7 @@ impl MutableAppContext { windows: Default::default(), app_states: Default::default(), element_states: Default::default(), - ref_counts: Arc::new(Mutex::new(RefCounts::default())), + ref_counts: Arc::new(Mutex::new(ref_counts)), background, font_cache, platform, @@ -1808,6 +1820,11 @@ impl MutableAppContext { pub fn read_from_clipboard(&self) -> Option { self.cx.platform.read_from_clipboard() } + + #[cfg(feature = "test-support")] + pub fn leak_detector(&self) -> Arc> { + self.cx.ref_counts.lock().leak_detector.clone() + } } impl ReadModel for MutableAppContext { @@ -2003,12 +2020,11 @@ impl UpgradeModelHandle for AppContext { fn upgrade_any_model_handle(&self, handle: &AnyWeakModelHandle) -> Option { if self.models.contains_key(&handle.model_id) { - self.ref_counts.lock().inc_model(handle.model_id); - Some(AnyModelHandle { - model_id: handle.model_id, - model_type: handle.model_type, - ref_counts: self.ref_counts.clone(), - }) + Some(AnyModelHandle::new( + handle.model_id, + handle.model_type, + self.ref_counts.clone(), + )) } else { None } @@ -2814,19 +2830,33 @@ pub enum EntityLocation { View(usize, usize), } -pub struct ModelHandle { +pub struct ModelHandle { model_id: usize, model_type: PhantomData, ref_counts: Arc>, + + #[cfg(feature = "test-support")] + handle_id: usize, } impl ModelHandle { fn new(model_id: usize, ref_counts: &Arc>) -> Self { ref_counts.lock().inc_model(model_id); + + #[cfg(feature = "test-support")] + let handle_id = 
ref_counts + .lock() + .leak_detector + .lock() + .handle_created(Some(type_name::()), model_id); + Self { model_id, model_type: PhantomData, ref_counts: ref_counts.clone(), + + #[cfg(feature = "test-support")] + handle_id, } } @@ -2975,44 +3005,39 @@ impl ModelHandle { } } -impl Clone for ModelHandle { +impl Clone for ModelHandle { fn clone(&self) -> Self { - self.ref_counts.lock().inc_model(self.model_id); - Self { - model_id: self.model_id, - model_type: PhantomData, - ref_counts: self.ref_counts.clone(), - } + Self::new(self.model_id, &self.ref_counts) } } -impl PartialEq for ModelHandle { +impl PartialEq for ModelHandle { fn eq(&self, other: &Self) -> bool { self.model_id == other.model_id } } -impl Eq for ModelHandle {} +impl Eq for ModelHandle {} -impl PartialEq> for ModelHandle { +impl PartialEq> for ModelHandle { fn eq(&self, other: &WeakModelHandle) -> bool { self.model_id == other.model_id } } -impl Hash for ModelHandle { +impl Hash for ModelHandle { fn hash(&self, state: &mut H) { self.model_id.hash(state); } } -impl std::borrow::Borrow for ModelHandle { +impl std::borrow::Borrow for ModelHandle { fn borrow(&self) -> &usize { &self.model_id } } -impl Debug for ModelHandle { +impl Debug for ModelHandle { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_tuple(&format!("ModelHandle<{}>", type_name::())) .field(&self.model_id) @@ -3020,12 +3045,19 @@ impl Debug for ModelHandle { } } -unsafe impl Send for ModelHandle {} -unsafe impl Sync for ModelHandle {} +unsafe impl Send for ModelHandle {} +unsafe impl Sync for ModelHandle {} -impl Drop for ModelHandle { +impl Drop for ModelHandle { fn drop(&mut self) { - self.ref_counts.lock().dec_model(self.model_id); + let mut ref_counts = self.ref_counts.lock(); + ref_counts.dec_model(self.model_id); + + #[cfg(feature = "test-support")] + ref_counts + .leak_detector + .lock() + .handle_dropped(self.model_id, self.handle_id); } } @@ -3431,15 +3463,41 @@ pub struct AnyModelHandle { model_id: usize, 
model_type: TypeId, ref_counts: Arc>, + + #[cfg(feature = "test-support")] + handle_id: usize, } impl AnyModelHandle { + fn new(model_id: usize, model_type: TypeId, ref_counts: Arc>) -> Self { + ref_counts.lock().inc_model(model_id); + + #[cfg(feature = "test-support")] + let handle_id = ref_counts + .lock() + .leak_detector + .lock() + .handle_created(None, model_id); + + Self { + model_id, + model_type, + ref_counts, + + #[cfg(feature = "test-support")] + handle_id, + } + } + pub fn downcast(self) -> Option> { if self.is::() { let result = Some(ModelHandle { model_id: self.model_id, model_type: PhantomData, ref_counts: self.ref_counts.clone(), + + #[cfg(feature = "test-support")] + handle_id: self.handle_id, }); unsafe { Arc::decrement_strong_count(&self.ref_counts); @@ -3465,29 +3523,30 @@ impl AnyModelHandle { impl From> for AnyModelHandle { fn from(handle: ModelHandle) -> Self { - handle.ref_counts.lock().inc_model(handle.model_id); - Self { - model_id: handle.model_id, - model_type: TypeId::of::(), - ref_counts: handle.ref_counts.clone(), - } + Self::new( + handle.model_id, + TypeId::of::(), + handle.ref_counts.clone(), + ) } } impl Clone for AnyModelHandle { fn clone(&self) -> Self { - self.ref_counts.lock().inc_model(self.model_id); - Self { - model_id: self.model_id, - model_type: self.model_type, - ref_counts: self.ref_counts.clone(), - } + Self::new(self.model_id, self.model_type, self.ref_counts.clone()) } } impl Drop for AnyModelHandle { fn drop(&mut self) { - self.ref_counts.lock().dec_model(self.model_id); + let mut ref_counts = self.ref_counts.lock(); + ref_counts.dec_model(self.model_id); + + #[cfg(feature = "test-support")] + ref_counts + .leak_detector + .lock() + .handle_dropped(self.model_id, self.handle_id); } } @@ -3694,6 +3753,51 @@ impl Drop for Subscription { } } +#[derive(Default)] +pub struct LeakDetector { + next_handle_id: usize, + handle_backtraces: HashMap, HashMap)>, +} + +impl LeakDetector { + fn handle_created(&mut self, 
type_name: Option<&'static str>, entity_id: usize) -> usize { + let handle_id = post_inc(&mut self.next_handle_id); + let entry = self.handle_backtraces.entry(entity_id).or_default(); + if let Some(type_name) = type_name { + entry.0.get_or_insert(type_name); + } + entry.1.insert(handle_id, Backtrace::new_unresolved()); + handle_id + } + + fn handle_dropped(&mut self, entity_id: usize, handle_id: usize) { + if let Some((_, backtraces)) = self.handle_backtraces.get_mut(&entity_id) { + assert!(backtraces.remove(&handle_id).is_some()); + if backtraces.is_empty() { + self.handle_backtraces.remove(&entity_id); + } + } + } + + pub fn detect(&mut self) { + let mut found_leaks = false; + for (id, (type_name, backtraces)) in self.handle_backtraces.iter_mut() { + eprintln!( + "leaked {} handles to {:?} {}", + backtraces.len(), + type_name.unwrap_or("entity"), + id + ); + for trace in backtraces.values_mut() { + trace.resolve(); + eprintln!("{:?}", CwdBacktrace(trace)); + } + found_leaks = true; + } + assert!(!found_leaks, "detected leaked handles"); + } +} + #[derive(Default)] struct RefCounts { entity_counts: HashMap, @@ -3701,6 +3805,9 @@ struct RefCounts { dropped_models: HashSet, dropped_views: HashSet<(usize, usize)>, dropped_element_states: HashSet, + + #[cfg(feature = "test-support")] + leak_detector: Arc>, } struct ElementStateRefCount { diff --git a/crates/gpui/src/executor.rs b/crates/gpui/src/executor.rs index 24cc60b996addd6fe20f6ad5df35cf7cdcc2bcbc..eb58edd1091a274513f990cbdcb676960b528e2f 100644 --- a/crates/gpui/src/executor.rs +++ b/crates/gpui/src/executor.rs @@ -1,6 +1,6 @@ use anyhow::{anyhow, Result}; use async_task::Runnable; -use backtrace::{Backtrace, BacktraceFmt, BytesOrWideString}; +use backtrace::Backtrace; use collections::HashMap; use parking_lot::Mutex; use postage::{barrier, prelude::Stream as _}; @@ -8,7 +8,7 @@ use rand::prelude::*; use smol::{channel, future::yield_now, prelude::*, Executor, Timer}; use std::{ any::Any, - fmt::{self, Debug, 
Display}, + fmt::{self, Display}, marker::PhantomData, mem, ops::RangeInclusive, @@ -282,7 +282,7 @@ impl DeterministicState { backtrace.resolve(); backtrace_message = format!( "\nbacktrace of waiting future:\n{:?}", - CwdBacktrace::new(backtrace) + util::CwdBacktrace(backtrace) ); } @@ -294,37 +294,6 @@ impl DeterministicState { } } -struct CwdBacktrace<'a> { - backtrace: &'a Backtrace, -} - -impl<'a> CwdBacktrace<'a> { - fn new(backtrace: &'a Backtrace) -> Self { - Self { backtrace } - } -} - -impl<'a> Debug for CwdBacktrace<'a> { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> std::fmt::Result { - let cwd = std::env::current_dir().unwrap(); - let mut print_path = |fmt: &mut fmt::Formatter<'_>, path: BytesOrWideString<'_>| { - fmt::Display::fmt(&path, fmt) - }; - let mut fmt = BacktraceFmt::new(f, backtrace::PrintFmt::Full, &mut print_path); - for frame in self.backtrace.frames() { - let mut formatted_frame = fmt.frame(); - if frame - .symbols() - .iter() - .any(|s| s.filename().map_or(false, |f| f.starts_with(&cwd))) - { - formatted_frame.backtrace_frame(frame)?; - } - } - fmt.finish() - } -} - impl Foreground { pub fn platform(dispatcher: Arc) -> Result { if dispatcher.is_main_thread() { diff --git a/crates/gpui/src/test.rs b/crates/gpui/src/test.rs index ac5ca0e86682080506e2e91f02f4fb9816932ca7..056c8d70a1e69f8604acb8a79f6015893e8d9cd1 100644 --- a/crates/gpui/src/test.rs +++ b/crates/gpui/src/test.rs @@ -1,3 +1,10 @@ +use crate::{ + executor, platform, Entity, FontCache, Handle, LeakDetector, MutableAppContext, Platform, + Subscription, TestAppContext, +}; +use futures::StreamExt; +use parking_lot::Mutex; +use smol::channel; use std::{ panic::{self, RefUnwindSafe}, rc::Rc, @@ -7,14 +14,6 @@ use std::{ }, }; -use futures::StreamExt; -use smol::channel; - -use crate::{ - executor, platform, Entity, FontCache, Handle, MutableAppContext, Platform, Subscription, - TestAppContext, -}; - #[cfg(test)] #[ctor::ctor] fn init_logger() { @@ -65,24 +64,27 @@ pub fn 
run_test( } let deterministic = executor::Deterministic::new(seed); + let leak_detector = Arc::new(Mutex::new(LeakDetector::default())); let mut cx = TestAppContext::new( foreground_platform.clone(), platform.clone(), deterministic.build_foreground(usize::MAX), deterministic.build_background(), font_cache.clone(), + leak_detector.clone(), 0, ); cx.update(|cx| { test_fn( cx, foreground_platform.clone(), - deterministic, + deterministic.clone(), seed, is_last_iteration, ) }); + leak_detector.lock().detect(); if is_last_iteration { break; } diff --git a/crates/gpui/src/util.rs b/crates/gpui/src/util.rs index 10731ced5cc136a5aa48224a9746efabadfbacfc..9677be56bec9abd84224415f0eb9e34b2b5b8954 100644 --- a/crates/gpui/src/util.rs +++ b/crates/gpui/src/util.rs @@ -1,5 +1,6 @@ +use backtrace::{Backtrace, BacktraceFmt, BytesOrWideString}; use smol::future::FutureExt; -use std::{future::Future, time::Duration}; +use std::{fmt, future::Future, time::Duration}; pub fn post_inc(value: &mut usize) -> usize { let prev = *value; @@ -18,3 +19,27 @@ where let future = async move { Ok(f.await) }; timer.race(future).await } + +pub struct CwdBacktrace<'a>(pub &'a Backtrace); + +impl<'a> std::fmt::Debug for CwdBacktrace<'a> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> std::fmt::Result { + let cwd = std::env::current_dir().unwrap(); + let cwd = cwd.parent().unwrap(); + let mut print_path = |fmt: &mut fmt::Formatter<'_>, path: BytesOrWideString<'_>| { + fmt::Display::fmt(&path, fmt) + }; + let mut fmt = BacktraceFmt::new(f, backtrace::PrintFmt::Full, &mut print_path); + for frame in self.0.frames() { + let mut formatted_frame = fmt.frame(); + if frame + .symbols() + .iter() + .any(|s| s.filename().map_or(false, |f| f.starts_with(&cwd))) + { + formatted_frame.backtrace_frame(frame)?; + } + } + fmt.finish() + } +} diff --git a/crates/gpui_macros/src/gpui_macros.rs b/crates/gpui_macros/src/gpui_macros.rs index 
885cc8311a42fd788b69b26c9e82400de5a51546..8c7b8dc71db5c8a12cc5f09a8e42b5805661ac27 100644 --- a/crates/gpui_macros/src/gpui_macros.rs +++ b/crates/gpui_macros/src/gpui_macros.rs @@ -80,6 +80,7 @@ pub fn test(args: TokenStream, function: TokenStream) -> TokenStream { deterministic.build_foreground(#ix), deterministic.build_background(), cx.font_cache().clone(), + cx.leak_detector(), #first_entity_id, ), )); diff --git a/crates/server/src/rpc.rs b/crates/server/src/rpc.rs index a9ebdceca09f403a81f8f21cc9a5562faf152329..7b376eca35d4eb31b37ff67532b2125ca7cdc939 100644 --- a/crates/server/src/rpc.rs +++ b/crates/server/src/rpc.rs @@ -4213,6 +4213,7 @@ mod tests { cx.foreground(), cx.background(), cx.font_cache(), + cx.leak_detector(), next_entity_id, ); let host = server.create_client(&mut host_cx, "host").await; @@ -4249,7 +4250,7 @@ mod tests { operations.clone(), max_operations, rng.clone(), - host_cx.clone(), + host_cx, ))); while operations.get() < max_operations { @@ -4266,6 +4267,7 @@ mod tests { cx.foreground(), cx.background(), cx.font_cache(), + cx.leak_detector(), next_entity_id, ); let guest = server @@ -4276,7 +4278,7 @@ mod tests { guest.client.clone(), guest.user_store.clone(), guest_lang_registry.clone(), - fs.clone(), + FakeFs::new(cx.background()), &mut guest_cx.to_async(), ) .await @@ -4294,9 +4296,10 @@ mod tests { } } - let clients = futures::future::join_all(clients).await; + let mut clients = futures::future::join_all(clients).await; cx.foreground().run_until_parked(); + let (_, host_cx) = clients.remove(0); let host_worktree_snapshots = host_project.read_with(&host_cx, |project, cx| { project .worktrees(cx) @@ -4307,7 +4310,7 @@ mod tests { .collect::>() }); - for (guest_client, guest_cx) in clients.iter().skip(1) { + for (guest_client, guest_cx) in clients.iter() { let guest_id = guest_client.client.id(); let worktree_snapshots = guest_client From 426e0e3d4f577dfcec082e8ccd333033016ba383 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Mon, 28 
Feb 2022 22:17:20 -0800 Subject: [PATCH 02/45] Flush effects when dropping a TestAppContext This way, at the end of a test, dropped entities will be removed, and will drop the handles that they hold. --- crates/gpui/src/app.rs | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/crates/gpui/src/app.rs b/crates/gpui/src/app.rs index ff2baac8d9b4e0032cca63175730548acc25089f..1c9ab05cdde2a5e374fb59e933de8914e7e7eece 100644 --- a/crates/gpui/src/app.rs +++ b/crates/gpui/src/app.rs @@ -565,6 +565,12 @@ impl TestAppContext { } } +impl Drop for TestAppContext { + fn drop(&mut self) { + self.cx.borrow_mut().flush_effects(); + } +} + impl AsyncAppContext { pub fn spawn(&self, f: F) -> Task where From c09921e790c1949a86e7fcc5dfca213b0cf2594c Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Mon, 28 Feb 2022 22:20:03 -0800 Subject: [PATCH 03/45] Clear the executor at the end of a test by running until it is parked --- crates/gpui/src/executor.rs | 2 +- crates/gpui/src/test.rs | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/crates/gpui/src/executor.rs b/crates/gpui/src/executor.rs index eb58edd1091a274513f990cbdcb676960b528e2f..79877029e0fe438469da03db46533c9f4f534ef7 100644 --- a/crates/gpui/src/executor.rs +++ b/crates/gpui/src/executor.rs @@ -174,7 +174,7 @@ impl Deterministic { } } - fn run_until_parked(&self) { + pub(crate) fn run_until_parked(&self) { let woken = Arc::new(AtomicBool::new(false)); self.run_internal(woken, None); } diff --git a/crates/gpui/src/test.rs b/crates/gpui/src/test.rs index 056c8d70a1e69f8604acb8a79f6015893e8d9cd1..57875383ee25d8b1d64c5a5c705f55686206e41c 100644 --- a/crates/gpui/src/test.rs +++ b/crates/gpui/src/test.rs @@ -84,6 +84,7 @@ pub fn run_test( ) }); + deterministic.run_until_parked(); leak_detector.lock().detect(); if is_last_iteration { break; From 3788efeadf7f7fd85246ba1afed5128905ad0f81 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Mon, 28 Feb 2022 22:21:32 -0800 Subject: [PATCH 04/45] Clean up guest 
connection states correctly when a collaborator disconnects This bug was caught by running the executor until parked after tests. Co-Authored-By: Nathan Sobo --- crates/server/src/rpc/store.rs | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/crates/server/src/rpc/store.rs b/crates/server/src/rpc/store.rs index e6c4429b6960a749175f4ee9fac25a792e715fbc..3d68836b572c54c2e2be278afc72bcc7bca4a472 100644 --- a/crates/server/src/rpc/store.rs +++ b/crates/server/src/rpc/store.rs @@ -296,7 +296,16 @@ impl Store { } } - Ok(e.remove()) + let project = e.remove(); + if let Some(share) = &project.share { + for guest_connection in share.guests.keys() { + if let Some(connection) = self.connections.get_mut(&guest_connection) { + connection.projects.remove(&project_id); + } + } + } + + Ok(project) } else { Err(anyhow!("no such project"))? } From 1faaa91e5269b00b42e70f0a3b159fd7ba2ec520 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Mon, 28 Feb 2022 22:39:17 -0800 Subject: [PATCH 05/45] Avoid retaining executor in the FakeFs This probably isn't the *root* cause of why an executor is leaked, but by cutting off this cyclic reference, it may make it a bit easier to track down leaks of an executor. --- crates/project/src/fs.rs | 32 ++++++++++++++++++++------------ 1 file changed, 20 insertions(+), 12 deletions(-) diff --git a/crates/project/src/fs.rs b/crates/project/src/fs.rs index 578be8cf82cf1a6e11648c6a62ba1c4f083bacf2..9660312d5b05ed05089e9ed7fd7baf564a9078c8 100644 --- a/crates/project/src/fs.rs +++ b/crates/project/src/fs.rs @@ -267,13 +267,13 @@ impl FakeFsState { pub struct FakeFs { // Use an unfair lock to ensure tests are deterministic.
state: futures::lock::Mutex, - executor: std::sync::Arc, + executor: std::sync::Weak, } #[cfg(any(test, feature = "test-support"))] impl FakeFs { pub fn new(executor: std::sync::Arc) -> std::sync::Arc { - let (events_tx, _) = postage::broadcast::channel(2048); + let (events_tx, _) = postage::broadcast::channel(2); let mut entries = std::collections::BTreeMap::new(); entries.insert( Path::new("/").to_path_buf(), @@ -288,7 +288,7 @@ impl FakeFs { }, ); std::sync::Arc::new(Self { - executor, + executor: std::sync::Arc::downgrade(&executor), state: futures::lock::Mutex::new(FakeFsState { entries, next_inode: 1, @@ -375,13 +375,21 @@ impl FakeFs { } .boxed() } + + async fn simulate_random_delay(&self) { + self.executor + .upgrade() + .expect("excecutor has been dropped") + .simulate_random_delay() + .await; + } } #[cfg(any(test, feature = "test-support"))] #[async_trait::async_trait] impl Fs for FakeFs { async fn create_dir(&self, path: &Path) -> Result<()> { - self.executor.simulate_random_delay().await; + self.simulate_random_delay().await; let state = &mut *self.state.lock().await; let path = normalize_path(path); let mut ancestor_path = PathBuf::new(); @@ -418,7 +426,7 @@ impl Fs for FakeFs { } async fn create_file(&self, path: &Path, options: CreateOptions) -> Result<()> { - self.executor.simulate_random_delay().await; + self.simulate_random_delay().await; let mut state = self.state.lock().await; let path = normalize_path(path); state.validate_path(&path)?; @@ -546,7 +554,7 @@ impl Fs for FakeFs { async fn load(&self, path: &Path) -> Result { let path = normalize_path(path); - self.executor.simulate_random_delay().await; + self.simulate_random_delay().await; let state = self.state.lock().await; let text = state .entries @@ -557,7 +565,7 @@ impl Fs for FakeFs { } async fn save(&self, path: &Path, text: &Rope) -> Result<()> { - self.executor.simulate_random_delay().await; + self.simulate_random_delay().await; let mut state = self.state.lock().await; let path = 
normalize_path(path); state.validate_path(&path)?; @@ -589,13 +597,13 @@ impl Fs for FakeFs { } async fn canonicalize(&self, path: &Path) -> Result { - self.executor.simulate_random_delay().await; + self.simulate_random_delay().await; Ok(normalize_path(path)) } async fn is_file(&self, path: &Path) -> bool { let path = normalize_path(path); - self.executor.simulate_random_delay().await; + self.simulate_random_delay().await; let state = self.state.lock().await; state .entries @@ -604,7 +612,7 @@ impl Fs for FakeFs { } async fn metadata(&self, path: &Path) -> Result> { - self.executor.simulate_random_delay().await; + self.simulate_random_delay().await; let state = self.state.lock().await; let path = normalize_path(path); Ok(state.entries.get(&path).map(|entry| entry.metadata.clone())) @@ -615,7 +623,7 @@ impl Fs for FakeFs { abs_path: &Path, ) -> Result>>>> { use futures::{future, stream}; - self.executor.simulate_random_delay().await; + self.simulate_random_delay().await; let state = self.state.lock().await; let abs_path = normalize_path(abs_path); Ok(Box::pin(stream::iter(state.entries.clone()).filter_map( @@ -635,7 +643,7 @@ impl Fs for FakeFs { _: Duration, ) -> Pin>>> { let state = self.state.lock().await; - self.executor.simulate_random_delay().await; + self.simulate_random_delay().await; let rx = state.events_tx.subscribe(); let path = path.to_path_buf(); Box::pin(futures::StreamExt::filter(rx, move |events| { From 02ae2d8a4fd7c630fe25c84f1298a84622a0cde8 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Mon, 28 Feb 2022 22:47:50 -0800 Subject: [PATCH 06/45] Hold client weakly in UserStore This avoids a reference cycle that is causing some tests to fail due to leaked handles at the moment. There may be a better way to fix this though. 
--- crates/client/src/user.rs | 39 ++++++++++++++++++++------------------- 1 file changed, 20 insertions(+), 19 deletions(-) diff --git a/crates/client/src/user.rs b/crates/client/src/user.rs index c318c7f5050fbc7e80125c12289f84d5b8abb20c..5e7d29bfa68665b8ad205384c708b3b1d970778b 100644 --- a/crates/client/src/user.rs +++ b/crates/client/src/user.rs @@ -8,7 +8,7 @@ use gpui::{AsyncAppContext, Entity, ImageData, ModelContext, ModelHandle, Task}; use postage::{prelude::Stream, sink::Sink, watch}; use std::{ collections::{HashMap, HashSet}, - sync::Arc, + sync::{Arc, Weak}, }; use util::TryFutureExt as _; @@ -38,7 +38,7 @@ pub struct UserStore { update_contacts_tx: watch::Sender>, current_user: watch::Receiver>>, contacts: Arc<[Contact]>, - client: Arc, + client: Weak, http: Arc, _maintain_contacts: Task<()>, _maintain_current_user: Task<()>, @@ -65,7 +65,7 @@ impl UserStore { users: Default::default(), current_user: current_user_rx, contacts: Arc::from([]), - client: client.clone(), + client: Arc::downgrade(&client), update_contacts_tx, http, _maintain_contacts: cx.spawn_weak(|this, mut cx| async move { @@ -156,25 +156,26 @@ impl UserStore { let http = self.http.clone(); user_ids.retain(|id| !self.users.contains_key(id)); cx.spawn_weak(|this, mut cx| async move { - if !user_ids.is_empty() { - let response = rpc.request(proto::GetUsers { user_ids }).await?; - let new_users = future::join_all( - response - .users - .into_iter() - .map(|user| User::new(user, http.as_ref())), - ) - .await; + if let Some(rpc) = rpc.upgrade() { + if !user_ids.is_empty() { + let response = rpc.request(proto::GetUsers { user_ids }).await?; + let new_users = future::join_all( + response + .users + .into_iter() + .map(|user| User::new(user, http.as_ref())), + ) + .await; - if let Some(this) = this.upgrade(&cx) { - this.update(&mut cx, |this, _| { - for user in new_users { - this.users.insert(user.id, Arc::new(user)); - } - }); + if let Some(this) = this.upgrade(&cx) { + this.update(&mut cx, 
|this, _| { + for user in new_users { + this.users.insert(user.id, Arc::new(user)); + } + }); + } } } - Ok(()) }) } From 3b7cfad718469e77155a06c4d6844044719948ab Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Mon, 28 Feb 2022 22:52:08 -0800 Subject: [PATCH 07/45] Try clearing Client's state at the ends of integration tests Co-Authored-By: Nathan Sobo --- crates/client/src/client.rs | 9 +++++++++ crates/server/src/rpc.rs | 6 ++++++ 2 files changed, 15 insertions(+) diff --git a/crates/client/src/client.rs b/crates/client/src/client.rs index f0f6ef6146fe9efe712a8c14a9791b6143714863..d0c34f31f407aa3fe66630cdc040ba29872cb33d 100644 --- a/crates/client/src/client.rs +++ b/crates/client/src/client.rs @@ -228,6 +228,15 @@ impl Client { self.http.clone() } + #[cfg(any(test, feature = "test-support"))] + pub fn tear_down(&self) { + let mut state = self.state.write(); + state.message_handlers.clear(); + state.models_by_message_type.clear(); + state.models_by_entity_type_and_remote_id.clear(); + state.entity_id_extractors.clear(); + } + #[cfg(any(test, feature = "test-support"))] pub fn override_authenticate(&mut self, authenticate: F) -> &mut Self where diff --git a/crates/server/src/rpc.rs b/crates/server/src/rpc.rs index 7b376eca35d4eb31b37ff67532b2125ca7cdc939..af5ee8aa847b01380fe6d9a64bf2c72736272b1a 100644 --- a/crates/server/src/rpc.rs +++ b/crates/server/src/rpc.rs @@ -5052,6 +5052,12 @@ mod tests { } } + impl Drop for TestClient { + fn drop(&mut self) { + self.client.tear_down(); + } + } + impl Executor for Arc { fn spawn_detached>(&self, future: F) { self.spawn(future).detach(); From 471ecae82c3bd9e9401dafc6651c2b2893b6f7c3 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Mon, 28 Feb 2022 22:50:15 -0800 Subject: [PATCH 08/45] WIP - include dhat for memory profiling tests --- Cargo.lock | 58 +++++++++++++++++++++++++++++------------ crates/gpui/Cargo.toml | 1 + crates/gpui/src/test.rs | 5 ++++ 3 files changed, 48 insertions(+), 16 deletions(-) diff --git 
a/Cargo.lock b/Cargo.lock index e8cf0a12f076aff68c4d12fa8b5e9a1f9df5e70f..71fe0871c4a6198a30ba1b4bb6e92093f08ef53a 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4,9 +4,9 @@ version = 3 [[package]] name = "addr2line" -version = "0.14.1" +version = "0.17.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a55f82cfe485775d02112886f4169bde0c5894d75e79ead7eafe7e40a25e45f7" +checksum = "b9ecd88a8c8378ca913a680cd98f0f13ac67383d35993f86c90a70e3f137816b" dependencies = [ "gimli", ] @@ -558,11 +558,12 @@ checksum = "cdb031dd78e28731d87d56cc8ffef4a8f36ca26c38fe2de700543e627f8a464a" [[package]] name = "backtrace" -version = "0.3.56" +version = "0.3.64" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9d117600f438b1707d4e4ae15d3595657288f8235a0eb593e80ecc98ab34e1bc" +checksum = "5e121dee8023ce33ab248d9ce1493df03c3b38a659b240096fcbd7048ff9c31f" dependencies = [ "addr2line", + "cc", "cfg-if 1.0.0", "libc", "miniz_oxide 0.4.4", @@ -1438,6 +1439,21 @@ dependencies = [ "const-oid", ] +[[package]] +name = "dhat" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "47003dc9f6368a88e85956c3b2573a7e6872746a3e5d762a8885da3a136a0381" +dependencies = [ + "backtrace", + "lazy_static", + "parking_lot", + "rustc-hash", + "serde", + "serde_json", + "thousands", +] + [[package]] name = "diagnostics" version = "0.1.0" @@ -2124,9 +2140,9 @@ dependencies = [ [[package]] name = "gimli" -version = "0.23.0" +version = "0.26.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f6503fe142514ca4799d4c26297c4248239fe8838d827db6bd6065c6ed29a6ce" +checksum = "78cc372d058dcf6d5ecd98510e7fbc9e5aec4d21de70f65fea8fecebcd881bd4" [[package]] name = "glob" @@ -2187,6 +2203,7 @@ dependencies = [ "core-graphics", "core-text", "ctor", + "dhat", "env_logger", "etagere", "font-kit", @@ -2766,9 +2783,9 @@ dependencies = [ [[package]] name = "lock_api" -version = "0.4.2" +version = "0.4.6" 
source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dd96ffd135b2fd7b973ac026d28085defbe8983df057ced3eb4f2130b0831312" +checksum = "88943dd7ef4a2e5a4bfa2753aaab3013e34ce2533d1996fb18ef591e315e2b3b" dependencies = [ "scopeguard", ] @@ -3134,9 +3151,12 @@ dependencies = [ [[package]] name = "object" -version = "0.23.0" +version = "0.27.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a9a7ab5d64814df0fe4a4b5ead45ed6c5f181ee3ff04ba344313a6c80446c5d4" +checksum = "67ac1d3f9a1d3616fd9a60c8d74296f22406a238b6a72f5cc1e6f314df4ffbf9" +dependencies = [ + "memchr", +] [[package]] name = "once_cell" @@ -3224,9 +3244,9 @@ checksum = "427c3892f9e783d91cc128285287e70a59e206ca452770ece88a76f7a3eddd72" [[package]] name = "parking_lot" -version = "0.11.1" +version = "0.11.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6d7744ac029df22dca6284efe4e898991d28e3085c706c972bcd7da4a27a15eb" +checksum = "7d17b78036a60663b797adeaee46f5c9dfebb86948d1255007a1d6be0271ff99" dependencies = [ "instant", "lock_api", @@ -3235,9 +3255,9 @@ dependencies = [ [[package]] name = "parking_lot_core" -version = "0.8.3" +version = "0.8.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fa7a782938e745763fe6907fc6ba86946d72f49fe7e21de074e08128a99fb018" +checksum = "d76e8e1493bcac0d2766c42737f34458f1c8c50c0d23bcb24ea953affb273216" dependencies = [ "cfg-if 1.0.0", "instant", @@ -3832,9 +3852,9 @@ dependencies = [ [[package]] name = "redox_syscall" -version = "0.2.5" +version = "0.2.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "94341e4e44e24f6b591b59e47a8a027df12e008d73fd5672dbea9cc22f4507d9" +checksum = "8383f39639269cde97d255a32bdb68c047337295414940c68bdd30c2e13203ff" dependencies = [ "bitflags", ] @@ -5071,6 +5091,12 @@ dependencies = [ "syn", ] +[[package]] +name = "thousands" +version = "0.2.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "3bf63baf9f5039dadc247375c29eb13706706cfde997d0330d05aa63a77d8820" + [[package]] name = "thread_local" version = "1.1.3" diff --git a/crates/gpui/Cargo.toml b/crates/gpui/Cargo.toml index 6d33806f1a06f8687ded64daad336928fb566c4a..7a0e356d2a908dc47f45ea01c137a674ca11f09f 100644 --- a/crates/gpui/Cargo.toml +++ b/crates/gpui/Cargo.toml @@ -17,6 +17,7 @@ sum_tree = { path = "../sum_tree" } async-task = "4.0.3" backtrace = "0.3" ctor = "0.1" +dhat = "0.3" env_logger = { version = "0.8", optional = true } etagere = "0.2" futures = "0.3" diff --git a/crates/gpui/src/test.rs b/crates/gpui/src/test.rs index 57875383ee25d8b1d64c5a5c705f55686206e41c..788084067dce985c8c265868a37f7c8506848890 100644 --- a/crates/gpui/src/test.rs +++ b/crates/gpui/src/test.rs @@ -22,6 +22,9 @@ fn init_logger() { } } +// #[global_allocator] +// static ALLOC: dhat::Alloc = dhat::Alloc; + pub fn run_test( mut num_iterations: u64, mut starting_seed: u64, @@ -35,6 +38,8 @@ pub fn run_test( bool, )), ) { + // let _profiler = dhat::Profiler::new_heap(); + let is_randomized = num_iterations > 1; if is_randomized { if let Ok(value) = std::env::var("SEED") { From 10a872a370b5f9d323bfd4451b7eded089c97085 Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Tue, 1 Mar 2022 10:49:40 +0100 Subject: [PATCH 09/45] Avoid reference cycle between `Client` and its models --- crates/client/src/client.rs | 23 +++++++++++++++-------- crates/gpui/src/app.rs | 9 +++++++++ 2 files changed, 24 insertions(+), 8 deletions(-) diff --git a/crates/client/src/client.rs b/crates/client/src/client.rs index d0c34f31f407aa3fe66630cdc040ba29872cb33d..87f3b0c43c8c14b2504f2c1a904a28509abadab3 100644 --- a/crates/client/src/client.rs +++ b/crates/client/src/client.rs @@ -133,9 +133,8 @@ struct ClientState { entity_id_extractors: HashMap u64>>, _maintain_connection: Option>, heartbeat_interval: Duration, - models_by_entity_type_and_remote_id: HashMap<(TypeId, u64), 
AnyWeakModelHandle>, - models_by_message_type: HashMap, + models_by_message_type: HashMap, model_types_by_message_type: HashMap, message_handlers: HashMap< TypeId, @@ -348,18 +347,22 @@ impl Client { { let message_type_id = TypeId::of::(); - let client = self.clone(); + let client = Arc::downgrade(self); let mut state = self.state.write(); state .models_by_message_type - .insert(message_type_id, model.into()); + .insert(message_type_id, model.downgrade().into()); let prev_handler = state.message_handlers.insert( message_type_id, Arc::new(move |handle, envelope, cx| { let model = handle.downcast::().unwrap(); let envelope = envelope.into_any().downcast::>().unwrap(); - handler(model, *envelope, client.clone(), cx).boxed_local() + if let Some(client) = client.upgrade() { + handler(model, *envelope, client.clone(), cx).boxed_local() + } else { + async move { Ok(()) }.boxed_local() + } }), ); if prev_handler.is_some() { @@ -385,7 +388,7 @@ impl Client { let model_type_id = TypeId::of::(); let message_type_id = TypeId::of::(); - let client = self.clone(); + let client = Arc::downgrade(self); let mut state = self.state.write(); state .model_types_by_message_type @@ -408,7 +411,11 @@ impl Client { Arc::new(move |handle, envelope, cx| { let model = handle.downcast::().unwrap(); let envelope = envelope.into_any().downcast::>().unwrap(); - handler(model, *envelope, client.clone(), cx).boxed_local() + if let Some(client) = client.upgrade() { + handler(model, *envelope, client.clone(), cx).boxed_local() + } else { + async move { Ok(()) }.boxed_local() + } }), ); if prev_handler.is_some() { @@ -550,7 +557,7 @@ impl Client { let model = state .models_by_message_type .get(&payload_type_id) - .cloned() + .and_then(|model| model.upgrade(&cx)) .or_else(|| { let model_type_id = *state.model_types_by_message_type.get(&payload_type_id)?; diff --git a/crates/gpui/src/app.rs b/crates/gpui/src/app.rs index 1c9ab05cdde2a5e374fb59e933de8914e7e7eece..fc52a66d1b2e60a12fe1c74d9cae0bb60dca423c 
100644 --- a/crates/gpui/src/app.rs +++ b/crates/gpui/src/app.rs @@ -3567,6 +3567,15 @@ impl AnyWeakModelHandle { } } +impl From> for AnyWeakModelHandle { + fn from(handle: WeakModelHandle) -> Self { + AnyWeakModelHandle { + model_id: handle.model_id, + model_type: TypeId::of::(), + } + } +} + pub struct WeakViewHandle { window_id: usize, view_id: usize, From 7ce6f23ed42f147b42d3b637664ca47277936df5 Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Tue, 1 Mar 2022 10:55:12 +0100 Subject: [PATCH 10/45] Remove all windows on dropping `TestAppContext` This allows us to remove the dropped entities and flush effects as necessary. --- crates/gpui/src/app.rs | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/crates/gpui/src/app.rs b/crates/gpui/src/app.rs index fc52a66d1b2e60a12fe1c74d9cae0bb60dca423c..9842e14fbdc518e43015fb6da9f02a841ef486b3 100644 --- a/crates/gpui/src/app.rs +++ b/crates/gpui/src/app.rs @@ -567,7 +567,7 @@ impl TestAppContext { impl Drop for TestAppContext { fn drop(&mut self) { - self.cx.borrow_mut().flush_effects(); + self.cx.borrow_mut().remove_all_windows(); } } @@ -851,7 +851,7 @@ impl MutableAppContext { for (window_id, _) in self.cx.windows.drain() { self.presenters_and_platform_windows.remove(&window_id); } - self.remove_dropped_entities(); + self.flush_effects(); } pub fn platform(&self) -> Arc { @@ -1401,7 +1401,6 @@ impl MutableAppContext { pub fn remove_window(&mut self, window_id: usize) { self.cx.windows.remove(&window_id); self.presenters_and_platform_windows.remove(&window_id); - self.remove_dropped_entities(); self.flush_effects(); } From 8390f04e7d86a0f0bb6aebfa4858d91e3e858b2c Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Tue, 1 Mar 2022 11:59:58 +0100 Subject: [PATCH 11/45] Allow capturing references in the future passed to `Deterministic::run` --- crates/gpui/src/executor.rs | 33 +++++++++++++++++++++++++++------ 1 file changed, 27 insertions(+), 6 deletions(-) diff --git 
a/crates/gpui/src/executor.rs b/crates/gpui/src/executor.rs index 79877029e0fe438469da03db46533c9f4f534ef7..bfe3eff3d4a6eca93b5671749dc04fe7b02c947f 100644 --- a/crates/gpui/src/executor.rs +++ b/crates/gpui/src/executor.rs @@ -156,9 +156,30 @@ impl Deterministic { task } - fn run(&self, cx_id: usize, main_future: AnyLocalFuture) -> Box { + fn run<'a>( + &self, + cx_id: usize, + main_future: Pin>>>, + ) -> Box { let woken = Arc::new(AtomicBool::new(false)); - let mut main_task = self.spawn_from_foreground(cx_id, main_future, true); + + let state = self.state.clone(); + let unparker = self.parker.lock().unparker(); + let (runnable, mut main_task) = unsafe { + async_task::spawn_unchecked(main_future, move |runnable| { + let mut state = state.lock(); + state + .scheduled_from_foreground + .entry(cx_id) + .or_default() + .push(ForegroundRunnable { + runnable, + main: true, + }); + unparker.unpark(); + }) + }; + runnable.schedule(); loop { if let Some(result) = self.run_internal(woken.clone(), Some(&mut main_task)) { @@ -330,13 +351,13 @@ impl Foreground { Task::local(any_task) } - pub fn run(&self, future: impl 'static + Future) -> T { - let future = any_local_future(future); - let any_value = match self { + pub fn run(&self, future: impl Future) -> T { + let future = async move { Box::new(future.await) as Box }.boxed_local(); + let result = match self { Self::Deterministic { cx_id, executor } => executor.run(*cx_id, future), Self::Platform { .. } => panic!("you can't call run on a platform foreground executor"), }; - *any_value.downcast().unwrap() + *result.downcast().unwrap() } pub fn run_until_parked(&self) { From 466db69780f8c941cf6a7bdca3209d1efa772bb0 Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Tue, 1 Mar 2022 12:01:02 +0100 Subject: [PATCH 12/45] Pass a reference to `TestAppContext` in tests This allows us to drop the context *after* we ran all futures to completion and that's crucial otherwise we'll never drop entities and/or flush effects. 
--- crates/client/src/channel.rs | 20 +- crates/client/src/client.rs | 21 +- crates/diagnostics/src/diagnostics.rs | 16 +- crates/editor/src/display_map.rs | 30 +- crates/editor/src/display_map/wrap_map.rs | 39 +- crates/editor/src/editor.rs | 82 +-- crates/file_finder/src/file_finder.rs | 32 +- crates/gpui/src/app.rs | 28 +- crates/gpui_macros/src/gpui_macros.rs | 55 +- crates/language/src/tests.rs | 56 +- crates/lsp/src/lsp.rs | 2 +- crates/project/src/project.rs | 216 ++++--- crates/project/src/worktree.rs | 6 +- crates/project_panel/src/project_panel.rs | 20 +- crates/rpc/src/peer.rs | 10 +- crates/search/src/buffer_search.rs | 78 +-- crates/search/src/project_search.rs | 20 +- crates/server/src/db.rs | 6 +- crates/server/src/rpc.rs | 670 ++++++++++------------ crates/zed/src/zed.rs | 160 +++--- 20 files changed, 748 insertions(+), 819 deletions(-) diff --git a/crates/client/src/channel.rs b/crates/client/src/channel.rs index 1b00d4daf697d73f7bda0dea140dc399711a06f1..18a0e156db6b881cf0c6a0a8e779c573d1c34f40 100644 --- a/crates/client/src/channel.rs +++ b/crates/client/src/channel.rs @@ -597,7 +597,7 @@ mod tests { use surf::http::Response; #[gpui::test] - async fn test_channel_messages(mut cx: TestAppContext) { + async fn test_channel_messages(cx: &mut TestAppContext) { cx.foreground().forbid_parking(); let user_id = 5; @@ -609,7 +609,7 @@ mod tests { let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx)); let channel_list = cx.add_model(|cx| ChannelList::new(user_store, client.clone(), cx)); - channel_list.read_with(&cx, |list, _| assert_eq!(list.available_channels(), None)); + channel_list.read_with(cx, |list, _| assert_eq!(list.available_channels(), None)); // Get the available channels. 
let get_channels = server.receive::().await.unwrap(); @@ -625,7 +625,7 @@ mod tests { ) .await; channel_list.next_notification(&cx).await; - channel_list.read_with(&cx, |list, _| { + channel_list.read_with(cx, |list, _| { assert_eq!( list.available_channels().unwrap(), &[ChannelDetails { @@ -652,12 +652,12 @@ mod tests { // Join a channel and populate its existing messages. let channel = channel_list - .update(&mut cx, |list, cx| { + .update(cx, |list, cx| { let channel_id = list.available_channels().unwrap()[0].id; list.get_channel(channel_id, cx) }) .unwrap(); - channel.read_with(&cx, |channel, _| assert!(channel.messages().is_empty())); + channel.read_with(cx, |channel, _| assert!(channel.messages().is_empty())); let join_channel = server.receive::().await.unwrap(); server .respond( @@ -708,7 +708,7 @@ mod tests { new_count: 2, } ); - channel.read_with(&cx, |channel, _| { + channel.read_with(cx, |channel, _| { assert_eq!( channel .messages_in_range(0..2) @@ -723,7 +723,7 @@ mod tests { // Receive a new message. server.send(proto::ChannelMessageSent { - channel_id: channel.read_with(&cx, |channel, _| channel.details.id), + channel_id: channel.read_with(cx, |channel, _| channel.details.id), message: Some(proto::ChannelMessage { id: 12, body: "c".into(), @@ -756,7 +756,7 @@ mod tests { new_count: 1, } ); - channel.read_with(&cx, |channel, _| { + channel.read_with(cx, |channel, _| { assert_eq!( channel .messages_in_range(2..3) @@ -767,7 +767,7 @@ mod tests { }); // Scroll up to view older messages. 
- channel.update(&mut cx, |channel, cx| { + channel.update(cx, |channel, cx| { assert!(channel.load_more_messages(cx)); }); let get_messages = server.receive::().await.unwrap(); @@ -805,7 +805,7 @@ mod tests { new_count: 2, } ); - channel.read_with(&cx, |channel, _| { + channel.read_with(cx, |channel, _| { assert_eq!( channel .messages_in_range(0..2) diff --git a/crates/client/src/client.rs b/crates/client/src/client.rs index 87f3b0c43c8c14b2504f2c1a904a28509abadab3..5126c9384ce0ade3d5c4e7f2b4feaa09d3d4bba5 100644 --- a/crates/client/src/client.rs +++ b/crates/client/src/client.rs @@ -933,7 +933,7 @@ mod tests { use gpui::TestAppContext; #[gpui::test(iterations = 10)] - async fn test_heartbeat(cx: TestAppContext) { + async fn test_heartbeat(cx: &mut TestAppContext) { cx.foreground().forbid_parking(); let user_id = 5; @@ -953,7 +953,7 @@ mod tests { } #[gpui::test(iterations = 10)] - async fn test_reconnection(cx: TestAppContext) { + async fn test_reconnection(cx: &mut TestAppContext) { cx.foreground().forbid_parking(); let user_id = 5; @@ -1001,7 +1001,7 @@ mod tests { } #[gpui::test] - async fn test_subscribing_to_entity(mut cx: TestAppContext) { + async fn test_subscribing_to_entity(cx: &mut TestAppContext) { cx.foreground().forbid_parking(); let user_id = 5; @@ -1033,14 +1033,11 @@ mod tests { subscription: None, }); - let _subscription1 = - model1.update(&mut cx, |_, cx| client.add_model_for_remote_entity(1, cx)); - let _subscription2 = - model2.update(&mut cx, |_, cx| client.add_model_for_remote_entity(2, cx)); + let _subscription1 = model1.update(cx, |_, cx| client.add_model_for_remote_entity(1, cx)); + let _subscription2 = model2.update(cx, |_, cx| client.add_model_for_remote_entity(2, cx)); // Ensure dropping a subscription for the same entity type still allows receiving of // messages for other entity IDs of the same type. 
- let subscription3 = - model3.update(&mut cx, |_, cx| client.add_model_for_remote_entity(3, cx)); + let subscription3 = model3.update(cx, |_, cx| client.add_model_for_remote_entity(3, cx)); drop(subscription3); server.send(proto::UnshareProject { project_id: 1 }); @@ -1050,7 +1047,7 @@ mod tests { } #[gpui::test] - async fn test_subscribing_after_dropping_subscription(mut cx: TestAppContext) { + async fn test_subscribing_after_dropping_subscription(cx: &mut TestAppContext) { cx.foreground().forbid_parking(); let user_id = 5; @@ -1078,7 +1075,7 @@ mod tests { } #[gpui::test] - async fn test_dropping_subscription_in_handler(mut cx: TestAppContext) { + async fn test_dropping_subscription_in_handler(cx: &mut TestAppContext) { cx.foreground().forbid_parking(); let user_id = 5; @@ -1095,7 +1092,7 @@ mod tests { async { Ok(()) } }, ); - model.update(&mut cx, |model, _| { + model.update(cx, |model, _| { model.subscription = Some(subscription); }); server.send(proto::Ping {}); diff --git a/crates/diagnostics/src/diagnostics.rs b/crates/diagnostics/src/diagnostics.rs index bb269fdbab1f1cf883337a7b46a7152a0ad8a76b..60a57d3cca7cf66f3fea2a8a1c053058bf0e42d3 100644 --- a/crates/diagnostics/src/diagnostics.rs +++ b/crates/diagnostics/src/diagnostics.rs @@ -725,7 +725,7 @@ mod tests { use workspace::WorkspaceParams; #[gpui::test] - async fn test_diagnostics(mut cx: TestAppContext) { + async fn test_diagnostics(cx: &mut TestAppContext) { let params = cx.update(WorkspaceParams::test); let project = params.project.clone(); let workspace = cx.add_view(0, |cx| Workspace::new(¶ms, cx)); @@ -760,14 +760,14 @@ mod tests { .await; project - .update(&mut cx, |project, cx| { + .update(cx, |project, cx| { project.find_or_create_local_worktree("/test", false, cx) }) .await .unwrap(); // Create some diagnostics - project.update(&mut cx, |project, cx| { + project.update(cx, |project, cx| { project .update_diagnostic_entries( PathBuf::from("/test/main.rs"), @@ -856,7 +856,7 @@ mod tests { }); 
view.next_notification(&cx).await; - view.update(&mut cx, |view, cx| { + view.update(cx, |view, cx| { let editor = view.editor.update(cx, |editor, cx| editor.snapshot(cx)); assert_eq!( @@ -920,7 +920,7 @@ mod tests { }); // Diagnostics are added for another earlier path. - project.update(&mut cx, |project, cx| { + project.update(cx, |project, cx| { project.disk_based_diagnostics_started(cx); project .update_diagnostic_entries( @@ -944,7 +944,7 @@ mod tests { }); view.next_notification(&cx).await; - view.update(&mut cx, |view, cx| { + view.update(cx, |view, cx| { let editor = view.editor.update(cx, |editor, cx| editor.snapshot(cx)); assert_eq!( @@ -1021,7 +1021,7 @@ mod tests { }); // Diagnostics are added to the first path - project.update(&mut cx, |project, cx| { + project.update(cx, |project, cx| { project.disk_based_diagnostics_started(cx); project .update_diagnostic_entries( @@ -1059,7 +1059,7 @@ mod tests { }); view.next_notification(&cx).await; - view.update(&mut cx, |view, cx| { + view.update(cx, |view, cx| { let editor = view.editor.update(cx, |editor, cx| editor.snapshot(cx)); assert_eq!( diff --git a/crates/editor/src/display_map.rs b/crates/editor/src/display_map.rs index dd23799175d4b7b04415df07ae99a0921b678e4f..053883e93888914bbb8277f65fe54c5850ccb44a 100644 --- a/crates/editor/src/display_map.rs +++ b/crates/editor/src/display_map.rs @@ -464,7 +464,7 @@ mod tests { use Bias::*; #[gpui::test(iterations = 100)] - async fn test_random_display_map(mut cx: gpui::TestAppContext, mut rng: StdRng) { + async fn test_random_display_map(cx: &mut gpui::TestAppContext, mut rng: StdRng) { cx.foreground().set_block_on_ticks(0..=50); cx.foreground().forbid_parking(); let operations = env::var("OPERATIONS") @@ -512,11 +512,11 @@ mod tests { cx, ) }); - let mut notifications = observe(&map, &mut cx); + let mut notifications = observe(&map, cx); let mut fold_count = 0; let mut blocks = Vec::new(); - let snapshot = map.update(&mut cx, |map, cx| map.snapshot(cx)); + let 
snapshot = map.update(cx, |map, cx| map.snapshot(cx)); log::info!("buffer text: {:?}", snapshot.buffer_snapshot.text()); log::info!("fold text: {:?}", snapshot.folds_snapshot.text()); log::info!("tab text: {:?}", snapshot.tabs_snapshot.text()); @@ -533,10 +533,10 @@ mod tests { Some(rng.gen_range(0.0..=max_wrap_width)) }; log::info!("setting wrap width to {:?}", wrap_width); - map.update(&mut cx, |map, cx| map.set_wrap_width(wrap_width, cx)); + map.update(cx, |map, cx| map.set_wrap_width(wrap_width, cx)); } 20..=44 => { - map.update(&mut cx, |map, cx| { + map.update(cx, |map, cx| { if rng.gen() || blocks.is_empty() { let buffer = map.snapshot(cx).buffer_snapshot; let block_properties = (0..rng.gen_range(1..=1)) @@ -582,7 +582,7 @@ mod tests { 45..=79 => { let mut ranges = Vec::new(); for _ in 0..rng.gen_range(1..=3) { - buffer.read_with(&cx, |buffer, cx| { + buffer.read_with(cx, |buffer, cx| { let buffer = buffer.read(cx); let end = buffer.clip_offset(rng.gen_range(0..=buffer.len()), Right); let start = buffer.clip_offset(rng.gen_range(0..=end), Left); @@ -592,26 +592,26 @@ mod tests { if rng.gen() && fold_count > 0 { log::info!("unfolding ranges: {:?}", ranges); - map.update(&mut cx, |map, cx| { + map.update(cx, |map, cx| { map.unfold(ranges, cx); }); } else { log::info!("folding ranges: {:?}", ranges); - map.update(&mut cx, |map, cx| { + map.update(cx, |map, cx| { map.fold(ranges, cx); }); } } _ => { - buffer.update(&mut cx, |buffer, cx| buffer.randomly_edit(&mut rng, 5, cx)); + buffer.update(cx, |buffer, cx| buffer.randomly_edit(&mut rng, 5, cx)); } } - if map.read_with(&cx, |map, cx| map.is_rewrapping(cx)) { + if map.read_with(cx, |map, cx| map.is_rewrapping(cx)) { notifications.next().await.unwrap(); } - let snapshot = map.update(&mut cx, |map, cx| map.snapshot(cx)); + let snapshot = map.update(cx, |map, cx| map.snapshot(cx)); fold_count = snapshot.fold_count(); log::info!("buffer text: {:?}", snapshot.buffer_snapshot.text()); log::info!("fold text: {:?}", 
snapshot.folds_snapshot.text()); @@ -846,7 +846,7 @@ mod tests { } #[gpui::test] - async fn test_chunks(mut cx: gpui::TestAppContext) { + async fn test_chunks(cx: &mut gpui::TestAppContext) { use unindent::Unindent as _; let text = r#" @@ -914,7 +914,7 @@ mod tests { ] ); - map.update(&mut cx, |map, cx| { + map.update(cx, |map, cx| { map.fold(vec![Point::new(0, 6)..Point::new(3, 2)], cx) }); assert_eq!( @@ -931,7 +931,7 @@ mod tests { } #[gpui::test] - async fn test_chunks_with_soft_wrapping(mut cx: gpui::TestAppContext) { + async fn test_chunks_with_soft_wrapping(cx: &mut gpui::TestAppContext) { use unindent::Unindent as _; cx.foreground().set_block_on_ticks(usize::MAX..=usize::MAX); @@ -996,7 +996,7 @@ mod tests { [("{}\n\n".to_string(), None)] ); - map.update(&mut cx, |map, cx| { + map.update(cx, |map, cx| { map.fold(vec![Point::new(0, 6)..Point::new(3, 2)], cx) }); assert_eq!( diff --git a/crates/editor/src/display_map/wrap_map.rs b/crates/editor/src/display_map/wrap_map.rs index 566efe7f4f99f0eaac91cfa53e41fb8c0cbe05f9..7858197902dcd90a6f98e7739991744ed8639af6 100644 --- a/crates/editor/src/display_map/wrap_map.rs +++ b/crates/editor/src/display_map/wrap_map.rs @@ -1010,7 +1010,7 @@ mod tests { use text::Rope; #[gpui::test(iterations = 100)] - async fn test_random_wraps(mut cx: gpui::TestAppContext, mut rng: StdRng) { + async fn test_random_wraps(cx: &mut gpui::TestAppContext, mut rng: StdRng) { cx.foreground().set_block_on_ticks(0..=50); cx.foreground().forbid_parking(); let operations = env::var("OPERATIONS") @@ -1043,7 +1043,7 @@ mod tests { MultiBuffer::build_simple(&text, cx) } }); - let mut buffer_snapshot = buffer.read_with(&cx, |buffer, cx| buffer.snapshot(cx)); + let mut buffer_snapshot = buffer.read_with(cx, |buffer, cx| buffer.snapshot(cx)); let (mut fold_map, folds_snapshot) = FoldMap::new(buffer_snapshot.clone()); let (tab_map, tabs_snapshot) = TabMap::new(folds_snapshot.clone(), tab_size); log::info!("Unwrapped text (no folds): {:?}", 
buffer_snapshot.text()); @@ -1059,13 +1059,13 @@ mod tests { let (wrap_map, _) = cx.update(|cx| WrapMap::new(tabs_snapshot.clone(), font_id, font_size, wrap_width, cx)); - let mut notifications = observe(&wrap_map, &mut cx); + let mut notifications = observe(&wrap_map, cx); - if wrap_map.read_with(&cx, |map, _| map.is_rewrapping()) { + if wrap_map.read_with(cx, |map, _| map.is_rewrapping()) { notifications.next().await.unwrap(); } - let (initial_snapshot, _) = wrap_map.update(&mut cx, |map, cx| { + let (initial_snapshot, _) = wrap_map.update(cx, |map, cx| { assert!(!map.is_rewrapping()); map.sync(tabs_snapshot.clone(), Vec::new(), cx) }); @@ -1091,20 +1091,20 @@ mod tests { Some(rng.gen_range(0.0..=1000.0)) }; log::info!("Setting wrap width to {:?}", wrap_width); - wrap_map.update(&mut cx, |map, cx| map.set_wrap_width(wrap_width, cx)); + wrap_map.update(cx, |map, cx| map.set_wrap_width(wrap_width, cx)); } 20..=39 => { for (folds_snapshot, fold_edits) in fold_map.randomly_mutate(&mut rng) { let (tabs_snapshot, tab_edits) = tab_map.sync(folds_snapshot, fold_edits); - let (mut snapshot, wrap_edits) = wrap_map - .update(&mut cx, |map, cx| map.sync(tabs_snapshot, tab_edits, cx)); + let (mut snapshot, wrap_edits) = + wrap_map.update(cx, |map, cx| map.sync(tabs_snapshot, tab_edits, cx)); snapshot.check_invariants(); snapshot.verify_chunks(&mut rng); edits.push((snapshot, wrap_edits)); } } _ => { - buffer.update(&mut cx, |buffer, cx| { + buffer.update(cx, |buffer, cx| { let subscription = buffer.subscribe(); let edit_count = rng.gen_range(1..=5); buffer.randomly_mutate(&mut rng, edit_count, cx); @@ -1125,24 +1125,23 @@ mod tests { let unwrapped_text = tabs_snapshot.text(); let expected_text = wrap_text(&unwrapped_text, wrap_width, &mut line_wrapper); - let (mut snapshot, wrap_edits) = wrap_map.update(&mut cx, |map, cx| { - map.sync(tabs_snapshot.clone(), tab_edits, cx) - }); + let (mut snapshot, wrap_edits) = + wrap_map.update(cx, |map, cx| map.sync(tabs_snapshot.clone(), 
tab_edits, cx)); snapshot.check_invariants(); snapshot.verify_chunks(&mut rng); edits.push((snapshot, wrap_edits)); - if wrap_map.read_with(&cx, |map, _| map.is_rewrapping()) && rng.gen_bool(0.4) { + if wrap_map.read_with(cx, |map, _| map.is_rewrapping()) && rng.gen_bool(0.4) { log::info!("Waiting for wrapping to finish"); - while wrap_map.read_with(&cx, |map, _| map.is_rewrapping()) { + while wrap_map.read_with(cx, |map, _| map.is_rewrapping()) { notifications.next().await.unwrap(); } - wrap_map.read_with(&cx, |map, _| assert!(map.pending_edits.is_empty())); + wrap_map.read_with(cx, |map, _| assert!(map.pending_edits.is_empty())); } - if !wrap_map.read_with(&cx, |map, _| map.is_rewrapping()) { + if !wrap_map.read_with(cx, |map, _| map.is_rewrapping()) { let (mut wrapped_snapshot, wrap_edits) = - wrap_map.update(&mut cx, |map, cx| map.sync(tabs_snapshot, Vec::new(), cx)); + wrap_map.update(cx, |map, cx| map.sync(tabs_snapshot, Vec::new(), cx)); let actual_text = wrapped_snapshot.text(); let actual_longest_row = wrapped_snapshot.longest_row(); log::info!("Wrapping finished: {:?}", actual_text); @@ -1220,13 +1219,13 @@ mod tests { assert_eq!(initial_text.to_string(), snapshot_text.to_string()); } - if wrap_map.read_with(&cx, |map, _| map.is_rewrapping()) { + if wrap_map.read_with(cx, |map, _| map.is_rewrapping()) { log::info!("Waiting for wrapping to finish"); - while wrap_map.read_with(&cx, |map, _| map.is_rewrapping()) { + while wrap_map.read_with(cx, |map, _| map.is_rewrapping()) { notifications.next().await.unwrap(); } } - wrap_map.read_with(&cx, |map, _| assert!(map.pending_edits.is_empty())); + wrap_map.read_with(cx, |map, _| assert!(map.pending_edits.is_empty())); } fn wrap_text( diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 1dd4a514257e54f8733836533839771663b00199..f36edb99045b07d4cf2785dad6b4fa1b27290bd8 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -7772,7 +7772,7 @@ mod tests { } #[gpui::test] 
- async fn test_select_larger_smaller_syntax_node(mut cx: gpui::TestAppContext) { + async fn test_select_larger_smaller_syntax_node(cx: &mut gpui::TestAppContext) { let settings = cx.read(Settings::test); let language = Arc::new(Language::new( LanguageConfig::default(), @@ -7794,7 +7794,7 @@ mod tests { view.condition(&cx, |view, cx| !view.buffer.read(cx).is_parsing(cx)) .await; - view.update(&mut cx, |view, cx| { + view.update(cx, |view, cx| { view.select_display_ranges( &[ DisplayPoint::new(0, 25)..DisplayPoint::new(0, 25), @@ -7806,7 +7806,7 @@ mod tests { view.select_larger_syntax_node(&SelectLargerSyntaxNode, cx); }); assert_eq!( - view.update(&mut cx, |view, cx| view.selected_display_ranges(cx)), + view.update(cx, |view, cx| view.selected_display_ranges(cx)), &[ DisplayPoint::new(0, 23)..DisplayPoint::new(0, 27), DisplayPoint::new(2, 35)..DisplayPoint::new(2, 7), @@ -7814,50 +7814,50 @@ mod tests { ] ); - view.update(&mut cx, |view, cx| { + view.update(cx, |view, cx| { view.select_larger_syntax_node(&SelectLargerSyntaxNode, cx); }); assert_eq!( - view.update(&mut cx, |view, cx| view.selected_display_ranges(cx)), + view.update(cx, |view, cx| view.selected_display_ranges(cx)), &[ DisplayPoint::new(0, 16)..DisplayPoint::new(0, 28), DisplayPoint::new(4, 1)..DisplayPoint::new(2, 0), ] ); - view.update(&mut cx, |view, cx| { + view.update(cx, |view, cx| { view.select_larger_syntax_node(&SelectLargerSyntaxNode, cx); }); assert_eq!( - view.update(&mut cx, |view, cx| view.selected_display_ranges(cx)), + view.update(cx, |view, cx| view.selected_display_ranges(cx)), &[DisplayPoint::new(5, 0)..DisplayPoint::new(0, 0)] ); // Trying to expand the selected syntax node one more time has no effect. 
- view.update(&mut cx, |view, cx| { + view.update(cx, |view, cx| { view.select_larger_syntax_node(&SelectLargerSyntaxNode, cx); }); assert_eq!( - view.update(&mut cx, |view, cx| view.selected_display_ranges(cx)), + view.update(cx, |view, cx| view.selected_display_ranges(cx)), &[DisplayPoint::new(5, 0)..DisplayPoint::new(0, 0)] ); - view.update(&mut cx, |view, cx| { + view.update(cx, |view, cx| { view.select_smaller_syntax_node(&SelectSmallerSyntaxNode, cx); }); assert_eq!( - view.update(&mut cx, |view, cx| view.selected_display_ranges(cx)), + view.update(cx, |view, cx| view.selected_display_ranges(cx)), &[ DisplayPoint::new(0, 16)..DisplayPoint::new(0, 28), DisplayPoint::new(4, 1)..DisplayPoint::new(2, 0), ] ); - view.update(&mut cx, |view, cx| { + view.update(cx, |view, cx| { view.select_smaller_syntax_node(&SelectSmallerSyntaxNode, cx); }); assert_eq!( - view.update(&mut cx, |view, cx| view.selected_display_ranges(cx)), + view.update(cx, |view, cx| view.selected_display_ranges(cx)), &[ DisplayPoint::new(0, 23)..DisplayPoint::new(0, 27), DisplayPoint::new(2, 35)..DisplayPoint::new(2, 7), @@ -7865,11 +7865,11 @@ mod tests { ] ); - view.update(&mut cx, |view, cx| { + view.update(cx, |view, cx| { view.select_smaller_syntax_node(&SelectSmallerSyntaxNode, cx); }); assert_eq!( - view.update(&mut cx, |view, cx| view.selected_display_ranges(cx)), + view.update(cx, |view, cx| view.selected_display_ranges(cx)), &[ DisplayPoint::new(0, 25)..DisplayPoint::new(0, 25), DisplayPoint::new(2, 24)..DisplayPoint::new(2, 12), @@ -7878,11 +7878,11 @@ mod tests { ); // Trying to shrink the selected syntax node one more time has no effect. 
- view.update(&mut cx, |view, cx| { + view.update(cx, |view, cx| { view.select_smaller_syntax_node(&SelectSmallerSyntaxNode, cx); }); assert_eq!( - view.update(&mut cx, |view, cx| view.selected_display_ranges(cx)), + view.update(cx, |view, cx| view.selected_display_ranges(cx)), &[ DisplayPoint::new(0, 25)..DisplayPoint::new(0, 25), DisplayPoint::new(2, 24)..DisplayPoint::new(2, 12), @@ -7892,7 +7892,7 @@ mod tests { // Ensure that we keep expanding the selection if the larger selection starts or ends within // a fold. - view.update(&mut cx, |view, cx| { + view.update(cx, |view, cx| { view.fold_ranges( vec![ Point::new(0, 21)..Point::new(0, 24), @@ -7903,7 +7903,7 @@ mod tests { view.select_larger_syntax_node(&SelectLargerSyntaxNode, cx); }); assert_eq!( - view.update(&mut cx, |view, cx| view.selected_display_ranges(cx)), + view.update(cx, |view, cx| view.selected_display_ranges(cx)), &[ DisplayPoint::new(0, 16)..DisplayPoint::new(0, 28), DisplayPoint::new(2, 35)..DisplayPoint::new(2, 7), @@ -7913,7 +7913,7 @@ mod tests { } #[gpui::test] - async fn test_autoindent_selections(mut cx: gpui::TestAppContext) { + async fn test_autoindent_selections(cx: &mut gpui::TestAppContext) { let settings = cx.read(Settings::test); let language = Arc::new( Language::new( @@ -7954,7 +7954,7 @@ mod tests { .condition(&cx, |editor, cx| !editor.buffer.read(cx).is_parsing(cx)) .await; - editor.update(&mut cx, |editor, cx| { + editor.update(cx, |editor, cx| { editor.select_ranges([5..5, 8..8, 9..9], None, cx); editor.newline(&Newline, cx); assert_eq!(editor.text(cx), "fn a(\n \n) {\n \n}\n"); @@ -7970,7 +7970,7 @@ mod tests { } #[gpui::test] - async fn test_autoclose_pairs(mut cx: gpui::TestAppContext) { + async fn test_autoclose_pairs(cx: &mut gpui::TestAppContext) { let settings = cx.read(Settings::test); let language = Arc::new(Language::new( LanguageConfig { @@ -8007,7 +8007,7 @@ mod tests { view.condition(&cx, |view, cx| !view.buffer.read(cx).is_parsing(cx)) .await; - 
view.update(&mut cx, |view, cx| { + view.update(cx, |view, cx| { view.select_display_ranges( &[ DisplayPoint::new(0, 0)..DisplayPoint::new(0, 1), @@ -8081,7 +8081,7 @@ mod tests { } #[gpui::test] - async fn test_snippets(mut cx: gpui::TestAppContext) { + async fn test_snippets(cx: &mut gpui::TestAppContext) { let settings = cx.read(Settings::test); let text = " @@ -8093,7 +8093,7 @@ mod tests { let buffer = cx.update(|cx| MultiBuffer::build_simple(&text, cx)); let (_, editor) = cx.add_window(|cx| build_editor(buffer, settings, cx)); - editor.update(&mut cx, |editor, cx| { + editor.update(cx, |editor, cx| { let buffer = &editor.snapshot(cx).buffer_snapshot; let snippet = Snippet::parse("f(${1:one}, ${2:two}, ${1:three})$0").unwrap(); let insertion_ranges = [ @@ -8188,7 +8188,7 @@ mod tests { } #[gpui::test] - async fn test_completion(mut cx: gpui::TestAppContext) { + async fn test_completion(cx: &mut gpui::TestAppContext) { let settings = cx.read(Settings::test); let (language_server, mut fake) = cx.update(|cx| { lsp::LanguageServer::fake_with_capabilities( @@ -8213,23 +8213,23 @@ mod tests { let fs = FakeFs::new(cx.background().clone()); fs.insert_file("/file", text).await; - let project = Project::test(fs, &mut cx); + let project = Project::test(fs, cx); let (worktree, relative_path) = project - .update(&mut cx, |project, cx| { + .update(cx, |project, cx| { project.find_or_create_local_worktree("/file", false, cx) }) .await .unwrap(); let project_path = ProjectPath { - worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()), + worktree_id: worktree.read_with(cx, |worktree, _| worktree.id()), path: relative_path.into(), }; let buffer = project - .update(&mut cx, |project, cx| project.open_buffer(project_path, cx)) + .update(cx, |project, cx| project.open_buffer(project_path, cx)) .await .unwrap(); - buffer.update(&mut cx, |buffer, cx| { + buffer.update(cx, |buffer, cx| { buffer.set_language_server(Some(language_server), cx); }); @@ -8238,7 +8238,7 @@ mod 
tests { let (_, editor) = cx.add_window(|cx| build_editor(buffer, settings, cx)); - editor.update(&mut cx, |editor, cx| { + editor.update(cx, |editor, cx| { editor.project = Some(project); editor.select_ranges([Point::new(0, 3)..Point::new(0, 3)], None, cx); editor.handle_input(&Input(".".to_string()), cx); @@ -8258,7 +8258,7 @@ mod tests { .condition(&cx, |editor, _| editor.context_menu_visible()) .await; - let apply_additional_edits = editor.update(&mut cx, |editor, cx| { + let apply_additional_edits = editor.update(cx, |editor, cx| { editor.move_down(&MoveDown, cx); let apply_additional_edits = editor .confirm_completion(&ConfirmCompletion(None), cx) @@ -8282,7 +8282,7 @@ mod tests { .await; apply_additional_edits.await.unwrap(); assert_eq!( - editor.read_with(&cx, |editor, cx| editor.text(cx)), + editor.read_with(cx, |editor, cx| editor.text(cx)), " one.second_completion two @@ -8292,7 +8292,7 @@ mod tests { .unindent() ); - editor.update(&mut cx, |editor, cx| { + editor.update(cx, |editor, cx| { editor.select_ranges( [ Point::new(1, 3)..Point::new(1, 3), @@ -8323,7 +8323,7 @@ mod tests { .condition(&cx, |editor, _| editor.context_menu_visible()) .await; - editor.update(&mut cx, |editor, cx| { + editor.update(cx, |editor, cx| { editor.handle_input(&Input("i".to_string()), cx); }); @@ -8342,7 +8342,7 @@ mod tests { .condition(&cx, |editor, _| editor.context_menu_visible()) .await; - let apply_additional_edits = editor.update(&mut cx, |editor, cx| { + let apply_additional_edits = editor.update(cx, |editor, cx| { let apply_additional_edits = editor .confirm_completion(&ConfirmCompletion(None), cx) .unwrap(); @@ -8421,7 +8421,7 @@ mod tests { } #[gpui::test] - async fn test_toggle_comment(mut cx: gpui::TestAppContext) { + async fn test_toggle_comment(cx: &mut gpui::TestAppContext) { let settings = cx.read(Settings::test); let language = Arc::new(Language::new( LanguageConfig { @@ -8444,7 +8444,7 @@ mod tests { let buffer = cx.add_model(|cx| 
MultiBuffer::singleton(buffer, cx)); let (_, view) = cx.add_window(|cx| build_editor(buffer, settings, cx)); - view.update(&mut cx, |editor, cx| { + view.update(cx, |editor, cx| { // If multiple selections intersect a line, the line is only // toggled once. editor.select_display_ranges( @@ -8678,7 +8678,7 @@ mod tests { } #[gpui::test] - async fn test_extra_newline_insertion(mut cx: gpui::TestAppContext) { + async fn test_extra_newline_insertion(cx: &mut gpui::TestAppContext) { let settings = cx.read(Settings::test); let language = Arc::new(Language::new( LanguageConfig { @@ -8715,7 +8715,7 @@ mod tests { view.condition(&cx, |view, cx| !view.buffer.read(cx).is_parsing(cx)) .await; - view.update(&mut cx, |view, cx| { + view.update(cx, |view, cx| { view.select_display_ranges( &[ DisplayPoint::new(0, 2)..DisplayPoint::new(0, 3), diff --git a/crates/file_finder/src/file_finder.rs b/crates/file_finder/src/file_finder.rs index 1ecbad6d12ed939dacd99f9a5e9bd276b27a8a66..572ce7b473727c6dc2f5e0e90f9bae0d61398fa4 100644 --- a/crates/file_finder/src/file_finder.rs +++ b/crates/file_finder/src/file_finder.rs @@ -421,7 +421,7 @@ mod tests { use workspace::{Workspace, WorkspaceParams}; #[gpui::test] - async fn test_matching_paths(mut cx: gpui::TestAppContext) { + async fn test_matching_paths(cx: &mut gpui::TestAppContext) { let mut path_openers = Vec::new(); cx.update(|cx| { super::init(cx); @@ -447,7 +447,7 @@ mod tests { let (window_id, workspace) = cx.add_window(|cx| Workspace::new(¶ms, cx)); params .project - .update(&mut cx, |project, cx| { + .update(cx, |project, cx| { project.find_or_create_local_worktree("/root", false, cx) }) .await @@ -496,7 +496,7 @@ mod tests { } #[gpui::test] - async fn test_matching_cancellation(mut cx: gpui::TestAppContext) { + async fn test_matching_cancellation(cx: &mut gpui::TestAppContext) { let params = cx.update(WorkspaceParams::test); let fs = params.fs.as_fake(); fs.insert_tree( @@ -516,7 +516,7 @@ mod tests { let (_, workspace) = 
cx.add_window(|cx| Workspace::new(¶ms, cx)); params .project - .update(&mut cx, |project, cx| { + .update(cx, |project, cx| { project.find_or_create_local_worktree("/dir", false, cx) }) .await @@ -533,12 +533,12 @@ mod tests { let query = "hi".to_string(); finder - .update(&mut cx, |f, cx| f.spawn_search(query.clone(), cx)) + .update(cx, |f, cx| f.spawn_search(query.clone(), cx)) .unwrap() .await; - finder.read_with(&cx, |f, _| assert_eq!(f.matches.len(), 5)); + finder.read_with(cx, |f, _| assert_eq!(f.matches.len(), 5)); - finder.update(&mut cx, |finder, cx| { + finder.update(cx, |finder, cx| { let matches = finder.matches.clone(); // Simulate a search being cancelled after the time limit, @@ -571,7 +571,7 @@ mod tests { } #[gpui::test] - async fn test_single_file_worktrees(mut cx: gpui::TestAppContext) { + async fn test_single_file_worktrees(cx: &mut gpui::TestAppContext) { let params = cx.update(WorkspaceParams::test); params .fs @@ -582,7 +582,7 @@ mod tests { let (_, workspace) = cx.add_window(|cx| Workspace::new(¶ms, cx)); params .project - .update(&mut cx, |project, cx| { + .update(cx, |project, cx| { project.find_or_create_local_worktree("/root/the-parent-dir/the-file", false, cx) }) .await @@ -600,7 +600,7 @@ mod tests { // Even though there is only one worktree, that worktree's filename // is included in the matching, because the worktree is a single file. finder - .update(&mut cx, |f, cx| f.spawn_search("thf".into(), cx)) + .update(cx, |f, cx| f.spawn_search("thf".into(), cx)) .unwrap() .await; cx.read(|cx| { @@ -618,14 +618,14 @@ mod tests { // Since the worktree root is a file, searching for its name followed by a slash does // not match anything. 
finder - .update(&mut cx, |f, cx| f.spawn_search("thf/".into(), cx)) + .update(cx, |f, cx| f.spawn_search("thf/".into(), cx)) .unwrap() .await; - finder.read_with(&cx, |f, _| assert_eq!(f.matches.len(), 0)); + finder.read_with(cx, |f, _| assert_eq!(f.matches.len(), 0)); } #[gpui::test(retries = 5)] - async fn test_multiple_matches_with_same_relative_path(mut cx: gpui::TestAppContext) { + async fn test_multiple_matches_with_same_relative_path(cx: &mut gpui::TestAppContext) { let params = cx.update(WorkspaceParams::test); params .fs @@ -642,7 +642,7 @@ mod tests { let (_, workspace) = cx.add_window(|cx| Workspace::new(¶ms, cx)); workspace - .update(&mut cx, |workspace, cx| { + .update(cx, |workspace, cx| { workspace.open_paths( &[PathBuf::from("/root/dir1"), PathBuf::from("/root/dir2")], cx, @@ -662,12 +662,12 @@ mod tests { // Run a search that matches two files with the same relative path. finder - .update(&mut cx, |f, cx| f.spawn_search("a.t".into(), cx)) + .update(cx, |f, cx| f.spawn_search("a.t".into(), cx)) .unwrap() .await; // Can switch between different matches with the same relative path. 
- finder.update(&mut cx, |f, cx| { + finder.update(cx, |f, cx| { assert_eq!(f.matches.len(), 2); assert_eq!(f.selected_index(), 0); f.select_next(&SelectNext, cx); diff --git a/crates/gpui/src/app.rs b/crates/gpui/src/app.rs index 9842e14fbdc518e43015fb6da9f02a841ef486b3..90eeccdaf6bc65aa0979f9a8d429a7dc148002eb 100644 --- a/crates/gpui/src/app.rs +++ b/crates/gpui/src/app.rs @@ -4726,7 +4726,7 @@ mod tests { } #[crate::test(self)] - async fn test_model_condition(mut cx: TestAppContext) { + async fn test_model_condition(cx: &mut TestAppContext) { struct Counter(usize); impl super::Entity for Counter { @@ -4746,23 +4746,23 @@ mod tests { let condition2 = model.condition(&cx, |model, _| model.0 == 3); smol::pin!(condition1, condition2); - model.update(&mut cx, |model, cx| model.inc(cx)); + model.update(cx, |model, cx| model.inc(cx)); assert_eq!(poll_once(&mut condition1).await, None); assert_eq!(poll_once(&mut condition2).await, None); - model.update(&mut cx, |model, cx| model.inc(cx)); + model.update(cx, |model, cx| model.inc(cx)); assert_eq!(poll_once(&mut condition1).await, Some(())); assert_eq!(poll_once(&mut condition2).await, None); - model.update(&mut cx, |model, cx| model.inc(cx)); + model.update(cx, |model, cx| model.inc(cx)); assert_eq!(poll_once(&mut condition2).await, Some(())); - model.update(&mut cx, |_, cx| cx.notify()); + model.update(cx, |_, cx| cx.notify()); } #[crate::test(self)] #[should_panic] - async fn test_model_condition_timeout(mut cx: TestAppContext) { + async fn test_model_condition_timeout(cx: &mut TestAppContext) { struct Model; impl super::Entity for Model { @@ -4775,7 +4775,7 @@ mod tests { #[crate::test(self)] #[should_panic(expected = "model dropped with pending condition")] - async fn test_model_condition_panic_on_drop(mut cx: TestAppContext) { + async fn test_model_condition_panic_on_drop(cx: &mut TestAppContext) { struct Model; impl super::Entity for Model { @@ -4789,7 +4789,7 @@ mod tests { } #[crate::test(self)] - async fn 
test_view_condition(mut cx: TestAppContext) { + async fn test_view_condition(cx: &mut TestAppContext) { struct Counter(usize); impl super::Entity for Counter { @@ -4819,22 +4819,22 @@ mod tests { let condition2 = view.condition(&cx, |view, _| view.0 == 3); smol::pin!(condition1, condition2); - view.update(&mut cx, |view, cx| view.inc(cx)); + view.update(cx, |view, cx| view.inc(cx)); assert_eq!(poll_once(&mut condition1).await, None); assert_eq!(poll_once(&mut condition2).await, None); - view.update(&mut cx, |view, cx| view.inc(cx)); + view.update(cx, |view, cx| view.inc(cx)); assert_eq!(poll_once(&mut condition1).await, Some(())); assert_eq!(poll_once(&mut condition2).await, None); - view.update(&mut cx, |view, cx| view.inc(cx)); + view.update(cx, |view, cx| view.inc(cx)); assert_eq!(poll_once(&mut condition2).await, Some(())); - view.update(&mut cx, |_, cx| cx.notify()); + view.update(cx, |_, cx| cx.notify()); } #[crate::test(self)] #[should_panic] - async fn test_view_condition_timeout(mut cx: TestAppContext) { + async fn test_view_condition_timeout(cx: &mut TestAppContext) { struct View; impl super::Entity for View { @@ -4857,7 +4857,7 @@ mod tests { #[crate::test(self)] #[should_panic(expected = "view dropped with pending condition")] - async fn test_view_condition_panic_on_drop(mut cx: TestAppContext) { + async fn test_view_condition_panic_on_drop(cx: &mut TestAppContext) { struct View; impl super::Entity for View { diff --git a/crates/gpui_macros/src/gpui_macros.rs b/crates/gpui_macros/src/gpui_macros.rs index 8c7b8dc71db5c8a12cc5f09a8e42b5805661ac27..83bb33b756e4aa7ed1536f7a53f9e4e59a0c4339 100644 --- a/crates/gpui_macros/src/gpui_macros.rs +++ b/crates/gpui_macros/src/gpui_macros.rs @@ -65,26 +65,13 @@ pub fn test(args: TokenStream, function: TokenStream) -> TokenStream { let mut outer_fn: ItemFn = if inner_fn.sig.asyncness.is_some() { // Pass to the test function the number of app contexts that it needs, // based on its parameter list. 
+ let mut cx_vars = proc_macro2::TokenStream::new(); let mut inner_fn_args = proc_macro2::TokenStream::new(); for (ix, arg) in inner_fn.sig.inputs.iter().enumerate() { if let FnArg::Typed(arg) = arg { if let Type::Path(ty) = &*arg.ty { let last_segment = ty.path.segments.last(); match last_segment.map(|s| s.ident.to_string()).as_deref() { - Some("TestAppContext") => { - let first_entity_id = ix * 100_000; - inner_fn_args.extend(quote!( - #namespace::TestAppContext::new( - foreground_platform.clone(), - cx.platform().clone(), - deterministic.build_foreground(#ix), - deterministic.build_background(), - cx.font_cache().clone(), - cx.leak_detector(), - #first_entity_id, - ), - )); - } Some("StdRng") => { inner_fn_args.extend(quote!(rand::SeedableRng::seed_from_u64(seed),)); } @@ -98,6 +85,42 @@ pub fn test(args: TokenStream, function: TokenStream) -> TokenStream { ) } } + } else if let Type::Reference(ty) = &*arg.ty { + match &*ty.elem { + Type::Path(ty) => { + let last_segment = ty.path.segments.last(); + match last_segment.map(|s| s.ident.to_string()).as_deref() { + Some("TestAppContext") => { + let first_entity_id = ix * 100_000; + let cx_varname = format_ident!("cx_{}", ix); + cx_vars.extend(quote!( + let mut #cx_varname = #namespace::TestAppContext::new( + foreground_platform.clone(), + cx.platform().clone(), + deterministic.build_foreground(#ix), + deterministic.build_background(), + cx.font_cache().clone(), + cx.leak_detector(), + #first_entity_id, + ); + )); + inner_fn_args.extend(quote!(&mut #cx_varname,)); + } + _ => { + return TokenStream::from( + syn::Error::new_spanned(arg, "invalid argument") + .into_compile_error(), + ) + } + } + } + _ => { + return TokenStream::from( + syn::Error::new_spanned(arg, "invalid argument") + .into_compile_error(), + ) + } + } } else { return TokenStream::from( syn::Error::new_spanned(arg, "invalid argument").into_compile_error(), @@ -120,7 +143,9 @@ pub fn test(args: TokenStream, function: TokenStream) -> TokenStream { 
#starting_seed as u64, #max_retries, &mut |cx, foreground_platform, deterministic, seed, is_last_iteration| { - cx.foreground().run(#inner_fn_name(#inner_fn_args)) + #cx_vars + cx.foreground().run(#inner_fn_name(#inner_fn_args)); + cx.foreground().run_until_parked(); } ); } diff --git a/crates/language/src/tests.rs b/crates/language/src/tests.rs index eac6c6b5d6be0741ae750768ec315d16790a9a5f..247385a810a4891695030db5d1c069f9191621a0 100644 --- a/crates/language/src/tests.rs +++ b/crates/language/src/tests.rs @@ -125,23 +125,23 @@ fn test_edit_events(cx: &mut gpui::MutableAppContext) { } #[gpui::test] -async fn test_apply_diff(mut cx: gpui::TestAppContext) { +async fn test_apply_diff(cx: &mut gpui::TestAppContext) { let text = "a\nbb\nccc\ndddd\neeeee\nffffff\n"; let buffer = cx.add_model(|cx| Buffer::new(0, text, cx)); let text = "a\nccc\ndddd\nffffff\n"; - let diff = buffer.read_with(&cx, |b, cx| b.diff(text.into(), cx)).await; - buffer.update(&mut cx, |b, cx| b.apply_diff(diff, cx)); + let diff = buffer.read_with(cx, |b, cx| b.diff(text.into(), cx)).await; + buffer.update(cx, |b, cx| b.apply_diff(diff, cx)); cx.read(|cx| assert_eq!(buffer.read(cx).text(), text)); let text = "a\n1\n\nccc\ndd2dd\nffffff\n"; - let diff = buffer.read_with(&cx, |b, cx| b.diff(text.into(), cx)).await; - buffer.update(&mut cx, |b, cx| b.apply_diff(diff, cx)); + let diff = buffer.read_with(cx, |b, cx| b.diff(text.into(), cx)).await; + buffer.update(cx, |b, cx| b.apply_diff(diff, cx)); cx.read(|cx| assert_eq!(buffer.read(cx).text(), text)); } #[gpui::test] -async fn test_reparse(mut cx: gpui::TestAppContext) { +async fn test_reparse(cx: &mut gpui::TestAppContext) { let text = "fn a() {}"; let buffer = cx.add_model(|cx| Buffer::new(0, text, cx).with_language(Arc::new(rust_lang()), cx)); @@ -159,13 +159,13 @@ async fn test_reparse(mut cx: gpui::TestAppContext) { ) ); - buffer.update(&mut cx, |buffer, _| { + buffer.update(cx, |buffer, _| { buffer.set_sync_parse_timeout(Duration::ZERO) }); // 
Perform some edits (add parameter and variable reference) // Parsing doesn't begin until the transaction is complete - buffer.update(&mut cx, |buf, cx| { + buffer.update(cx, |buf, cx| { buf.start_transaction(); let offset = buf.text().find(")").unwrap(); @@ -196,19 +196,19 @@ async fn test_reparse(mut cx: gpui::TestAppContext) { // * turn identifier into a field expression // * turn field expression into a method call // * add a turbofish to the method call - buffer.update(&mut cx, |buf, cx| { + buffer.update(cx, |buf, cx| { let offset = buf.text().find(";").unwrap(); buf.edit(vec![offset..offset], ".e", cx); assert_eq!(buf.text(), "fn a(b: C) { d.e; }"); assert!(buf.is_parsing()); }); - buffer.update(&mut cx, |buf, cx| { + buffer.update(cx, |buf, cx| { let offset = buf.text().find(";").unwrap(); buf.edit(vec![offset..offset], "(f)", cx); assert_eq!(buf.text(), "fn a(b: C) { d.e(f); }"); assert!(buf.is_parsing()); }); - buffer.update(&mut cx, |buf, cx| { + buffer.update(cx, |buf, cx| { let offset = buf.text().find("(f)").unwrap(); buf.edit(vec![offset..offset], "::", cx); assert_eq!(buf.text(), "fn a(b: C) { d.e::(f); }"); @@ -230,7 +230,7 @@ async fn test_reparse(mut cx: gpui::TestAppContext) { ) ); - buffer.update(&mut cx, |buf, cx| { + buffer.update(cx, |buf, cx| { buf.undo(cx); assert_eq!(buf.text(), "fn a() {}"); assert!(buf.is_parsing()); @@ -247,7 +247,7 @@ async fn test_reparse(mut cx: gpui::TestAppContext) { ) ); - buffer.update(&mut cx, |buf, cx| { + buffer.update(cx, |buf, cx| { buf.redo(cx); assert_eq!(buf.text(), "fn a(b: C) { d.e::(f); }"); assert!(buf.is_parsing()); @@ -276,7 +276,7 @@ async fn test_reparse(mut cx: gpui::TestAppContext) { } #[gpui::test] -async fn test_outline(mut cx: gpui::TestAppContext) { +async fn test_outline(cx: &mut gpui::TestAppContext) { let language = Arc::new( rust_lang() .with_outline_query( @@ -336,7 +336,7 @@ async fn test_outline(mut cx: gpui::TestAppContext) { let buffer = cx.add_model(|cx| Buffer::new(0, text, 
cx).with_language(language, cx)); let outline = buffer - .read_with(&cx, |buffer, _| buffer.snapshot().outline(None)) + .read_with(cx, |buffer, _| buffer.snapshot().outline(None)) .unwrap(); assert_eq!( @@ -553,7 +553,7 @@ fn test_autoindent_adjusts_lines_when_only_text_changes(cx: &mut MutableAppConte } #[gpui::test] -async fn test_diagnostics(mut cx: gpui::TestAppContext) { +async fn test_diagnostics(cx: &mut gpui::TestAppContext) { let (language_server, mut fake) = cx.update(lsp::LanguageServer::fake); let mut rust_lang = rust_lang(); rust_lang.config.language_server = Some(LanguageServerConfig { @@ -579,13 +579,13 @@ async fn test_diagnostics(mut cx: gpui::TestAppContext) { .await; // Edit the buffer, moving the content down - buffer.update(&mut cx, |buffer, cx| buffer.edit([0..0], "\n\n", cx)); + buffer.update(cx, |buffer, cx| buffer.edit([0..0], "\n\n", cx)); let change_notification_1 = fake .receive_notification::() .await; assert!(change_notification_1.text_document.version > open_notification.text_document.version); - buffer.update(&mut cx, |buffer, cx| { + buffer.update(cx, |buffer, cx| { // Receive diagnostics for an earlier version of the buffer. buffer .update_diagnostics( @@ -760,7 +760,7 @@ async fn test_diagnostics(mut cx: gpui::TestAppContext) { // Keep editing the buffer and ensure disk-based diagnostics get translated according to the // changes since the last save. 
- buffer.update(&mut cx, |buffer, cx| { + buffer.update(cx, |buffer, cx| { buffer.edit(Some(Point::new(2, 0)..Point::new(2, 0)), " ", cx); buffer.edit(Some(Point::new(2, 8)..Point::new(2, 10)), "(x: usize)", cx); }); @@ -771,7 +771,7 @@ async fn test_diagnostics(mut cx: gpui::TestAppContext) { change_notification_2.text_document.version > change_notification_1.text_document.version ); - buffer.update(&mut cx, |buffer, cx| { + buffer.update(cx, |buffer, cx| { buffer .update_diagnostics( vec![ @@ -836,7 +836,7 @@ async fn test_diagnostics(mut cx: gpui::TestAppContext) { } #[gpui::test] -async fn test_edits_from_lsp_with_past_version(mut cx: gpui::TestAppContext) { +async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) { let (language_server, mut fake) = cx.update(lsp::LanguageServer::fake); let text = " @@ -865,7 +865,7 @@ async fn test_edits_from_lsp_with_past_version(mut cx: gpui::TestAppContext) { .version; // Simulate editing the buffer after the language server computes some edits. 
- buffer.update(&mut cx, |buffer, cx| { + buffer.update(cx, |buffer, cx| { buffer.edit( [Point::new(0, 0)..Point::new(0, 0)], "// above first function\n", @@ -902,7 +902,7 @@ async fn test_edits_from_lsp_with_past_version(mut cx: gpui::TestAppContext) { }); let edits = buffer - .update(&mut cx, |buffer, cx| { + .update(cx, |buffer, cx| { buffer.edits_from_lsp( vec![ // replace body of first function @@ -937,7 +937,7 @@ async fn test_edits_from_lsp_with_past_version(mut cx: gpui::TestAppContext) { .await .unwrap(); - buffer.update(&mut cx, |buffer, cx| { + buffer.update(cx, |buffer, cx| { for (range, new_text) in edits { buffer.edit([range], new_text, cx); } @@ -962,7 +962,7 @@ async fn test_edits_from_lsp_with_past_version(mut cx: gpui::TestAppContext) { } #[gpui::test] -async fn test_edits_from_lsp_with_edits_on_adjacent_lines(mut cx: gpui::TestAppContext) { +async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) { let text = " use a::b; use a::c; @@ -979,7 +979,7 @@ async fn test_edits_from_lsp_with_edits_on_adjacent_lines(mut cx: gpui::TestAppC // Simulate the language server sending us a small edit in the form of a very large diff. // Rust-analyzer does this when performing a merge-imports code action. let edits = buffer - .update(&mut cx, |buffer, cx| { + .update(cx, |buffer, cx| { buffer.edits_from_lsp( [ // Replace the first use statement without editing the semicolon. 
@@ -1015,7 +1015,7 @@ async fn test_edits_from_lsp_with_edits_on_adjacent_lines(mut cx: gpui::TestAppC .await .unwrap(); - buffer.update(&mut cx, |buffer, cx| { + buffer.update(cx, |buffer, cx| { let edits = edits .into_iter() .map(|(range, text)| { @@ -1053,7 +1053,7 @@ async fn test_edits_from_lsp_with_edits_on_adjacent_lines(mut cx: gpui::TestAppC } #[gpui::test] -async fn test_empty_diagnostic_ranges(mut cx: gpui::TestAppContext) { +async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) { cx.add_model(|cx| { let text = concat!( "let one = ;\n", // diff --git a/crates/lsp/src/lsp.rs b/crates/lsp/src/lsp.rs index 2e2efdb28c0da5462184f11851ebeb71537c91c1..ed3502208e0b13978de2d86b4be632627240f11b 100644 --- a/crates/lsp/src/lsp.rs +++ b/crates/lsp/src/lsp.rs @@ -712,7 +712,7 @@ mod tests { } #[gpui::test] - async fn test_fake(mut cx: TestAppContext) { + async fn test_fake(cx: &mut TestAppContext) { let (server, mut fake) = cx.update(LanguageServer::fake); let (message_tx, message_rx) = channel::unbounded(); diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index 507870341a1c92a727d8dc18f3e0f19b3d0c8060..c789bad6c2d4071c20a07aa4ff84d34825f3da4d 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -3603,7 +3603,7 @@ mod tests { use worktree::WorktreeHandle as _; #[gpui::test] - async fn test_populate_and_search(mut cx: gpui::TestAppContext) { + async fn test_populate_and_search(cx: &mut gpui::TestAppContext) { let dir = temp_tree(json!({ "root": { "apple": "", @@ -3627,10 +3627,10 @@ mod tests { ) .unwrap(); - let project = Project::test(Arc::new(RealFs), &mut cx); + let project = Project::test(Arc::new(RealFs), cx); let (tree, _) = project - .update(&mut cx, |project, cx| { + .update(cx, |project, cx| { project.find_or_create_local_worktree(&root_link_path, false, cx) }) .await @@ -3649,7 +3649,7 @@ mod tests { let cancel_flag = Default::default(); let results = project - .read_with(&cx, |project, 
cx| { + .read_with(cx, |project, cx| { project.match_paths("bna", false, false, 10, &cancel_flag, cx) }) .await; @@ -3666,7 +3666,7 @@ mod tests { } #[gpui::test] - async fn test_language_server_diagnostics(mut cx: gpui::TestAppContext) { + async fn test_language_server_diagnostics(cx: &mut gpui::TestAppContext) { let (language_server_config, mut fake_servers) = LanguageServerConfig::fake(); let progress_token = language_server_config .disk_based_diagnostics_progress_token @@ -3693,31 +3693,31 @@ mod tests { ) .await; - let project = Project::test(fs, &mut cx); - project.update(&mut cx, |project, _| { + let project = Project::test(fs, cx); + project.update(cx, |project, _| { Arc::get_mut(&mut project.languages).unwrap().add(language); }); let (tree, _) = project - .update(&mut cx, |project, cx| { + .update(cx, |project, cx| { project.find_or_create_local_worktree("/dir", false, cx) }) .await .unwrap(); - let worktree_id = tree.read_with(&cx, |tree, _| tree.id()); + let worktree_id = tree.read_with(cx, |tree, _| tree.id()); cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete()) .await; // Cause worktree to start the fake language server let _buffer = project - .update(&mut cx, |project, cx| { + .update(cx, |project, cx| { project.open_buffer((worktree_id, Path::new("b.rs")), cx) }) .await .unwrap(); - let mut events = subscribe(&project, &mut cx); + let mut events = subscribe(&project, cx); let mut fake_server = fake_servers.next().await.unwrap(); fake_server.start_progress(&progress_token).await; @@ -3759,11 +3759,11 @@ mod tests { ); let buffer = project - .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx)) + .update(cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx)) .await .unwrap(); - buffer.read_with(&cx, |buffer, _| { + buffer.read_with(cx, |buffer, _| { let snapshot = buffer.snapshot(); let diagnostics = snapshot .diagnostics_in_range::<_, Point>(0..buffer.len()) @@ -3785,7 +3785,7 @@ mod tests { } #[gpui::test] - async fn 
test_search_worktree_without_files(mut cx: gpui::TestAppContext) { + async fn test_search_worktree_without_files(cx: &mut gpui::TestAppContext) { let dir = temp_tree(json!({ "root": { "dir1": {}, @@ -3795,9 +3795,9 @@ mod tests { } })); - let project = Project::test(Arc::new(RealFs), &mut cx); + let project = Project::test(Arc::new(RealFs), cx); let (tree, _) = project - .update(&mut cx, |project, cx| { + .update(cx, |project, cx| { project.find_or_create_local_worktree(&dir.path(), false, cx) }) .await @@ -3808,7 +3808,7 @@ mod tests { let cancel_flag = Default::default(); let results = project - .read_with(&cx, |project, cx| { + .read_with(cx, |project, cx| { project.match_paths("dir", false, false, 10, &cancel_flag, cx) }) .await; @@ -3817,7 +3817,7 @@ mod tests { } #[gpui::test] - async fn test_definition(mut cx: gpui::TestAppContext) { + async fn test_definition(cx: &mut gpui::TestAppContext) { let (language_server_config, mut fake_servers) = LanguageServerConfig::fake(); let language = Arc::new(Language::new( LanguageConfig { @@ -3839,23 +3839,23 @@ mod tests { ) .await; - let project = Project::test(fs, &mut cx); - project.update(&mut cx, |project, _| { + let project = Project::test(fs, cx); + project.update(cx, |project, _| { Arc::get_mut(&mut project.languages).unwrap().add(language); }); let (tree, _) = project - .update(&mut cx, |project, cx| { + .update(cx, |project, cx| { project.find_or_create_local_worktree("/dir/b.rs", false, cx) }) .await .unwrap(); - let worktree_id = tree.read_with(&cx, |tree, _| tree.id()); + let worktree_id = tree.read_with(cx, |tree, _| tree.id()); cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete()) .await; let buffer = project - .update(&mut cx, |project, cx| { + .update(cx, |project, cx| { project.open_buffer( ProjectPath { worktree_id, @@ -3883,7 +3883,7 @@ mod tests { }); let mut definitions = project - .update(&mut cx, |project, cx| project.definition(&buffer, 22, cx)) + .update(cx, |project, cx| 
project.definition(&buffer, 22, cx)) .await .unwrap(); @@ -3934,7 +3934,7 @@ mod tests { } #[gpui::test] - async fn test_save_file(mut cx: gpui::TestAppContext) { + async fn test_save_file(cx: &mut gpui::TestAppContext) { let fs = FakeFs::new(cx.background()); fs.insert_tree( "/dir", @@ -3944,22 +3944,22 @@ mod tests { ) .await; - let project = Project::test(fs.clone(), &mut cx); + let project = Project::test(fs.clone(), cx); let worktree_id = project - .update(&mut cx, |p, cx| { + .update(cx, |p, cx| { p.find_or_create_local_worktree("/dir", false, cx) }) .await .unwrap() .0 - .read_with(&cx, |tree, _| tree.id()); + .read_with(cx, |tree, _| tree.id()); let buffer = project - .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx)) + .update(cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx)) .await .unwrap(); buffer - .update(&mut cx, |buffer, cx| { + .update(cx, |buffer, cx| { assert_eq!(buffer.text(), "the old contents"); buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx); buffer.save(cx) @@ -3968,11 +3968,11 @@ mod tests { .unwrap(); let new_text = fs.load(Path::new("/dir/file1")).await.unwrap(); - assert_eq!(new_text, buffer.read_with(&cx, |buffer, _| buffer.text())); + assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text())); } #[gpui::test] - async fn test_save_in_single_file_worktree(mut cx: gpui::TestAppContext) { + async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) { let fs = FakeFs::new(cx.background()); fs.insert_tree( "/dir", @@ -3982,22 +3982,22 @@ mod tests { ) .await; - let project = Project::test(fs.clone(), &mut cx); + let project = Project::test(fs.clone(), cx); let worktree_id = project - .update(&mut cx, |p, cx| { + .update(cx, |p, cx| { p.find_or_create_local_worktree("/dir/file1", false, cx) }) .await .unwrap() .0 - .read_with(&cx, |tree, _| tree.id()); + .read_with(cx, |tree, _| tree.id()); let buffer = project - .update(&mut cx, |p, cx| p.open_buffer((worktree_id, ""), cx)) + 
.update(cx, |p, cx| p.open_buffer((worktree_id, ""), cx)) .await .unwrap(); buffer - .update(&mut cx, |buffer, cx| { + .update(cx, |buffer, cx| { buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx); buffer.save(cx) }) @@ -4005,11 +4005,11 @@ mod tests { .unwrap(); let new_text = fs.load(Path::new("/dir/file1")).await.unwrap(); - assert_eq!(new_text, buffer.read_with(&cx, |buffer, _| buffer.text())); + assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text())); } #[gpui::test(retries = 5)] - async fn test_rescan_and_remote_updates(mut cx: gpui::TestAppContext) { + async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) { let dir = temp_tree(json!({ "a": { "file1": "", @@ -4024,16 +4024,16 @@ mod tests { } })); - let project = Project::test(Arc::new(RealFs), &mut cx); - let rpc = project.read_with(&cx, |p, _| p.client.clone()); + let project = Project::test(Arc::new(RealFs), cx); + let rpc = project.read_with(cx, |p, _| p.client.clone()); let (tree, _) = project - .update(&mut cx, |p, cx| { + .update(cx, |p, cx| { p.find_or_create_local_worktree(dir.path(), false, cx) }) .await .unwrap(); - let worktree_id = tree.read_with(&cx, |tree, _| tree.id()); + let worktree_id = tree.read_with(cx, |tree, _| tree.id()); let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| { let buffer = project.update(cx, |p, cx| p.open_buffer((worktree_id, path), cx)); @@ -4047,10 +4047,10 @@ mod tests { }) }; - let buffer2 = buffer_for_path("a/file2", &mut cx).await; - let buffer3 = buffer_for_path("a/file3", &mut cx).await; - let buffer4 = buffer_for_path("b/c/file4", &mut cx).await; - let buffer5 = buffer_for_path("b/c/file5", &mut cx).await; + let buffer2 = buffer_for_path("a/file2", cx).await; + let buffer3 = buffer_for_path("a/file3", cx).await; + let buffer4 = buffer_for_path("b/c/file4", cx).await; + let buffer5 = buffer_for_path("b/c/file5", cx).await; let file2_id = id_for_path("a/file2", &cx); let file3_id = 
id_for_path("a/file3", &cx); @@ -4061,7 +4061,7 @@ mod tests { .await; // Create a remote copy of this worktree. - let initial_snapshot = tree.read_with(&cx, |tree, _| tree.as_local().unwrap().snapshot()); + let initial_snapshot = tree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot()); let (remote, load_task) = cx.update(|cx| { Worktree::remote( 1, @@ -4136,7 +4136,7 @@ mod tests { // Update the remote worktree. Check that it becomes consistent with the // local worktree. - remote.update(&mut cx, |remote, cx| { + remote.update(cx, |remote, cx| { let update_message = tree.read(cx).as_local().unwrap().snapshot().build_update( &initial_snapshot, 1, @@ -4161,7 +4161,7 @@ mod tests { } #[gpui::test] - async fn test_buffer_deduping(mut cx: gpui::TestAppContext) { + async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) { let fs = FakeFs::new(cx.background()); fs.insert_tree( "/the-dir", @@ -4172,18 +4172,18 @@ mod tests { ) .await; - let project = Project::test(fs.clone(), &mut cx); + let project = Project::test(fs.clone(), cx); let worktree_id = project - .update(&mut cx, |p, cx| { + .update(cx, |p, cx| { p.find_or_create_local_worktree("/the-dir", false, cx) }) .await .unwrap() .0 - .read_with(&cx, |tree, _| tree.id()); + .read_with(cx, |tree, _| tree.id()); // Spawn multiple tasks to open paths, repeating some paths. 
- let (buffer_a_1, buffer_b, buffer_a_2) = project.update(&mut cx, |p, cx| { + let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| { ( p.open_buffer((worktree_id, "a.txt"), cx), p.open_buffer((worktree_id, "b.txt"), cx), @@ -4194,8 +4194,8 @@ mod tests { let buffer_a_1 = buffer_a_1.await.unwrap(); let buffer_a_2 = buffer_a_2.await.unwrap(); let buffer_b = buffer_b.await.unwrap(); - assert_eq!(buffer_a_1.read_with(&cx, |b, _| b.text()), "a-contents"); - assert_eq!(buffer_b.read_with(&cx, |b, _| b.text()), "b-contents"); + assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents"); + assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents"); // There is only one buffer per path. let buffer_a_id = buffer_a_1.id(); @@ -4204,7 +4204,7 @@ mod tests { // Open the same path again while it is still open. drop(buffer_a_1); let buffer_a_3 = project - .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx)) + .update(cx, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx)) .await .unwrap(); @@ -4213,7 +4213,7 @@ mod tests { } #[gpui::test] - async fn test_buffer_is_dirty(mut cx: gpui::TestAppContext) { + async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) { use std::fs; let dir = temp_tree(json!({ @@ -4222,28 +4222,28 @@ mod tests { "file3": "ghi", })); - let project = Project::test(Arc::new(RealFs), &mut cx); + let project = Project::test(Arc::new(RealFs), cx); let (worktree, _) = project - .update(&mut cx, |p, cx| { + .update(cx, |p, cx| { p.find_or_create_local_worktree(dir.path(), false, cx) }) .await .unwrap(); - let worktree_id = worktree.read_with(&cx, |worktree, _| worktree.id()); + let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id()); worktree.flush_fs_events(&cx).await; worktree - .read_with(&cx, |t, _| t.as_local().unwrap().scan_complete()) + .read_with(cx, |t, _| t.as_local().unwrap().scan_complete()) .await; let buffer1 = project - .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "file1"), 
cx)) + .update(cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx)) .await .unwrap(); let events = Rc::new(RefCell::new(Vec::new())); // initially, the buffer isn't dirty. - buffer1.update(&mut cx, |buffer, cx| { + buffer1.update(cx, |buffer, cx| { cx.subscribe(&buffer1, { let events = events.clone(); move |_, _, event, _| events.borrow_mut().push(event.clone()) @@ -4257,7 +4257,7 @@ mod tests { }); // after the first edit, the buffer is dirty, and emits a dirtied event. - buffer1.update(&mut cx, |buffer, cx| { + buffer1.update(cx, |buffer, cx| { assert!(buffer.text() == "ac"); assert!(buffer.is_dirty()); assert_eq!( @@ -4269,7 +4269,7 @@ mod tests { }); // after saving, the buffer is not dirty, and emits a saved event. - buffer1.update(&mut cx, |buffer, cx| { + buffer1.update(cx, |buffer, cx| { assert!(!buffer.is_dirty()); assert_eq!(*events.borrow(), &[language::Event::Saved]); events.borrow_mut().clear(); @@ -4279,7 +4279,7 @@ mod tests { }); // after editing again, the buffer is dirty, and emits another dirty event. - buffer1.update(&mut cx, |buffer, cx| { + buffer1.update(cx, |buffer, cx| { assert!(buffer.text() == "aBDc"); assert!(buffer.is_dirty()); assert_eq!( @@ -4304,10 +4304,10 @@ mod tests { // When a file is deleted, the buffer is considered dirty. let events = Rc::new(RefCell::new(Vec::new())); let buffer2 = project - .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "file2"), cx)) + .update(cx, |p, cx| p.open_buffer((worktree_id, "file2"), cx)) .await .unwrap(); - buffer2.update(&mut cx, |_, cx| { + buffer2.update(cx, |_, cx| { cx.subscribe(&buffer2, { let events = events.clone(); move |_, _, event, _| events.borrow_mut().push(event.clone()) @@ -4325,10 +4325,10 @@ mod tests { // When a file is already dirty when deleted, we don't emit a Dirtied event. 
let events = Rc::new(RefCell::new(Vec::new())); let buffer3 = project - .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "file3"), cx)) + .update(cx, |p, cx| p.open_buffer((worktree_id, "file3"), cx)) .await .unwrap(); - buffer3.update(&mut cx, |_, cx| { + buffer3.update(cx, |_, cx| { cx.subscribe(&buffer3, { let events = events.clone(); move |_, _, event, _| events.borrow_mut().push(event.clone()) @@ -4337,7 +4337,7 @@ mod tests { }); worktree.flush_fs_events(&cx).await; - buffer3.update(&mut cx, |buffer, cx| { + buffer3.update(cx, |buffer, cx| { buffer.edit(Some(0..0), "x", cx); }); events.borrow_mut().clear(); @@ -4350,30 +4350,28 @@ mod tests { } #[gpui::test] - async fn test_buffer_file_changes_on_disk(mut cx: gpui::TestAppContext) { + async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) { use std::fs; let initial_contents = "aaa\nbbbbb\nc\n"; let dir = temp_tree(json!({ "the-file": initial_contents })); - let project = Project::test(Arc::new(RealFs), &mut cx); + let project = Project::test(Arc::new(RealFs), cx); let (worktree, _) = project - .update(&mut cx, |p, cx| { + .update(cx, |p, cx| { p.find_or_create_local_worktree(dir.path(), false, cx) }) .await .unwrap(); - let worktree_id = worktree.read_with(&cx, |tree, _| tree.id()); + let worktree_id = worktree.read_with(cx, |tree, _| tree.id()); worktree - .read_with(&cx, |t, _| t.as_local().unwrap().scan_complete()) + .read_with(cx, |t, _| t.as_local().unwrap().scan_complete()) .await; let abs_path = dir.path().join("the-file"); let buffer = project - .update(&mut cx, |p, cx| { - p.open_buffer((worktree_id, "the-file"), cx) - }) + .update(cx, |p, cx| p.open_buffer((worktree_id, "the-file"), cx)) .await .unwrap(); @@ -4397,7 +4395,7 @@ mod tests { // Change the file on disk, adding two new lines of text, and removing // one line. 
- buffer.read_with(&cx, |buffer, _| { + buffer.read_with(cx, |buffer, _| { assert!(!buffer.is_dirty()); assert!(!buffer.has_conflict()); }); @@ -4411,7 +4409,7 @@ mod tests { .condition(&cx, |buffer, _| buffer.text() == new_contents) .await; - buffer.update(&mut cx, |buffer, _| { + buffer.update(cx, |buffer, _| { assert_eq!(buffer.text(), new_contents); assert!(!buffer.is_dirty()); assert!(!buffer.has_conflict()); @@ -4433,7 +4431,7 @@ mod tests { }); // Modify the buffer - buffer.update(&mut cx, |buffer, cx| { + buffer.update(cx, |buffer, cx| { buffer.edit(vec![0..0], " ", cx); assert!(buffer.is_dirty()); assert!(!buffer.has_conflict()); @@ -4450,7 +4448,7 @@ mod tests { } #[gpui::test] - async fn test_grouped_diagnostics(mut cx: gpui::TestAppContext) { + async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) { let fs = FakeFs::new(cx.background()); fs.insert_tree( "/the-dir", @@ -4467,17 +4465,17 @@ mod tests { ) .await; - let project = Project::test(fs.clone(), &mut cx); + let project = Project::test(fs.clone(), cx); let (worktree, _) = project - .update(&mut cx, |p, cx| { + .update(cx, |p, cx| { p.find_or_create_local_worktree("/the-dir", false, cx) }) .await .unwrap(); - let worktree_id = worktree.read_with(&cx, |tree, _| tree.id()); + let worktree_id = worktree.read_with(cx, |tree, _| tree.id()); let buffer = project - .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx)) + .update(cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx)) .await .unwrap(); @@ -4582,11 +4580,11 @@ mod tests { }; project - .update(&mut cx, |p, cx| { + .update(cx, |p, cx| { p.update_diagnostics(message, &Default::default(), cx) }) .unwrap(); - let buffer = buffer.read_with(&cx, |buffer, _| buffer.snapshot()); + let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot()); assert_eq!( buffer @@ -4709,7 +4707,7 @@ mod tests { } #[gpui::test] - async fn test_rename(mut cx: gpui::TestAppContext) { + async fn test_rename(cx: &mut gpui::TestAppContext) { let 
(language_server_config, mut fake_servers) = LanguageServerConfig::fake(); let language = Arc::new(Language::new( LanguageConfig { @@ -4731,23 +4729,23 @@ mod tests { ) .await; - let project = Project::test(fs.clone(), &mut cx); - project.update(&mut cx, |project, _| { + let project = Project::test(fs.clone(), cx); + project.update(cx, |project, _| { Arc::get_mut(&mut project.languages).unwrap().add(language); }); let (tree, _) = project - .update(&mut cx, |project, cx| { + .update(cx, |project, cx| { project.find_or_create_local_worktree("/dir", false, cx) }) .await .unwrap(); - let worktree_id = tree.read_with(&cx, |tree, _| tree.id()); + let worktree_id = tree.read_with(cx, |tree, _| tree.id()); cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete()) .await; let buffer = project - .update(&mut cx, |project, cx| { + .update(cx, |project, cx| { project.open_buffer((worktree_id, Path::new("one.rs")), cx) }) .await @@ -4755,7 +4753,7 @@ mod tests { let mut fake_server = fake_servers.next().await.unwrap(); - let response = project.update(&mut cx, |project, cx| { + let response = project.update(cx, |project, cx| { project.prepare_rename(buffer.clone(), 7, cx) }); fake_server @@ -4771,10 +4769,10 @@ mod tests { .await .unwrap(); let range = response.await.unwrap().unwrap(); - let range = buffer.read_with(&cx, |buffer, _| range.to_offset(buffer)); + let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer)); assert_eq!(range, 6..9); - let response = project.update(&mut cx, |project, cx| { + let response = project.update(cx, |project, cx| { project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx) }); fake_server @@ -4837,7 +4835,7 @@ mod tests { .remove_entry(&buffer) .unwrap() .0 - .read_with(&cx, |buffer, _| buffer.text()), + .read_with(cx, |buffer, _| buffer.text()), "const THREE: usize = 1;" ); assert_eq!( @@ -4845,13 +4843,13 @@ mod tests { .into_keys() .next() .unwrap() - .read_with(&cx, |buffer, _| buffer.text()), + 
.read_with(cx, |buffer, _| buffer.text()), "const TWO: usize = one::THREE + one::THREE;" ); } #[gpui::test] - async fn test_search(mut cx: gpui::TestAppContext) { + async fn test_search(cx: &mut gpui::TestAppContext) { let fs = FakeFs::new(cx.background()); fs.insert_tree( "/dir", @@ -4863,19 +4861,19 @@ mod tests { }), ) .await; - let project = Project::test(fs.clone(), &mut cx); + let project = Project::test(fs.clone(), cx); let (tree, _) = project - .update(&mut cx, |project, cx| { + .update(cx, |project, cx| { project.find_or_create_local_worktree("/dir", false, cx) }) .await .unwrap(); - let worktree_id = tree.read_with(&cx, |tree, _| tree.id()); + let worktree_id = tree.read_with(cx, |tree, _| tree.id()); cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete()) .await; assert_eq!( - search(&project, SearchQuery::text("TWO", false, true), &mut cx) + search(&project, SearchQuery::text("TWO", false, true), cx) .await .unwrap(), HashMap::from_iter([ @@ -4885,17 +4883,17 @@ mod tests { ); let buffer_4 = project - .update(&mut cx, |project, cx| { + .update(cx, |project, cx| { project.open_buffer((worktree_id, "four.rs"), cx) }) .await .unwrap(); - buffer_4.update(&mut cx, |buffer, cx| { + buffer_4.update(cx, |buffer, cx| { buffer.edit([20..28, 31..43], "two::TWO", cx); }); assert_eq!( - search(&project, SearchQuery::text("TWO", false, true), &mut cx) + search(&project, SearchQuery::text("TWO", false, true), cx) .await .unwrap(), HashMap::from_iter([ diff --git a/crates/project/src/worktree.rs b/crates/project/src/worktree.rs index 6455af3e53864caf647df14a3510424b71a7d96c..fe9958b9cde5b2c1f3d174140e5d7176ac01a66b 100644 --- a/crates/project/src/worktree.rs +++ b/crates/project/src/worktree.rs @@ -2441,7 +2441,7 @@ mod tests { use util::test::temp_tree; #[gpui::test] - async fn test_traversal(cx: gpui::TestAppContext) { + async fn test_traversal(cx: &mut gpui::TestAppContext) { let fs = FakeFs::new(cx.background()); fs.insert_tree( "/root", @@ -2470,7 +2470,7 
@@ mod tests { cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete()) .await; - tree.read_with(&cx, |tree, _| { + tree.read_with(cx, |tree, _| { assert_eq!( tree.entries(false) .map(|entry| entry.path.as_ref()) @@ -2486,7 +2486,7 @@ mod tests { } #[gpui::test] - async fn test_rescan_with_gitignore(cx: gpui::TestAppContext) { + async fn test_rescan_with_gitignore(cx: &mut gpui::TestAppContext) { let dir = temp_tree(json!({ ".git": {}, ".gitignore": "ignored-dir\n", diff --git a/crates/project_panel/src/project_panel.rs b/crates/project_panel/src/project_panel.rs index 76fb51108a03f5088e8544b85eb6b169a4486b51..8407cc3c26d6cf9a27c247fac9f878c114d2b2a6 100644 --- a/crates/project_panel/src/project_panel.rs +++ b/crates/project_panel/src/project_panel.rs @@ -589,7 +589,7 @@ mod tests { use workspace::WorkspaceParams; #[gpui::test] - async fn test_visible_list(mut cx: gpui::TestAppContext) { + async fn test_visible_list(cx: &mut gpui::TestAppContext) { let params = cx.update(WorkspaceParams::test); let settings = params.settings.clone(); let fs = params.fs.as_fake(); @@ -639,28 +639,28 @@ mod tests { ) }); let (root1, _) = project - .update(&mut cx, |project, cx| { + .update(cx, |project, cx| { project.find_or_create_local_worktree("/root1", false, cx) }) .await .unwrap(); root1 - .read_with(&cx, |t, _| t.as_local().unwrap().scan_complete()) + .read_with(cx, |t, _| t.as_local().unwrap().scan_complete()) .await; let (root2, _) = project - .update(&mut cx, |project, cx| { + .update(cx, |project, cx| { project.find_or_create_local_worktree("/root2", false, cx) }) .await .unwrap(); root2 - .read_with(&cx, |t, _| t.as_local().unwrap().scan_complete()) + .read_with(cx, |t, _| t.as_local().unwrap().scan_complete()) .await; let (_, workspace) = cx.add_window(|cx| Workspace::new(¶ms, cx)); - let panel = workspace.update(&mut cx, |_, cx| ProjectPanel::new(project, settings, cx)); + let panel = workspace.update(cx, |_, cx| ProjectPanel::new(project, settings, cx)); 
assert_eq!( - visible_entry_details(&panel, 0..50, &mut cx), + visible_entry_details(&panel, 0..50, cx), &[ EntryDetails { filename: "root1".to_string(), @@ -721,9 +721,9 @@ mod tests { ], ); - toggle_expand_dir(&panel, "root1/b", &mut cx); + toggle_expand_dir(&panel, "root1/b", cx); assert_eq!( - visible_entry_details(&panel, 0..50, &mut cx), + visible_entry_details(&panel, 0..50, cx), &[ EntryDetails { filename: "root1".to_string(), @@ -799,7 +799,7 @@ mod tests { ); assert_eq!( - visible_entry_details(&panel, 5..8, &mut cx), + visible_entry_details(&panel, 5..8, cx), [ EntryDetails { filename: "4".to_string(), diff --git a/crates/rpc/src/peer.rs b/crates/rpc/src/peer.rs index d37aec47678d6ef1858982c180d39940104e1da6..a1d5ab8e9677a3161e4a6fa5a7842845fecb0a21 100644 --- a/crates/rpc/src/peer.rs +++ b/crates/rpc/src/peer.rs @@ -347,7 +347,7 @@ mod tests { use gpui::TestAppContext; #[gpui::test(iterations = 50)] - async fn test_request_response(cx: TestAppContext) { + async fn test_request_response(cx: &mut TestAppContext) { let executor = cx.foreground(); // create 2 clients connected to 1 server @@ -441,7 +441,7 @@ mod tests { } #[gpui::test(iterations = 50)] - async fn test_order_of_response_and_incoming(cx: TestAppContext) { + async fn test_order_of_response_and_incoming(cx: &mut TestAppContext) { let executor = cx.foreground(); let server = Peer::new(); let client = Peer::new(); @@ -539,7 +539,7 @@ mod tests { } #[gpui::test(iterations = 50)] - async fn test_dropping_request_before_completion(cx: TestAppContext) { + async fn test_dropping_request_before_completion(cx: &mut TestAppContext) { let executor = cx.foreground(); let server = Peer::new(); let client = Peer::new(); @@ -651,7 +651,7 @@ mod tests { } #[gpui::test(iterations = 50)] - async fn test_disconnect(cx: TestAppContext) { + async fn test_disconnect(cx: &mut TestAppContext) { let executor = cx.foreground(); let (client_conn, mut server_conn, _) = Connection::in_memory(cx.background()); @@ -686,7 
+686,7 @@ mod tests { } #[gpui::test(iterations = 50)] - async fn test_io_error(cx: TestAppContext) { + async fn test_io_error(cx: &mut TestAppContext) { let executor = cx.foreground(); let (client_conn, mut server_conn, _) = Connection::in_memory(cx.background()); diff --git a/crates/search/src/buffer_search.rs b/crates/search/src/buffer_search.rs index 847345448a1dfad09424a403377e9a2802a0ef51..9a8f4a77d7030609335df130ac499e4e33989193 100644 --- a/crates/search/src/buffer_search.rs +++ b/crates/search/src/buffer_search.rs @@ -520,7 +520,7 @@ mod tests { use unindent::Unindent as _; #[gpui::test] - async fn test_search_simple(mut cx: TestAppContext) { + async fn test_search_simple(cx: &mut TestAppContext) { let fonts = cx.font_cache(); let mut theme = gpui::fonts::with_font_cache(fonts.clone(), || theme::Theme::default()); theme.search.match_background = Color::red(); @@ -551,11 +551,11 @@ mod tests { // Search for a string that appears with different casing. // By default, search is case-insensitive. - search_bar.update(&mut cx, |search_bar, cx| { + search_bar.update(cx, |search_bar, cx| { search_bar.set_query("us", cx); }); editor.next_notification(&cx).await; - editor.update(&mut cx, |editor, cx| { + editor.update(cx, |editor, cx| { assert_eq!( editor.all_highlighted_ranges(cx), &[ @@ -572,11 +572,11 @@ mod tests { }); // Switch to a case sensitive search. - search_bar.update(&mut cx, |search_bar, cx| { + search_bar.update(cx, |search_bar, cx| { search_bar.toggle_search_option(&ToggleSearchOption(SearchOption::CaseSensitive), cx); }); editor.next_notification(&cx).await; - editor.update(&mut cx, |editor, cx| { + editor.update(cx, |editor, cx| { assert_eq!( editor.all_highlighted_ranges(cx), &[( @@ -588,11 +588,11 @@ mod tests { // Search for a string that appears both as a whole word and // within other words. By default, all results are found. 
- search_bar.update(&mut cx, |search_bar, cx| { + search_bar.update(cx, |search_bar, cx| { search_bar.set_query("or", cx); }); editor.next_notification(&cx).await; - editor.update(&mut cx, |editor, cx| { + editor.update(cx, |editor, cx| { assert_eq!( editor.all_highlighted_ranges(cx), &[ @@ -629,11 +629,11 @@ mod tests { }); // Switch to a whole word search. - search_bar.update(&mut cx, |search_bar, cx| { + search_bar.update(cx, |search_bar, cx| { search_bar.toggle_search_option(&ToggleSearchOption(SearchOption::WholeWord), cx); }); editor.next_notification(&cx).await; - editor.update(&mut cx, |editor, cx| { + editor.update(cx, |editor, cx| { assert_eq!( editor.all_highlighted_ranges(cx), &[ @@ -653,10 +653,10 @@ mod tests { ); }); - editor.update(&mut cx, |editor, cx| { + editor.update(cx, |editor, cx| { editor.select_display_ranges(&[DisplayPoint::new(0, 0)..DisplayPoint::new(0, 0)], cx); }); - search_bar.update(&mut cx, |search_bar, cx| { + search_bar.update(cx, |search_bar, cx| { assert_eq!(search_bar.active_match_index, Some(0)); search_bar.select_match(&SelectMatch(Direction::Next), cx); assert_eq!( @@ -664,82 +664,82 @@ mod tests { [DisplayPoint::new(0, 41)..DisplayPoint::new(0, 43)] ); }); - search_bar.read_with(&cx, |search_bar, _| { + search_bar.read_with(cx, |search_bar, _| { assert_eq!(search_bar.active_match_index, Some(0)); }); - search_bar.update(&mut cx, |search_bar, cx| { + search_bar.update(cx, |search_bar, cx| { search_bar.select_match(&SelectMatch(Direction::Next), cx); assert_eq!( editor.update(cx, |editor, cx| editor.selected_display_ranges(cx)), [DisplayPoint::new(3, 11)..DisplayPoint::new(3, 13)] ); }); - search_bar.read_with(&cx, |search_bar, _| { + search_bar.read_with(cx, |search_bar, _| { assert_eq!(search_bar.active_match_index, Some(1)); }); - search_bar.update(&mut cx, |search_bar, cx| { + search_bar.update(cx, |search_bar, cx| { search_bar.select_match(&SelectMatch(Direction::Next), cx); assert_eq!( editor.update(cx, |editor, cx| 
editor.selected_display_ranges(cx)), [DisplayPoint::new(3, 56)..DisplayPoint::new(3, 58)] ); }); - search_bar.read_with(&cx, |search_bar, _| { + search_bar.read_with(cx, |search_bar, _| { assert_eq!(search_bar.active_match_index, Some(2)); }); - search_bar.update(&mut cx, |search_bar, cx| { + search_bar.update(cx, |search_bar, cx| { search_bar.select_match(&SelectMatch(Direction::Next), cx); assert_eq!( editor.update(cx, |editor, cx| editor.selected_display_ranges(cx)), [DisplayPoint::new(0, 41)..DisplayPoint::new(0, 43)] ); }); - search_bar.read_with(&cx, |search_bar, _| { + search_bar.read_with(cx, |search_bar, _| { assert_eq!(search_bar.active_match_index, Some(0)); }); - search_bar.update(&mut cx, |search_bar, cx| { + search_bar.update(cx, |search_bar, cx| { search_bar.select_match(&SelectMatch(Direction::Prev), cx); assert_eq!( editor.update(cx, |editor, cx| editor.selected_display_ranges(cx)), [DisplayPoint::new(3, 56)..DisplayPoint::new(3, 58)] ); }); - search_bar.read_with(&cx, |search_bar, _| { + search_bar.read_with(cx, |search_bar, _| { assert_eq!(search_bar.active_match_index, Some(2)); }); - search_bar.update(&mut cx, |search_bar, cx| { + search_bar.update(cx, |search_bar, cx| { search_bar.select_match(&SelectMatch(Direction::Prev), cx); assert_eq!( editor.update(cx, |editor, cx| editor.selected_display_ranges(cx)), [DisplayPoint::new(3, 11)..DisplayPoint::new(3, 13)] ); }); - search_bar.read_with(&cx, |search_bar, _| { + search_bar.read_with(cx, |search_bar, _| { assert_eq!(search_bar.active_match_index, Some(1)); }); - search_bar.update(&mut cx, |search_bar, cx| { + search_bar.update(cx, |search_bar, cx| { search_bar.select_match(&SelectMatch(Direction::Prev), cx); assert_eq!( editor.update(cx, |editor, cx| editor.selected_display_ranges(cx)), [DisplayPoint::new(0, 41)..DisplayPoint::new(0, 43)] ); }); - search_bar.read_with(&cx, |search_bar, _| { + search_bar.read_with(cx, |search_bar, _| { assert_eq!(search_bar.active_match_index, Some(0)); }); // 
Park the cursor in between matches and ensure that going to the previous match selects // the closest match to the left. - editor.update(&mut cx, |editor, cx| { + editor.update(cx, |editor, cx| { editor.select_display_ranges(&[DisplayPoint::new(1, 0)..DisplayPoint::new(1, 0)], cx); }); - search_bar.update(&mut cx, |search_bar, cx| { + search_bar.update(cx, |search_bar, cx| { assert_eq!(search_bar.active_match_index, Some(1)); search_bar.select_match(&SelectMatch(Direction::Prev), cx); assert_eq!( @@ -747,16 +747,16 @@ mod tests { [DisplayPoint::new(0, 41)..DisplayPoint::new(0, 43)] ); }); - search_bar.read_with(&cx, |search_bar, _| { + search_bar.read_with(cx, |search_bar, _| { assert_eq!(search_bar.active_match_index, Some(0)); }); // Park the cursor in between matches and ensure that going to the next match selects the // closest match to the right. - editor.update(&mut cx, |editor, cx| { + editor.update(cx, |editor, cx| { editor.select_display_ranges(&[DisplayPoint::new(1, 0)..DisplayPoint::new(1, 0)], cx); }); - search_bar.update(&mut cx, |search_bar, cx| { + search_bar.update(cx, |search_bar, cx| { assert_eq!(search_bar.active_match_index, Some(1)); search_bar.select_match(&SelectMatch(Direction::Next), cx); assert_eq!( @@ -764,16 +764,16 @@ mod tests { [DisplayPoint::new(3, 11)..DisplayPoint::new(3, 13)] ); }); - search_bar.read_with(&cx, |search_bar, _| { + search_bar.read_with(cx, |search_bar, _| { assert_eq!(search_bar.active_match_index, Some(1)); }); // Park the cursor after the last match and ensure that going to the previous match selects // the last match. 
- editor.update(&mut cx, |editor, cx| { + editor.update(cx, |editor, cx| { editor.select_display_ranges(&[DisplayPoint::new(3, 60)..DisplayPoint::new(3, 60)], cx); }); - search_bar.update(&mut cx, |search_bar, cx| { + search_bar.update(cx, |search_bar, cx| { assert_eq!(search_bar.active_match_index, Some(2)); search_bar.select_match(&SelectMatch(Direction::Prev), cx); assert_eq!( @@ -781,16 +781,16 @@ mod tests { [DisplayPoint::new(3, 56)..DisplayPoint::new(3, 58)] ); }); - search_bar.read_with(&cx, |search_bar, _| { + search_bar.read_with(cx, |search_bar, _| { assert_eq!(search_bar.active_match_index, Some(2)); }); // Park the cursor after the last match and ensure that going to the next match selects the // first match. - editor.update(&mut cx, |editor, cx| { + editor.update(cx, |editor, cx| { editor.select_display_ranges(&[DisplayPoint::new(3, 60)..DisplayPoint::new(3, 60)], cx); }); - search_bar.update(&mut cx, |search_bar, cx| { + search_bar.update(cx, |search_bar, cx| { assert_eq!(search_bar.active_match_index, Some(2)); search_bar.select_match(&SelectMatch(Direction::Next), cx); assert_eq!( @@ -798,16 +798,16 @@ mod tests { [DisplayPoint::new(0, 41)..DisplayPoint::new(0, 43)] ); }); - search_bar.read_with(&cx, |search_bar, _| { + search_bar.read_with(cx, |search_bar, _| { assert_eq!(search_bar.active_match_index, Some(0)); }); // Park the cursor before the first match and ensure that going to the previous match // selects the last match. 
- editor.update(&mut cx, |editor, cx| { + editor.update(cx, |editor, cx| { editor.select_display_ranges(&[DisplayPoint::new(0, 0)..DisplayPoint::new(0, 0)], cx); }); - search_bar.update(&mut cx, |search_bar, cx| { + search_bar.update(cx, |search_bar, cx| { assert_eq!(search_bar.active_match_index, Some(0)); search_bar.select_match(&SelectMatch(Direction::Prev), cx); assert_eq!( @@ -815,7 +815,7 @@ mod tests { [DisplayPoint::new(3, 56)..DisplayPoint::new(3, 58)] ); }); - search_bar.read_with(&cx, |search_bar, _| { + search_bar.read_with(cx, |search_bar, _| { assert_eq!(search_bar.active_match_index, Some(2)); }); } diff --git a/crates/search/src/project_search.rs b/crates/search/src/project_search.rs index 78031dd951ccf17b9cd25a12793d546588cf8f74..c345172d0423f60529c3cadbf3fde9b4a679a9f8 100644 --- a/crates/search/src/project_search.rs +++ b/crates/search/src/project_search.rs @@ -714,7 +714,7 @@ mod tests { use std::sync::Arc; #[gpui::test] - async fn test_project_search(mut cx: TestAppContext) { + async fn test_project_search(cx: &mut TestAppContext) { let fonts = cx.font_cache(); let mut theme = gpui::fonts::with_font_cache(fonts.clone(), || theme::Theme::default()); theme.search.match_background = Color::red(); @@ -732,9 +732,9 @@ mod tests { }), ) .await; - let project = Project::test(fs.clone(), &mut cx); + let project = Project::test(fs.clone(), cx); let (tree, _) = project - .update(&mut cx, |project, cx| { + .update(cx, |project, cx| { project.find_or_create_local_worktree("/dir", false, cx) }) .await @@ -747,14 +747,14 @@ mod tests { ProjectSearchView::new(search.clone(), None, settings, cx) }); - search_view.update(&mut cx, |search_view, cx| { + search_view.update(cx, |search_view, cx| { search_view .query_editor .update(cx, |query_editor, cx| query_editor.set_text("TWO", cx)); search_view.search(&Search, cx); }); search_view.next_notification(&cx).await; - search_view.update(&mut cx, |search_view, cx| { + search_view.update(cx, |search_view, cx| { 
assert_eq!( search_view .results_editor @@ -791,7 +791,7 @@ mod tests { search_view.select_match(&SelectMatch(Direction::Next), cx); }); - search_view.update(&mut cx, |search_view, cx| { + search_view.update(cx, |search_view, cx| { assert_eq!(search_view.active_match_index, Some(1)); assert_eq!( search_view @@ -802,7 +802,7 @@ mod tests { search_view.select_match(&SelectMatch(Direction::Next), cx); }); - search_view.update(&mut cx, |search_view, cx| { + search_view.update(cx, |search_view, cx| { assert_eq!(search_view.active_match_index, Some(2)); assert_eq!( search_view @@ -813,7 +813,7 @@ mod tests { search_view.select_match(&SelectMatch(Direction::Next), cx); }); - search_view.update(&mut cx, |search_view, cx| { + search_view.update(cx, |search_view, cx| { assert_eq!(search_view.active_match_index, Some(0)); assert_eq!( search_view @@ -824,7 +824,7 @@ mod tests { search_view.select_match(&SelectMatch(Direction::Prev), cx); }); - search_view.update(&mut cx, |search_view, cx| { + search_view.update(cx, |search_view, cx| { assert_eq!(search_view.active_match_index, Some(2)); assert_eq!( search_view @@ -835,7 +835,7 @@ mod tests { search_view.select_match(&SelectMatch(Direction::Prev), cx); }); - search_view.update(&mut cx, |search_view, cx| { + search_view.update(cx, |search_view, cx| { assert_eq!(search_view.active_match_index, Some(1)); assert_eq!( search_view diff --git a/crates/server/src/db.rs b/crates/server/src/db.rs index 37e35be5f8ca21bf810cee15147db5bd01d4019c..53a9c7d25971d609c3cce3b94c55a44654082b3c 100644 --- a/crates/server/src/db.rs +++ b/crates/server/src/db.rs @@ -627,7 +627,7 @@ pub mod tests { use util::post_inc; #[gpui::test] - async fn test_get_users_by_ids(cx: TestAppContext) { + async fn test_get_users_by_ids(cx: &mut TestAppContext) { for test_db in [TestDb::postgres(), TestDb::fake(cx.background())] { let db = test_db.db(); @@ -667,7 +667,7 @@ pub mod tests { } #[gpui::test] - async fn test_recent_channel_messages(cx: TestAppContext) { + 
async fn test_recent_channel_messages(cx: &mut TestAppContext) { for test_db in [TestDb::postgres(), TestDb::fake(cx.background())] { let db = test_db.db(); let user = db.create_user("user", false).await.unwrap(); @@ -703,7 +703,7 @@ pub mod tests { } #[gpui::test] - async fn test_channel_message_nonces(cx: TestAppContext) { + async fn test_channel_message_nonces(cx: &mut TestAppContext) { for test_db in [TestDb::postgres(), TestDb::fake(cx.background())] { let db = test_db.db(); let user = db.create_user("user", false).await.unwrap(); diff --git a/crates/server/src/rpc.rs b/crates/server/src/rpc.rs index af5ee8aa847b01380fe6d9a64bf2c72736272b1a..db324a30fc3ece0e09887dfd70e8a40da96a43de 100644 --- a/crates/server/src/rpc.rs +++ b/crates/server/src/rpc.rs @@ -1037,7 +1037,7 @@ mod tests { } #[gpui::test(iterations = 10)] - async fn test_share_project(mut cx_a: TestAppContext, mut cx_b: TestAppContext) { + async fn test_share_project(cx_a: &mut TestAppContext, cx_b: &mut TestAppContext) { let (window_b, _) = cx_b.add_window(|_| EmptyView); let lang_registry = Arc::new(LanguageRegistry::new()); let fs = FakeFs::new(cx_a.background()); @@ -1045,8 +1045,8 @@ mod tests { // Connect to a server as 2 clients. 
let mut server = TestServer::start(cx_a.foreground(), cx_a.background()).await; - let client_a = server.create_client(&mut cx_a, "user_a").await; - let client_b = server.create_client(&mut cx_b, "user_b").await; + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; // Share a project as client A fs.insert_tree( @@ -1068,20 +1068,17 @@ mod tests { ) }); let (worktree_a, _) = project_a - .update(&mut cx_a, |p, cx| { + .update(cx_a, |p, cx| { p.find_or_create_local_worktree("/a", false, cx) }) .await .unwrap(); - let worktree_id = worktree_a.read_with(&cx_a, |tree, _| tree.id()); + let worktree_id = worktree_a.read_with(cx_a, |tree, _| tree.id()); worktree_a - .read_with(&cx_a, |tree, _| tree.as_local().unwrap().scan_complete()) + .read_with(cx_a, |tree, _| tree.as_local().unwrap().scan_complete()) .await; - let project_id = project_a.update(&mut cx_a, |p, _| p.next_remote_id()).await; - project_a - .update(&mut cx_a, |p, cx| p.share(cx)) - .await - .unwrap(); + let project_id = project_a.update(cx_a, |p, _| p.next_remote_id()).await; + project_a.update(cx_a, |p, cx| p.share(cx)).await.unwrap(); // Join that project as client B let project_b = Project::remote( @@ -1095,7 +1092,7 @@ mod tests { .await .unwrap(); - let replica_id_b = project_b.read_with(&cx_b, |project, _| { + let replica_id_b = project_b.read_with(cx_b, |project, _| { assert_eq!( project .collaborators() @@ -1120,18 +1117,18 @@ mod tests { // Open the same file as client B and client A. 
let buffer_b = project_b - .update(&mut cx_b, |p, cx| p.open_buffer((worktree_id, "b.txt"), cx)) + .update(cx_b, |p, cx| p.open_buffer((worktree_id, "b.txt"), cx)) .await .unwrap(); let buffer_b = cx_b.add_model(|cx| MultiBuffer::singleton(buffer_b, cx)); - buffer_b.read_with(&cx_b, |buf, cx| { + buffer_b.read_with(cx_b, |buf, cx| { assert_eq!(buf.read(cx).text(), "b-contents") }); - project_a.read_with(&cx_a, |project, cx| { + project_a.read_with(cx_a, |project, cx| { assert!(project.has_open_buffer((worktree_id, "b.txt"), cx)) }); let buffer_a = project_a - .update(&mut cx_a, |p, cx| p.open_buffer((worktree_id, "b.txt"), cx)) + .update(cx_a, |p, cx| p.open_buffer((worktree_id, "b.txt"), cx)) .await .unwrap(); @@ -1151,7 +1148,7 @@ mod tests { // .await; // Edit the buffer as client B and see that edit as client A. - editor_b.update(&mut cx_b, |editor, cx| { + editor_b.update(cx_b, |editor, cx| { editor.handle_input(&Input("ok, ".into()), cx) }); buffer_a @@ -1173,15 +1170,15 @@ mod tests { } #[gpui::test(iterations = 10)] - async fn test_unshare_project(mut cx_a: TestAppContext, mut cx_b: TestAppContext) { + async fn test_unshare_project(cx_a: &mut TestAppContext, cx_b: &mut TestAppContext) { let lang_registry = Arc::new(LanguageRegistry::new()); let fs = FakeFs::new(cx_a.background()); cx_a.foreground().forbid_parking(); // Connect to a server as 2 clients. 
let mut server = TestServer::start(cx_a.foreground(), cx_a.background()).await; - let client_a = server.create_client(&mut cx_a, "user_a").await; - let client_b = server.create_client(&mut cx_b, "user_b").await; + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; // Share a project as client A fs.insert_tree( @@ -1203,21 +1200,18 @@ mod tests { ) }); let (worktree_a, _) = project_a - .update(&mut cx_a, |p, cx| { + .update(cx_a, |p, cx| { p.find_or_create_local_worktree("/a", false, cx) }) .await .unwrap(); worktree_a - .read_with(&cx_a, |tree, _| tree.as_local().unwrap().scan_complete()) + .read_with(cx_a, |tree, _| tree.as_local().unwrap().scan_complete()) .await; - let project_id = project_a.update(&mut cx_a, |p, _| p.next_remote_id()).await; - let worktree_id = worktree_a.read_with(&cx_a, |tree, _| tree.id()); - project_a - .update(&mut cx_a, |p, cx| p.share(cx)) - .await - .unwrap(); - assert!(worktree_a.read_with(&cx_a, |tree, _| tree.as_local().unwrap().is_shared())); + let project_id = project_a.update(cx_a, |p, _| p.next_remote_id()).await; + let worktree_id = worktree_a.read_with(cx_a, |tree, _| tree.id()); + project_a.update(cx_a, |p, cx| p.share(cx)).await.unwrap(); + assert!(worktree_a.read_with(cx_a, |tree, _| tree.as_local().unwrap().is_shared())); // Join that project as client B let project_b = Project::remote( @@ -1231,27 +1225,27 @@ mod tests { .await .unwrap(); project_b - .update(&mut cx_b, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx)) + .update(cx_b, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx)) .await .unwrap(); // Unshare the project as client A project_a - .update(&mut cx_a, |project, cx| project.unshare(cx)) + .update(cx_a, |project, cx| project.unshare(cx)) .await .unwrap(); project_b - .condition(&mut cx_b, |project, _| project.is_read_only()) + .condition(cx_b, |project, _| project.is_read_only()) .await; - assert!(worktree_a.read_with(&cx_a, |tree, _| 
!tree.as_local().unwrap().is_shared())); + assert!(worktree_a.read_with(cx_a, |tree, _| !tree.as_local().unwrap().is_shared())); drop(project_b); // Share the project again and ensure guests can still join. project_a - .update(&mut cx_a, |project, cx| project.share(cx)) + .update(cx_a, |project, cx| project.share(cx)) .await .unwrap(); - assert!(worktree_a.read_with(&cx_a, |tree, _| tree.as_local().unwrap().is_shared())); + assert!(worktree_a.read_with(cx_a, |tree, _| tree.as_local().unwrap().is_shared())); let project_c = Project::remote( project_id, @@ -1264,16 +1258,16 @@ mod tests { .await .unwrap(); project_c - .update(&mut cx_b, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx)) + .update(cx_b, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx)) .await .unwrap(); } #[gpui::test(iterations = 10)] async fn test_propagate_saves_and_fs_changes( - mut cx_a: TestAppContext, - mut cx_b: TestAppContext, - mut cx_c: TestAppContext, + cx_a: &mut TestAppContext, + cx_b: &mut TestAppContext, + cx_c: &mut TestAppContext, ) { let lang_registry = Arc::new(LanguageRegistry::new()); let fs = FakeFs::new(cx_a.background()); @@ -1281,9 +1275,9 @@ mod tests { // Connect to a server as 3 clients. let mut server = TestServer::start(cx_a.foreground(), cx_a.background()).await; - let client_a = server.create_client(&mut cx_a, "user_a").await; - let client_b = server.create_client(&mut cx_b, "user_b").await; - let client_c = server.create_client(&mut cx_c, "user_c").await; + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; + let client_c = server.create_client(cx_c, "user_c").await; // Share a worktree as client A. 
fs.insert_tree( @@ -1305,20 +1299,17 @@ mod tests { ) }); let (worktree_a, _) = project_a - .update(&mut cx_a, |p, cx| { + .update(cx_a, |p, cx| { p.find_or_create_local_worktree("/a", false, cx) }) .await .unwrap(); worktree_a - .read_with(&cx_a, |tree, _| tree.as_local().unwrap().scan_complete()) + .read_with(cx_a, |tree, _| tree.as_local().unwrap().scan_complete()) .await; - let project_id = project_a.update(&mut cx_a, |p, _| p.next_remote_id()).await; - let worktree_id = worktree_a.read_with(&cx_a, |tree, _| tree.id()); - project_a - .update(&mut cx_a, |p, cx| p.share(cx)) - .await - .unwrap(); + let project_id = project_a.update(cx_a, |p, _| p.next_remote_id()).await; + let worktree_id = worktree_a.read_with(cx_a, |tree, _| tree.id()); + project_a.update(cx_a, |p, cx| p.share(cx)).await.unwrap(); // Join that worktree as clients B and C. let project_b = Project::remote( @@ -1341,56 +1332,56 @@ mod tests { ) .await .unwrap(); - let worktree_b = project_b.read_with(&cx_b, |p, cx| p.worktrees(cx).next().unwrap()); - let worktree_c = project_c.read_with(&cx_c, |p, cx| p.worktrees(cx).next().unwrap()); + let worktree_b = project_b.read_with(cx_b, |p, cx| p.worktrees(cx).next().unwrap()); + let worktree_c = project_c.read_with(cx_c, |p, cx| p.worktrees(cx).next().unwrap()); // Open and edit a buffer as both guests B and C. 
let buffer_b = project_b - .update(&mut cx_b, |p, cx| p.open_buffer((worktree_id, "file1"), cx)) + .update(cx_b, |p, cx| p.open_buffer((worktree_id, "file1"), cx)) .await .unwrap(); let buffer_c = project_c - .update(&mut cx_c, |p, cx| p.open_buffer((worktree_id, "file1"), cx)) + .update(cx_c, |p, cx| p.open_buffer((worktree_id, "file1"), cx)) .await .unwrap(); - buffer_b.update(&mut cx_b, |buf, cx| buf.edit([0..0], "i-am-b, ", cx)); - buffer_c.update(&mut cx_c, |buf, cx| buf.edit([0..0], "i-am-c, ", cx)); + buffer_b.update(cx_b, |buf, cx| buf.edit([0..0], "i-am-b, ", cx)); + buffer_c.update(cx_c, |buf, cx| buf.edit([0..0], "i-am-c, ", cx)); // Open and edit that buffer as the host. let buffer_a = project_a - .update(&mut cx_a, |p, cx| p.open_buffer((worktree_id, "file1"), cx)) + .update(cx_a, |p, cx| p.open_buffer((worktree_id, "file1"), cx)) .await .unwrap(); buffer_a - .condition(&mut cx_a, |buf, _| buf.text() == "i-am-c, i-am-b, ") + .condition(cx_a, |buf, _| buf.text() == "i-am-c, i-am-b, ") .await; - buffer_a.update(&mut cx_a, |buf, cx| { + buffer_a.update(cx_a, |buf, cx| { buf.edit([buf.len()..buf.len()], "i-am-a", cx) }); // Wait for edits to propagate buffer_a - .condition(&mut cx_a, |buf, _| buf.text() == "i-am-c, i-am-b, i-am-a") + .condition(cx_a, |buf, _| buf.text() == "i-am-c, i-am-b, i-am-a") .await; buffer_b - .condition(&mut cx_b, |buf, _| buf.text() == "i-am-c, i-am-b, i-am-a") + .condition(cx_b, |buf, _| buf.text() == "i-am-c, i-am-b, i-am-a") .await; buffer_c - .condition(&mut cx_c, |buf, _| buf.text() == "i-am-c, i-am-b, i-am-a") + .condition(cx_c, |buf, _| buf.text() == "i-am-c, i-am-b, i-am-a") .await; // Edit the buffer as the host and concurrently save as guest B. 
- let save_b = buffer_b.update(&mut cx_b, |buf, cx| buf.save(cx)); - buffer_a.update(&mut cx_a, |buf, cx| buf.edit([0..0], "hi-a, ", cx)); + let save_b = buffer_b.update(cx_b, |buf, cx| buf.save(cx)); + buffer_a.update(cx_a, |buf, cx| buf.edit([0..0], "hi-a, ", cx)); save_b.await.unwrap(); assert_eq!( fs.load("/a/file1".as_ref()).await.unwrap(), "hi-a, i-am-c, i-am-b, i-am-a" ); - buffer_a.read_with(&cx_a, |buf, _| assert!(!buf.is_dirty())); - buffer_b.read_with(&cx_b, |buf, _| assert!(!buf.is_dirty())); - buffer_c.condition(&cx_c, |buf, _| !buf.is_dirty()).await; + buffer_a.read_with(cx_a, |buf, _| assert!(!buf.is_dirty())); + buffer_b.read_with(cx_b, |buf, _| assert!(!buf.is_dirty())); + buffer_c.condition(cx_c, |buf, _| !buf.is_dirty()).await; // Make changes on host's file system, see those changes on guest worktrees. fs.rename( @@ -1450,15 +1441,15 @@ mod tests { } #[gpui::test(iterations = 10)] - async fn test_buffer_conflict_after_save(mut cx_a: TestAppContext, mut cx_b: TestAppContext) { + async fn test_buffer_conflict_after_save(cx_a: &mut TestAppContext, cx_b: &mut TestAppContext) { cx_a.foreground().forbid_parking(); let lang_registry = Arc::new(LanguageRegistry::new()); let fs = FakeFs::new(cx_a.background()); // Connect to a server as 2 clients. 
let mut server = TestServer::start(cx_a.foreground(), cx_a.background()).await; - let client_a = server.create_client(&mut cx_a, "user_a").await; - let client_b = server.create_client(&mut cx_b, "user_b").await; + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; // Share a project as client A fs.insert_tree( @@ -1480,20 +1471,17 @@ mod tests { ) }); let (worktree_a, _) = project_a - .update(&mut cx_a, |p, cx| { + .update(cx_a, |p, cx| { p.find_or_create_local_worktree("/dir", false, cx) }) .await .unwrap(); worktree_a - .read_with(&cx_a, |tree, _| tree.as_local().unwrap().scan_complete()) + .read_with(cx_a, |tree, _| tree.as_local().unwrap().scan_complete()) .await; - let project_id = project_a.update(&mut cx_a, |p, _| p.next_remote_id()).await; - let worktree_id = worktree_a.read_with(&cx_a, |tree, _| tree.id()); - project_a - .update(&mut cx_a, |p, cx| p.share(cx)) - .await - .unwrap(); + let project_id = project_a.update(cx_a, |p, _| p.next_remote_id()).await; + let worktree_id = worktree_a.read_with(cx_a, |tree, _| tree.id()); + project_a.update(cx_a, |p, cx| p.share(cx)).await.unwrap(); // Join that project as client B let project_b = Project::remote( @@ -1509,44 +1497,41 @@ mod tests { // Open a buffer as client B let buffer_b = project_b - .update(&mut cx_b, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx)) + .update(cx_b, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx)) .await .unwrap(); - buffer_b.update(&mut cx_b, |buf, cx| buf.edit([0..0], "world ", cx)); - buffer_b.read_with(&cx_b, |buf, _| { + buffer_b.update(cx_b, |buf, cx| buf.edit([0..0], "world ", cx)); + buffer_b.read_with(cx_b, |buf, _| { assert!(buf.is_dirty()); assert!(!buf.has_conflict()); }); - buffer_b - .update(&mut cx_b, |buf, cx| buf.save(cx)) - .await - .unwrap(); + buffer_b.update(cx_b, |buf, cx| buf.save(cx)).await.unwrap(); buffer_b .condition(&cx_b, |buffer_b, _| !buffer_b.is_dirty()) .await; - 
buffer_b.read_with(&cx_b, |buf, _| { + buffer_b.read_with(cx_b, |buf, _| { assert!(!buf.has_conflict()); }); - buffer_b.update(&mut cx_b, |buf, cx| buf.edit([0..0], "hello ", cx)); - buffer_b.read_with(&cx_b, |buf, _| { + buffer_b.update(cx_b, |buf, cx| buf.edit([0..0], "hello ", cx)); + buffer_b.read_with(cx_b, |buf, _| { assert!(buf.is_dirty()); assert!(!buf.has_conflict()); }); } #[gpui::test(iterations = 10)] - async fn test_buffer_reloading(mut cx_a: TestAppContext, mut cx_b: TestAppContext) { + async fn test_buffer_reloading(cx_a: &mut TestAppContext, cx_b: &mut TestAppContext) { cx_a.foreground().forbid_parking(); let lang_registry = Arc::new(LanguageRegistry::new()); let fs = FakeFs::new(cx_a.background()); // Connect to a server as 2 clients. let mut server = TestServer::start(cx_a.foreground(), cx_a.background()).await; - let client_a = server.create_client(&mut cx_a, "user_a").await; - let client_b = server.create_client(&mut cx_b, "user_b").await; + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; // Share a project as client A fs.insert_tree( @@ -1568,20 +1553,17 @@ mod tests { ) }); let (worktree_a, _) = project_a - .update(&mut cx_a, |p, cx| { + .update(cx_a, |p, cx| { p.find_or_create_local_worktree("/dir", false, cx) }) .await .unwrap(); worktree_a - .read_with(&cx_a, |tree, _| tree.as_local().unwrap().scan_complete()) + .read_with(cx_a, |tree, _| tree.as_local().unwrap().scan_complete()) .await; - let project_id = project_a.update(&mut cx_a, |p, _| p.next_remote_id()).await; - let worktree_id = worktree_a.read_with(&cx_a, |tree, _| tree.id()); - project_a - .update(&mut cx_a, |p, cx| p.share(cx)) - .await - .unwrap(); + let project_id = project_a.update(cx_a, |p, _| p.next_remote_id()).await; + let worktree_id = worktree_a.read_with(cx_a, |tree, _| tree.id()); + project_a.update(cx_a, |p, cx| p.share(cx)).await.unwrap(); // Join that project as client B let project_b = 
Project::remote( @@ -1594,14 +1576,14 @@ mod tests { ) .await .unwrap(); - let _worktree_b = project_b.update(&mut cx_b, |p, cx| p.worktrees(cx).next().unwrap()); + let _worktree_b = project_b.update(cx_b, |p, cx| p.worktrees(cx).next().unwrap()); // Open a buffer as client B let buffer_b = project_b - .update(&mut cx_b, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx)) + .update(cx_b, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx)) .await .unwrap(); - buffer_b.read_with(&cx_b, |buf, _| { + buffer_b.read_with(cx_b, |buf, _| { assert!(!buf.is_dirty()); assert!(!buf.has_conflict()); }); @@ -1614,15 +1596,15 @@ mod tests { buf.text() == "new contents" && !buf.is_dirty() }) .await; - buffer_b.read_with(&cx_b, |buf, _| { + buffer_b.read_with(cx_b, |buf, _| { assert!(!buf.has_conflict()); }); } #[gpui::test(iterations = 10)] async fn test_editing_while_guest_opens_buffer( - mut cx_a: TestAppContext, - mut cx_b: TestAppContext, + cx_a: &mut TestAppContext, + cx_b: &mut TestAppContext, ) { cx_a.foreground().forbid_parking(); let lang_registry = Arc::new(LanguageRegistry::new()); @@ -1630,8 +1612,8 @@ mod tests { // Connect to a server as 2 clients. 
let mut server = TestServer::start(cx_a.foreground(), cx_a.background()).await; - let client_a = server.create_client(&mut cx_a, "user_a").await; - let client_b = server.create_client(&mut cx_b, "user_b").await; + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; // Share a project as client A fs.insert_tree( @@ -1652,20 +1634,17 @@ mod tests { ) }); let (worktree_a, _) = project_a - .update(&mut cx_a, |p, cx| { + .update(cx_a, |p, cx| { p.find_or_create_local_worktree("/dir", false, cx) }) .await .unwrap(); worktree_a - .read_with(&cx_a, |tree, _| tree.as_local().unwrap().scan_complete()) + .read_with(cx_a, |tree, _| tree.as_local().unwrap().scan_complete()) .await; - let project_id = project_a.update(&mut cx_a, |p, _| p.next_remote_id()).await; - let worktree_id = worktree_a.read_with(&cx_a, |tree, _| tree.id()); - project_a - .update(&mut cx_a, |p, cx| p.share(cx)) - .await - .unwrap(); + let project_id = project_a.update(cx_a, |p, _| p.next_remote_id()).await; + let worktree_id = worktree_a.read_with(cx_a, |tree, _| tree.id()); + project_a.update(cx_a, |p, cx| p.share(cx)).await.unwrap(); // Join that project as client B let project_b = Project::remote( @@ -1681,30 +1660,30 @@ mod tests { // Open a buffer as client A let buffer_a = project_a - .update(&mut cx_a, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx)) + .update(cx_a, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx)) .await .unwrap(); // Start opening the same buffer as client B let buffer_b = cx_b .background() - .spawn(project_b.update(&mut cx_b, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx))); + .spawn(project_b.update(cx_b, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx))); // Edit the buffer as client A while client B is still opening it. 
cx_b.background().simulate_random_delay().await; - buffer_a.update(&mut cx_a, |buf, cx| buf.edit([0..0], "X", cx)); + buffer_a.update(cx_a, |buf, cx| buf.edit([0..0], "X", cx)); cx_b.background().simulate_random_delay().await; - buffer_a.update(&mut cx_a, |buf, cx| buf.edit([1..1], "Y", cx)); + buffer_a.update(cx_a, |buf, cx| buf.edit([1..1], "Y", cx)); - let text = buffer_a.read_with(&cx_a, |buf, _| buf.text()); + let text = buffer_a.read_with(cx_a, |buf, _| buf.text()); let buffer_b = buffer_b.await.unwrap(); buffer_b.condition(&cx_b, |buf, _| buf.text() == text).await; } #[gpui::test(iterations = 10)] async fn test_leaving_worktree_while_opening_buffer( - mut cx_a: TestAppContext, - mut cx_b: TestAppContext, + cx_a: &mut TestAppContext, + cx_b: &mut TestAppContext, ) { cx_a.foreground().forbid_parking(); let lang_registry = Arc::new(LanguageRegistry::new()); @@ -1712,8 +1691,8 @@ mod tests { // Connect to a server as 2 clients. let mut server = TestServer::start(cx_a.foreground(), cx_a.background()).await; - let client_a = server.create_client(&mut cx_a, "user_a").await; - let client_b = server.create_client(&mut cx_b, "user_b").await; + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; // Share a project as client A fs.insert_tree( @@ -1734,20 +1713,17 @@ mod tests { ) }); let (worktree_a, _) = project_a - .update(&mut cx_a, |p, cx| { + .update(cx_a, |p, cx| { p.find_or_create_local_worktree("/dir", false, cx) }) .await .unwrap(); worktree_a - .read_with(&cx_a, |tree, _| tree.as_local().unwrap().scan_complete()) + .read_with(cx_a, |tree, _| tree.as_local().unwrap().scan_complete()) .await; - let project_id = project_a.update(&mut cx_a, |p, _| p.next_remote_id()).await; - let worktree_id = worktree_a.read_with(&cx_a, |tree, _| tree.id()); - project_a - .update(&mut cx_a, |p, cx| p.share(cx)) - .await - .unwrap(); + let project_id = project_a.update(cx_a, |p, _| p.next_remote_id()).await; + let 
worktree_id = worktree_a.read_with(cx_a, |tree, _| tree.id()); + project_a.update(cx_a, |p, cx| p.share(cx)).await.unwrap(); // Join that project as client B let project_b = Project::remote( @@ -1769,7 +1745,7 @@ mod tests { // Begin opening a buffer as client B, but leave the project before the open completes. let buffer_b = cx_b .background() - .spawn(project_b.update(&mut cx_b, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx))); + .spawn(project_b.update(cx_b, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx))); cx_b.update(|_| drop(project_b)); drop(buffer_b); @@ -1780,15 +1756,15 @@ mod tests { } #[gpui::test(iterations = 10)] - async fn test_peer_disconnection(mut cx_a: TestAppContext, mut cx_b: TestAppContext) { + async fn test_peer_disconnection(cx_a: &mut TestAppContext, cx_b: &mut TestAppContext) { cx_a.foreground().forbid_parking(); let lang_registry = Arc::new(LanguageRegistry::new()); let fs = FakeFs::new(cx_a.background()); // Connect to a server as 2 clients. let mut server = TestServer::start(cx_a.foreground(), cx_a.background()).await; - let client_a = server.create_client(&mut cx_a, "user_a").await; - let client_b = server.create_client(&mut cx_b, "user_b").await; + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; // Share a project as client A fs.insert_tree( @@ -1810,19 +1786,19 @@ mod tests { ) }); let (worktree_a, _) = project_a - .update(&mut cx_a, |p, cx| { + .update(cx_a, |p, cx| { p.find_or_create_local_worktree("/a", false, cx) }) .await .unwrap(); worktree_a - .read_with(&cx_a, |tree, _| tree.as_local().unwrap().scan_complete()) + .read_with(cx_a, |tree, _| tree.as_local().unwrap().scan_complete()) .await; let project_id = project_a - .update(&mut cx_a, |project, _| project.next_remote_id()) + .update(cx_a, |project, _| project.next_remote_id()) .await; project_a - .update(&mut cx_a, |project, cx| project.share(cx)) + .update(cx_a, |project, cx| project.share(cx)) 
.await .unwrap(); @@ -1852,8 +1828,8 @@ mod tests { #[gpui::test(iterations = 10)] async fn test_collaborating_with_diagnostics( - mut cx_a: TestAppContext, - mut cx_b: TestAppContext, + cx_a: &mut TestAppContext, + cx_b: &mut TestAppContext, ) { cx_a.foreground().forbid_parking(); let mut lang_registry = Arc::new(LanguageRegistry::new()); @@ -1875,8 +1851,8 @@ mod tests { // Connect to a server as 2 clients. let mut server = TestServer::start(cx_a.foreground(), cx_a.background()).await; - let client_a = server.create_client(&mut cx_a, "user_a").await; - let client_b = server.create_client(&mut cx_b, "user_b").await; + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; // Share a project as client A fs.insert_tree( @@ -1898,25 +1874,22 @@ mod tests { ) }); let (worktree_a, _) = project_a - .update(&mut cx_a, |p, cx| { + .update(cx_a, |p, cx| { p.find_or_create_local_worktree("/a", false, cx) }) .await .unwrap(); worktree_a - .read_with(&cx_a, |tree, _| tree.as_local().unwrap().scan_complete()) + .read_with(cx_a, |tree, _| tree.as_local().unwrap().scan_complete()) .await; - let project_id = project_a.update(&mut cx_a, |p, _| p.next_remote_id()).await; - let worktree_id = worktree_a.read_with(&cx_a, |tree, _| tree.id()); - project_a - .update(&mut cx_a, |p, cx| p.share(cx)) - .await - .unwrap(); + let project_id = project_a.update(cx_a, |p, _| p.next_remote_id()).await; + let worktree_id = worktree_a.read_with(cx_a, |tree, _| tree.id()); + project_a.update(cx_a, |p, cx| p.share(cx)).await.unwrap(); // Cause the language server to start. 
let _ = cx_a .background() - .spawn(project_a.update(&mut cx_a, |project, cx| { + .spawn(project_a.update(cx_a, |project, cx| { project.open_buffer( ProjectPath { worktree_id, @@ -1972,7 +1945,7 @@ mod tests { .await .unwrap(); - project_b.read_with(&cx_b, |project, cx| { + project_b.read_with(cx_b, |project, cx| { assert_eq!( project.diagnostic_summaries(cx).collect::>(), &[( @@ -2035,11 +2008,11 @@ mod tests { // Open the file with the errors on client B. They should be present. let buffer_b = cx_b .background() - .spawn(project_b.update(&mut cx_b, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))) + .spawn(project_b.update(cx_b, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))) .await .unwrap(); - buffer_b.read_with(&cx_b, |buffer, _| { + buffer_b.read_with(cx_b, |buffer, _| { assert_eq!( buffer .snapshot() @@ -2074,8 +2047,8 @@ mod tests { #[gpui::test(iterations = 10)] async fn test_collaborating_with_completion( - mut cx_a: TestAppContext, - mut cx_b: TestAppContext, + cx_a: &mut TestAppContext, + cx_b: &mut TestAppContext, ) { cx_a.foreground().forbid_parking(); let mut lang_registry = Arc::new(LanguageRegistry::new()); @@ -2104,8 +2077,8 @@ mod tests { // Connect to a server as 2 clients. 
let mut server = TestServer::start(cx_a.foreground(), cx_a.background()).await; - let client_a = server.create_client(&mut cx_a, "user_a").await; - let client_b = server.create_client(&mut cx_b, "user_b").await; + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; // Share a project as client A fs.insert_tree( @@ -2127,20 +2100,17 @@ mod tests { ) }); let (worktree_a, _) = project_a - .update(&mut cx_a, |p, cx| { + .update(cx_a, |p, cx| { p.find_or_create_local_worktree("/a", false, cx) }) .await .unwrap(); worktree_a - .read_with(&cx_a, |tree, _| tree.as_local().unwrap().scan_complete()) + .read_with(cx_a, |tree, _| tree.as_local().unwrap().scan_complete()) .await; - let project_id = project_a.update(&mut cx_a, |p, _| p.next_remote_id()).await; - let worktree_id = worktree_a.read_with(&cx_a, |tree, _| tree.id()); - project_a - .update(&mut cx_a, |p, cx| p.share(cx)) - .await - .unwrap(); + let project_id = project_a.update(cx_a, |p, _| p.next_remote_id()).await; + let worktree_id = worktree_a.read_with(cx_a, |tree, _| tree.id()); + project_a.update(cx_a, |p, cx| p.share(cx)).await.unwrap(); // Join the worktree as client B. let project_b = Project::remote( @@ -2156,9 +2126,7 @@ mod tests { // Open a file in an editor as the guest. let buffer_b = project_b - .update(&mut cx_b, |p, cx| { - p.open_buffer((worktree_id, "main.rs"), cx) - }) + .update(cx_b, |p, cx| p.open_buffer((worktree_id, "main.rs"), cx)) .await .unwrap(); let (window_b, _) = cx_b.add_window(|_| EmptyView); @@ -2177,7 +2145,7 @@ mod tests { .await; // Type a completion trigger character as the guest. - editor_b.update(&mut cx_b, |editor, cx| { + editor_b.update(cx_b, |editor, cx| { editor.select_ranges([13..13], None, cx); editor.handle_input(&Input(".".into()), cx); cx.focus(&editor_b); @@ -2233,9 +2201,7 @@ mod tests { // Open the buffer on the host. 
let buffer_a = project_a - .update(&mut cx_a, |p, cx| { - p.open_buffer((worktree_id, "main.rs"), cx) - }) + .update(cx_a, |p, cx| p.open_buffer((worktree_id, "main.rs"), cx)) .await .unwrap(); buffer_a @@ -2246,7 +2212,7 @@ mod tests { editor_b .condition(&cx_b, |editor, _| editor.context_menu_visible()) .await; - editor_b.update(&mut cx_b, |editor, cx| { + editor_b.update(cx_b, |editor, cx| { editor.confirm_completion(&ConfirmCompletion(Some(0)), cx); assert_eq!(editor.text(cx), "fn main() { a.first_method() }"); }); @@ -2290,7 +2256,7 @@ mod tests { } #[gpui::test(iterations = 10)] - async fn test_formatting_buffer(mut cx_a: TestAppContext, mut cx_b: TestAppContext) { + async fn test_formatting_buffer(cx_a: &mut TestAppContext, cx_b: &mut TestAppContext) { cx_a.foreground().forbid_parking(); let mut lang_registry = Arc::new(LanguageRegistry::new()); let fs = FakeFs::new(cx_a.background()); @@ -2311,8 +2277,8 @@ mod tests { // Connect to a server as 2 clients. let mut server = TestServer::start(cx_a.foreground(), cx_a.background()).await; - let client_a = server.create_client(&mut cx_a, "user_a").await; - let client_b = server.create_client(&mut cx_b, "user_b").await; + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; // Share a project as client A fs.insert_tree( @@ -2333,20 +2299,17 @@ mod tests { ) }); let (worktree_a, _) = project_a - .update(&mut cx_a, |p, cx| { + .update(cx_a, |p, cx| { p.find_or_create_local_worktree("/a", false, cx) }) .await .unwrap(); worktree_a - .read_with(&cx_a, |tree, _| tree.as_local().unwrap().scan_complete()) + .read_with(cx_a, |tree, _| tree.as_local().unwrap().scan_complete()) .await; - let project_id = project_a.update(&mut cx_a, |p, _| p.next_remote_id()).await; - let worktree_id = worktree_a.read_with(&cx_a, |tree, _| tree.id()); - project_a - .update(&mut cx_a, |p, cx| p.share(cx)) - .await - .unwrap(); + let project_id = project_a.update(cx_a, |p, _| 
p.next_remote_id()).await; + let worktree_id = worktree_a.read_with(cx_a, |tree, _| tree.id()); + project_a.update(cx_a, |p, cx| p.share(cx)).await.unwrap(); // Join the worktree as client B. let project_b = Project::remote( @@ -2362,11 +2325,11 @@ mod tests { let buffer_b = cx_b .background() - .spawn(project_b.update(&mut cx_b, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))) + .spawn(project_b.update(cx_b, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))) .await .unwrap(); - let format = project_b.update(&mut cx_b, |project, cx| { + let format = project_b.update(cx_b, |project, cx| { project.format(HashSet::from_iter([buffer_b.clone()]), true, cx) }); @@ -2386,13 +2349,13 @@ mod tests { format.await.unwrap(); assert_eq!( - buffer_b.read_with(&cx_b, |buffer, _| buffer.text()), + buffer_b.read_with(cx_b, |buffer, _| buffer.text()), "let honey = two" ); } #[gpui::test(iterations = 10)] - async fn test_definition(mut cx_a: TestAppContext, mut cx_b: TestAppContext) { + async fn test_definition(cx_a: &mut TestAppContext, cx_b: &mut TestAppContext) { cx_a.foreground().forbid_parking(); let mut lang_registry = Arc::new(LanguageRegistry::new()); let fs = FakeFs::new(cx_a.background()); @@ -2428,8 +2391,8 @@ mod tests { // Connect to a server as 2 clients. 
let mut server = TestServer::start(cx_a.foreground(), cx_a.background()).await; - let client_a = server.create_client(&mut cx_a, "user_a").await; - let client_b = server.create_client(&mut cx_b, "user_b").await; + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; // Share a project as client A let project_a = cx_a.update(|cx| { @@ -2442,20 +2405,17 @@ mod tests { ) }); let (worktree_a, _) = project_a - .update(&mut cx_a, |p, cx| { + .update(cx_a, |p, cx| { p.find_or_create_local_worktree("/root-1", false, cx) }) .await .unwrap(); worktree_a - .read_with(&cx_a, |tree, _| tree.as_local().unwrap().scan_complete()) + .read_with(cx_a, |tree, _| tree.as_local().unwrap().scan_complete()) .await; - let project_id = project_a.update(&mut cx_a, |p, _| p.next_remote_id()).await; - let worktree_id = worktree_a.read_with(&cx_a, |tree, _| tree.id()); - project_a - .update(&mut cx_a, |p, cx| p.share(cx)) - .await - .unwrap(); + let project_id = project_a.update(cx_a, |p, _| p.next_remote_id()).await; + let worktree_id = worktree_a.read_with(cx_a, |tree, _| tree.id()); + project_a.update(cx_a, |p, cx| p.share(cx)).await.unwrap(); // Join the worktree as client B. let project_b = Project::remote( @@ -2472,12 +2432,12 @@ mod tests { // Open the file on client B. let buffer_b = cx_b .background() - .spawn(project_b.update(&mut cx_b, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))) + .spawn(project_b.update(cx_b, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))) .await .unwrap(); // Request the definition of a symbol as the guest. 
- let definitions_1 = project_b.update(&mut cx_b, |p, cx| p.definition(&buffer_b, 23, cx)); + let definitions_1 = project_b.update(cx_b, |p, cx| p.definition(&buffer_b, 23, cx)); let mut fake_language_server = fake_language_servers.next().await.unwrap(); fake_language_server.handle_request::(|_, _| { @@ -2504,7 +2464,7 @@ mod tests { // Try getting more definitions for the same buffer, ensuring the buffer gets reused from // the previous call to `definition`. - let definitions_2 = project_b.update(&mut cx_b, |p, cx| p.definition(&buffer_b, 33, cx)); + let definitions_2 = project_b.update(cx_b, |p, cx| p.definition(&buffer_b, 33, cx)); fake_language_server.handle_request::(|_, _| { Some(lsp::GotoDefinitionResponse::Scalar(lsp::Location::new( lsp::Url::from_file_path("/root-2/b.rs").unwrap(), @@ -2530,7 +2490,7 @@ mod tests { } #[gpui::test(iterations = 10)] - async fn test_references(mut cx_a: TestAppContext, mut cx_b: TestAppContext) { + async fn test_references(cx_a: &mut TestAppContext, cx_b: &mut TestAppContext) { cx_a.foreground().forbid_parking(); let mut lang_registry = Arc::new(LanguageRegistry::new()); let fs = FakeFs::new(cx_a.background()); @@ -2567,8 +2527,8 @@ mod tests { // Connect to a server as 2 clients. 
let mut server = TestServer::start(cx_a.foreground(), cx_a.background()).await; - let client_a = server.create_client(&mut cx_a, "user_a").await; - let client_b = server.create_client(&mut cx_b, "user_b").await; + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; // Share a project as client A let project_a = cx_a.update(|cx| { @@ -2581,20 +2541,17 @@ mod tests { ) }); let (worktree_a, _) = project_a - .update(&mut cx_a, |p, cx| { + .update(cx_a, |p, cx| { p.find_or_create_local_worktree("/root-1", false, cx) }) .await .unwrap(); worktree_a - .read_with(&cx_a, |tree, _| tree.as_local().unwrap().scan_complete()) + .read_with(cx_a, |tree, _| tree.as_local().unwrap().scan_complete()) .await; - let project_id = project_a.update(&mut cx_a, |p, _| p.next_remote_id()).await; - let worktree_id = worktree_a.read_with(&cx_a, |tree, _| tree.id()); - project_a - .update(&mut cx_a, |p, cx| p.share(cx)) - .await - .unwrap(); + let project_id = project_a.update(cx_a, |p, _| p.next_remote_id()).await; + let worktree_id = worktree_a.read_with(cx_a, |tree, _| tree.id()); + project_a.update(cx_a, |p, cx| p.share(cx)).await.unwrap(); // Join the worktree as client B. let project_b = Project::remote( @@ -2611,14 +2568,12 @@ mod tests { // Open the file on client B. let buffer_b = cx_b .background() - .spawn(project_b.update(&mut cx_b, |p, cx| { - p.open_buffer((worktree_id, "one.rs"), cx) - })) + .spawn(project_b.update(cx_b, |p, cx| p.open_buffer((worktree_id, "one.rs"), cx))) .await .unwrap(); // Request references to a symbol as the guest. 
- let references = project_b.update(&mut cx_b, |p, cx| p.references(&buffer_b, 7, cx)); + let references = project_b.update(cx_b, |p, cx| p.references(&buffer_b, 7, cx)); let mut fake_language_server = fake_language_servers.next().await.unwrap(); fake_language_server.handle_request::(|params, _| { @@ -2666,7 +2621,7 @@ mod tests { } #[gpui::test(iterations = 10)] - async fn test_project_search(mut cx_a: TestAppContext, mut cx_b: TestAppContext) { + async fn test_project_search(cx_a: &mut TestAppContext, cx_b: &mut TestAppContext) { cx_a.foreground().forbid_parking(); let lang_registry = Arc::new(LanguageRegistry::new()); let fs = FakeFs::new(cx_a.background()); @@ -2691,8 +2646,8 @@ mod tests { // Connect to a server as 2 clients. let mut server = TestServer::start(cx_a.foreground(), cx_a.background()).await; - let client_a = server.create_client(&mut cx_a, "user_a").await; - let client_b = server.create_client(&mut cx_b, "user_b").await; + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; // Share a project as client A let project_a = cx_a.update(|cx| { @@ -2704,33 +2659,30 @@ mod tests { cx, ) }); - let project_id = project_a.update(&mut cx_a, |p, _| p.next_remote_id()).await; + let project_id = project_a.update(cx_a, |p, _| p.next_remote_id()).await; let (worktree_1, _) = project_a - .update(&mut cx_a, |p, cx| { + .update(cx_a, |p, cx| { p.find_or_create_local_worktree("/root-1", false, cx) }) .await .unwrap(); worktree_1 - .read_with(&cx_a, |tree, _| tree.as_local().unwrap().scan_complete()) + .read_with(cx_a, |tree, _| tree.as_local().unwrap().scan_complete()) .await; let (worktree_2, _) = project_a - .update(&mut cx_a, |p, cx| { + .update(cx_a, |p, cx| { p.find_or_create_local_worktree("/root-2", false, cx) }) .await .unwrap(); worktree_2 - .read_with(&cx_a, |tree, _| tree.as_local().unwrap().scan_complete()) + .read_with(cx_a, |tree, _| tree.as_local().unwrap().scan_complete()) .await; 
eprintln!("sharing"); - project_a - .update(&mut cx_a, |p, cx| p.share(cx)) - .await - .unwrap(); + project_a.update(cx_a, |p, cx| p.share(cx)).await.unwrap(); // Join the worktree as client B. let project_b = Project::remote( @@ -2745,7 +2697,7 @@ mod tests { .unwrap(); let results = project_b - .update(&mut cx_b, |project, cx| { + .update(cx_b, |project, cx| { project.search(SearchQuery::text("world", false, false), cx) }) .await @@ -2754,7 +2706,7 @@ mod tests { let mut ranges_by_path = results .into_iter() .map(|(buffer, ranges)| { - buffer.read_with(&cx_b, |buffer, cx| { + buffer.read_with(cx_b, |buffer, cx| { let path = buffer.file().unwrap().full_path(cx); let offset_ranges = ranges .into_iter() @@ -2778,7 +2730,7 @@ mod tests { } #[gpui::test(iterations = 10)] - async fn test_document_highlights(mut cx_a: TestAppContext, mut cx_b: TestAppContext) { + async fn test_document_highlights(cx_a: &mut TestAppContext, cx_b: &mut TestAppContext) { cx_a.foreground().forbid_parking(); let lang_registry = Arc::new(LanguageRegistry::new()); let fs = FakeFs::new(cx_a.background()); @@ -2805,8 +2757,8 @@ mod tests { // Connect to a server as 2 clients. 
let mut server = TestServer::start(cx_a.foreground(), cx_a.background()).await; - let client_a = server.create_client(&mut cx_a, "user_a").await; - let client_b = server.create_client(&mut cx_b, "user_b").await; + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; // Share a project as client A let project_a = cx_a.update(|cx| { @@ -2819,20 +2771,17 @@ mod tests { ) }); let (worktree_a, _) = project_a - .update(&mut cx_a, |p, cx| { + .update(cx_a, |p, cx| { p.find_or_create_local_worktree("/root-1", false, cx) }) .await .unwrap(); worktree_a - .read_with(&cx_a, |tree, _| tree.as_local().unwrap().scan_complete()) + .read_with(cx_a, |tree, _| tree.as_local().unwrap().scan_complete()) .await; - let project_id = project_a.update(&mut cx_a, |p, _| p.next_remote_id()).await; - let worktree_id = worktree_a.read_with(&cx_a, |tree, _| tree.id()); - project_a - .update(&mut cx_a, |p, cx| p.share(cx)) - .await - .unwrap(); + let project_id = project_a.update(cx_a, |p, _| p.next_remote_id()).await; + let worktree_id = worktree_a.read_with(cx_a, |tree, _| tree.id()); + project_a.update(cx_a, |p, cx| p.share(cx)).await.unwrap(); // Join the worktree as client B. let project_b = Project::remote( @@ -2849,15 +2798,12 @@ mod tests { // Open the file on client B. let buffer_b = cx_b .background() - .spawn(project_b.update(&mut cx_b, |p, cx| { - p.open_buffer((worktree_id, "main.rs"), cx) - })) + .spawn(project_b.update(cx_b, |p, cx| p.open_buffer((worktree_id, "main.rs"), cx))) .await .unwrap(); // Request document highlights as the guest. 
- let highlights = - project_b.update(&mut cx_b, |p, cx| p.document_highlights(&buffer_b, 34, cx)); + let highlights = project_b.update(cx_b, |p, cx| p.document_highlights(&buffer_b, 34, cx)); let mut fake_language_server = fake_language_servers.next().await.unwrap(); fake_language_server.handle_request::( @@ -2901,7 +2847,7 @@ mod tests { ); let highlights = highlights.await.unwrap(); - buffer_b.read_with(&cx_b, |buffer, _| { + buffer_b.read_with(cx_b, |buffer, _| { let snapshot = buffer.snapshot(); let highlights = highlights @@ -2920,7 +2866,7 @@ mod tests { } #[gpui::test(iterations = 10)] - async fn test_project_symbols(mut cx_a: TestAppContext, mut cx_b: TestAppContext) { + async fn test_project_symbols(cx_a: &mut TestAppContext, cx_b: &mut TestAppContext) { cx_a.foreground().forbid_parking(); let mut lang_registry = Arc::new(LanguageRegistry::new()); let fs = FakeFs::new(cx_a.background()); @@ -2957,8 +2903,8 @@ mod tests { // Connect to a server as 2 clients. let mut server = TestServer::start(cx_a.foreground(), cx_a.background()).await; - let client_a = server.create_client(&mut cx_a, "user_a").await; - let client_b = server.create_client(&mut cx_b, "user_b").await; + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; // Share a project as client A let project_a = cx_a.update(|cx| { @@ -2971,20 +2917,17 @@ mod tests { ) }); let (worktree_a, _) = project_a - .update(&mut cx_a, |p, cx| { + .update(cx_a, |p, cx| { p.find_or_create_local_worktree("/code/crate-1", false, cx) }) .await .unwrap(); worktree_a - .read_with(&cx_a, |tree, _| tree.as_local().unwrap().scan_complete()) + .read_with(cx_a, |tree, _| tree.as_local().unwrap().scan_complete()) .await; - let project_id = project_a.update(&mut cx_a, |p, _| p.next_remote_id()).await; - let worktree_id = worktree_a.read_with(&cx_a, |tree, _| tree.id()); - project_a - .update(&mut cx_a, |p, cx| p.share(cx)) - .await - .unwrap(); + let project_id 
= project_a.update(cx_a, |p, _| p.next_remote_id()).await; + let worktree_id = worktree_a.read_with(cx_a, |tree, _| tree.id()); + project_a.update(cx_a, |p, cx| p.share(cx)).await.unwrap(); // Join the worktree as client B. let project_b = Project::remote( @@ -3001,14 +2944,12 @@ mod tests { // Cause the language server to start. let _buffer = cx_b .background() - .spawn(project_b.update(&mut cx_b, |p, cx| { - p.open_buffer((worktree_id, "one.rs"), cx) - })) + .spawn(project_b.update(cx_b, |p, cx| p.open_buffer((worktree_id, "one.rs"), cx))) .await .unwrap(); // Request the definition of a symbol as the guest. - let symbols = project_b.update(&mut cx_b, |p, cx| p.symbols("two", cx)); + let symbols = project_b.update(cx_b, |p, cx| p.symbols("two", cx)); let mut fake_language_server = fake_language_servers.next().await.unwrap(); fake_language_server.handle_request::(|_, _| { #[allow(deprecated)] @@ -3031,12 +2972,12 @@ mod tests { // Open one of the returned symbols. let buffer_b_2 = project_b - .update(&mut cx_b, |project, cx| { + .update(cx_b, |project, cx| { project.open_buffer_for_symbol(&symbols[0], cx) }) .await .unwrap(); - buffer_b_2.read_with(&cx_b, |buffer, _| { + buffer_b_2.read_with(cx_b, |buffer, _| { assert_eq!( buffer.file().unwrap().path().as_ref(), Path::new("../crate-2/two.rs") @@ -3047,7 +2988,7 @@ mod tests { let mut fake_symbol = symbols[0].clone(); fake_symbol.path = Path::new("/code/secrets").into(); let error = project_b - .update(&mut cx_b, |project, cx| { + .update(cx_b, |project, cx| { project.open_buffer_for_symbol(&fake_symbol, cx) }) .await @@ -3057,8 +2998,8 @@ mod tests { #[gpui::test(iterations = 10)] async fn test_open_buffer_while_getting_definition_pointing_to_it( - mut cx_a: TestAppContext, - mut cx_b: TestAppContext, + cx_a: &mut TestAppContext, + cx_b: &mut TestAppContext, mut rng: StdRng, ) { cx_a.foreground().forbid_parking(); @@ -3091,8 +3032,8 @@ mod tests { // Connect to a server as 2 clients. 
let mut server = TestServer::start(cx_a.foreground(), cx_a.background()).await; - let client_a = server.create_client(&mut cx_a, "user_a").await; - let client_b = server.create_client(&mut cx_b, "user_b").await; + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; // Share a project as client A let project_a = cx_a.update(|cx| { @@ -3106,20 +3047,17 @@ mod tests { }); let (worktree_a, _) = project_a - .update(&mut cx_a, |p, cx| { + .update(cx_a, |p, cx| { p.find_or_create_local_worktree("/root", false, cx) }) .await .unwrap(); worktree_a - .read_with(&cx_a, |tree, _| tree.as_local().unwrap().scan_complete()) + .read_with(cx_a, |tree, _| tree.as_local().unwrap().scan_complete()) .await; - let project_id = project_a.update(&mut cx_a, |p, _| p.next_remote_id()).await; - let worktree_id = worktree_a.read_with(&cx_a, |tree, _| tree.id()); - project_a - .update(&mut cx_a, |p, cx| p.share(cx)) - .await - .unwrap(); + let project_id = project_a.update(cx_a, |p, _| p.next_remote_id()).await; + let worktree_id = worktree_a.read_with(cx_a, |tree, _| tree.id()); + project_a.update(cx_a, |p, cx| p.share(cx)).await.unwrap(); // Join the worktree as client B. 
let project_b = Project::remote( @@ -3135,20 +3073,18 @@ mod tests { let buffer_b1 = cx_b .background() - .spawn(project_b.update(&mut cx_b, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))) + .spawn(project_b.update(cx_b, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))) .await .unwrap(); let definitions; let buffer_b2; if rng.gen() { - definitions = project_b.update(&mut cx_b, |p, cx| p.definition(&buffer_b1, 23, cx)); - buffer_b2 = - project_b.update(&mut cx_b, |p, cx| p.open_buffer((worktree_id, "b.rs"), cx)); + definitions = project_b.update(cx_b, |p, cx| p.definition(&buffer_b1, 23, cx)); + buffer_b2 = project_b.update(cx_b, |p, cx| p.open_buffer((worktree_id, "b.rs"), cx)); } else { - buffer_b2 = - project_b.update(&mut cx_b, |p, cx| p.open_buffer((worktree_id, "b.rs"), cx)); - definitions = project_b.update(&mut cx_b, |p, cx| p.definition(&buffer_b1, 23, cx)); + buffer_b2 = project_b.update(cx_b, |p, cx| p.open_buffer((worktree_id, "b.rs"), cx)); + definitions = project_b.update(cx_b, |p, cx| p.definition(&buffer_b1, 23, cx)); } let mut fake_language_server = fake_language_servers.next().await.unwrap(); @@ -3167,8 +3103,8 @@ mod tests { #[gpui::test(iterations = 10)] async fn test_collaborating_with_code_actions( - mut cx_a: TestAppContext, - mut cx_b: TestAppContext, + cx_a: &mut TestAppContext, + cx_b: &mut TestAppContext, ) { cx_a.foreground().forbid_parking(); let mut lang_registry = Arc::new(LanguageRegistry::new()); @@ -3192,8 +3128,8 @@ mod tests { // Connect to a server as 2 clients. 
let mut server = TestServer::start(cx_a.foreground(), cx_a.background()).await; - let client_a = server.create_client(&mut cx_a, "user_a").await; - let client_b = server.create_client(&mut cx_b, "user_b").await; + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; // Share a project as client A fs.insert_tree( @@ -3215,20 +3151,17 @@ mod tests { ) }); let (worktree_a, _) = project_a - .update(&mut cx_a, |p, cx| { + .update(cx_a, |p, cx| { p.find_or_create_local_worktree("/a", false, cx) }) .await .unwrap(); worktree_a - .read_with(&cx_a, |tree, _| tree.as_local().unwrap().scan_complete()) + .read_with(cx_a, |tree, _| tree.as_local().unwrap().scan_complete()) .await; - let project_id = project_a.update(&mut cx_a, |p, _| p.next_remote_id()).await; - let worktree_id = worktree_a.read_with(&cx_a, |tree, _| tree.id()); - project_a - .update(&mut cx_a, |p, cx| p.share(cx)) - .await - .unwrap(); + let project_id = project_a.update(cx_a, |p, _| p.next_remote_id()).await; + let worktree_id = worktree_a.read_with(cx_a, |tree, _| tree.id()); + project_a.update(cx_a, |p, cx| p.share(cx)).await.unwrap(); // Join the worktree as client B. let project_b = Project::remote( @@ -3250,7 +3183,7 @@ mod tests { let (_window_b, workspace_b) = cx_b.add_window(|cx| Workspace::new(¶ms, cx)); let editor_b = workspace_b - .update(&mut cx_b, |workspace, cx| { + .update(cx_b, |workspace, cx| { workspace.open_path((worktree_id, "main.rs").into(), cx) }) .await @@ -3273,7 +3206,7 @@ mod tests { .await; // Move cursor to a location that contains code actions. - editor_b.update(&mut cx_b, |editor, cx| { + editor_b.update(cx_b, |editor, cx| { editor.select_ranges([Point::new(1, 31)..Point::new(1, 31)], None, cx); cx.focus(&editor_b); }); @@ -3335,7 +3268,7 @@ mod tests { .await; // Toggle code actions and wait for them to display. 
- editor_b.update(&mut cx_b, |editor, cx| { + editor_b.update(cx_b, |editor, cx| { editor.toggle_code_actions(&ToggleCodeActions(false), cx); }); editor_b @@ -3346,7 +3279,7 @@ mod tests { // Confirming the code action will trigger a resolve request. let confirm_action = workspace_b - .update(&mut cx_b, |workspace, cx| { + .update(cx_b, |workspace, cx| { Editor::confirm_code_action(workspace, &ConfirmCodeAction(Some(0)), cx) }) .unwrap(); @@ -3388,14 +3321,14 @@ mod tests { // After the action is confirmed, an editor containing both modified files is opened. confirm_action.await.unwrap(); - let code_action_editor = workspace_b.read_with(&cx_b, |workspace, cx| { + let code_action_editor = workspace_b.read_with(cx_b, |workspace, cx| { workspace .active_item(cx) .unwrap() .downcast::() .unwrap() }); - code_action_editor.update(&mut cx_b, |editor, cx| { + code_action_editor.update(cx_b, |editor, cx| { assert_eq!(editor.text(cx), "\nmod other;\nfn main() { let foo = 4; }"); editor.undo(&Undo, cx); assert_eq!( @@ -3408,7 +3341,7 @@ mod tests { } #[gpui::test(iterations = 10)] - async fn test_collaborating_with_renames(mut cx_a: TestAppContext, mut cx_b: TestAppContext) { + async fn test_collaborating_with_renames(cx_a: &mut TestAppContext, cx_b: &mut TestAppContext) { cx_a.foreground().forbid_parking(); let mut lang_registry = Arc::new(LanguageRegistry::new()); let fs = FakeFs::new(cx_a.background()); @@ -3431,8 +3364,8 @@ mod tests { // Connect to a server as 2 clients. 
let mut server = TestServer::start(cx_a.foreground(), cx_a.background()).await; - let client_a = server.create_client(&mut cx_a, "user_a").await; - let client_b = server.create_client(&mut cx_b, "user_b").await; + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; // Share a project as client A fs.insert_tree( @@ -3454,20 +3387,17 @@ mod tests { ) }); let (worktree_a, _) = project_a - .update(&mut cx_a, |p, cx| { + .update(cx_a, |p, cx| { p.find_or_create_local_worktree("/dir", false, cx) }) .await .unwrap(); worktree_a - .read_with(&cx_a, |tree, _| tree.as_local().unwrap().scan_complete()) + .read_with(cx_a, |tree, _| tree.as_local().unwrap().scan_complete()) .await; - let project_id = project_a.update(&mut cx_a, |p, _| p.next_remote_id()).await; - let worktree_id = worktree_a.read_with(&cx_a, |tree, _| tree.id()); - project_a - .update(&mut cx_a, |p, cx| p.share(cx)) - .await - .unwrap(); + let project_id = project_a.update(cx_a, |p, _| p.next_remote_id()).await; + let worktree_id = worktree_a.read_with(cx_a, |tree, _| tree.id()); + project_a.update(cx_a, |p, cx| p.share(cx)).await.unwrap(); // Join the worktree as client B. let project_b = Project::remote( @@ -3489,7 +3419,7 @@ mod tests { let (_window_b, workspace_b) = cx_b.add_window(|cx| Workspace::new(¶ms, cx)); let editor_b = workspace_b - .update(&mut cx_b, |workspace, cx| { + .update(cx_b, |workspace, cx| { workspace.open_path((worktree_id, "one.rs").into(), cx) }) .await @@ -3499,7 +3429,7 @@ mod tests { let mut fake_language_server = fake_language_servers.next().await.unwrap(); // Move cursor to a location that can be renamed. 
- let prepare_rename = editor_b.update(&mut cx_b, |editor, cx| { + let prepare_rename = editor_b.update(cx_b, |editor, cx| { editor.select_ranges([7..7], None, cx); editor.rename(&Rename, cx).unwrap() }); @@ -3517,7 +3447,7 @@ mod tests { .await .unwrap(); prepare_rename.await.unwrap(); - editor_b.update(&mut cx_b, |editor, cx| { + editor_b.update(cx_b, |editor, cx| { let rename = editor.pending_rename().unwrap(); let buffer = editor.buffer().read(cx).snapshot(cx); assert_eq!( @@ -3531,7 +3461,7 @@ mod tests { }); }); - let confirm_rename = workspace_b.update(&mut cx_b, |workspace, cx| { + let confirm_rename = workspace_b.update(cx_b, |workspace, cx| { Editor::confirm_rename(workspace, &ConfirmRename, cx).unwrap() }); fake_language_server @@ -3589,14 +3519,14 @@ mod tests { .unwrap(); confirm_rename.await.unwrap(); - let rename_editor = workspace_b.read_with(&cx_b, |workspace, cx| { + let rename_editor = workspace_b.read_with(cx_b, |workspace, cx| { workspace .active_item(cx) .unwrap() .downcast::() .unwrap() }); - rename_editor.update(&mut cx_b, |editor, cx| { + rename_editor.update(cx_b, |editor, cx| { assert_eq!( editor.text(cx), "const TWO: usize = one::THREE + one::THREE;\nconst THREE: usize = 1;" @@ -3614,7 +3544,7 @@ mod tests { }); // Ensure temporary rename edits cannot be undone/redone. - editor_b.update(&mut cx_b, |editor, cx| { + editor_b.update(cx_b, |editor, cx| { editor.undo(&Undo, cx); assert_eq!(editor.text(cx), "const ONE: usize = 1;"); editor.undo(&Undo, cx); @@ -3625,13 +3555,13 @@ mod tests { } #[gpui::test(iterations = 10)] - async fn test_basic_chat(mut cx_a: TestAppContext, mut cx_b: TestAppContext) { + async fn test_basic_chat(cx_a: &mut TestAppContext, cx_b: &mut TestAppContext) { cx_a.foreground().forbid_parking(); // Connect to a server as 2 clients. 
let mut server = TestServer::start(cx_a.foreground(), cx_a.background()).await; - let client_a = server.create_client(&mut cx_a, "user_a").await; - let client_b = server.create_client(&mut cx_b, "user_b").await; + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; // Create an org that includes these 2 users. let db = &server.app_state.db; @@ -3664,9 +3594,9 @@ mod tests { let channels_a = cx_a .add_model(|cx| ChannelList::new(client_a.user_store.clone(), client_a.clone(), cx)); channels_a - .condition(&mut cx_a, |list, _| list.available_channels().is_some()) + .condition(cx_a, |list, _| list.available_channels().is_some()) .await; - channels_a.read_with(&cx_a, |list, _| { + channels_a.read_with(cx_a, |list, _| { assert_eq!( list.available_channels().unwrap(), &[ChannelDetails { @@ -3675,10 +3605,10 @@ mod tests { }] ) }); - let channel_a = channels_a.update(&mut cx_a, |this, cx| { + let channel_a = channels_a.update(cx_a, |this, cx| { this.get_channel(channel_id.to_proto(), cx).unwrap() }); - channel_a.read_with(&cx_a, |channel, _| assert!(channel.messages().is_empty())); + channel_a.read_with(cx_a, |channel, _| assert!(channel.messages().is_empty())); channel_a .condition(&cx_a, |channel, _| { channel_messages(channel) @@ -3689,9 +3619,9 @@ mod tests { let channels_b = cx_b .add_model(|cx| ChannelList::new(client_b.user_store.clone(), client_b.clone(), cx)); channels_b - .condition(&mut cx_b, |list, _| list.available_channels().is_some()) + .condition(cx_b, |list, _| list.available_channels().is_some()) .await; - channels_b.read_with(&cx_b, |list, _| { + channels_b.read_with(cx_b, |list, _| { assert_eq!( list.available_channels().unwrap(), &[ChannelDetails { @@ -3701,10 +3631,10 @@ mod tests { ) }); - let channel_b = channels_b.update(&mut cx_b, |this, cx| { + let channel_b = channels_b.update(cx_b, |this, cx| { this.get_channel(channel_id.to_proto(), cx).unwrap() }); - 
channel_b.read_with(&cx_b, |channel, _| assert!(channel.messages().is_empty())); + channel_b.read_with(cx_b, |channel, _| assert!(channel.messages().is_empty())); channel_b .condition(&cx_b, |channel, _| { channel_messages(channel) @@ -3713,7 +3643,7 @@ mod tests { .await; channel_a - .update(&mut cx_a, |channel, cx| { + .update(cx_a, |channel, cx| { channel .send_message("oh, hi B.".to_string(), cx) .unwrap() @@ -3765,11 +3695,11 @@ mod tests { } #[gpui::test(iterations = 10)] - async fn test_chat_message_validation(mut cx_a: TestAppContext) { + async fn test_chat_message_validation(cx_a: &mut TestAppContext) { cx_a.foreground().forbid_parking(); let mut server = TestServer::start(cx_a.foreground(), cx_a.background()).await; - let client_a = server.create_client(&mut cx_a, "user_a").await; + let client_a = server.create_client(cx_a, "user_a").await; let db = &server.app_state.db; let org_id = db.create_org("Test Org", "test-org").await.unwrap(); @@ -3784,15 +3714,15 @@ mod tests { let channels_a = cx_a .add_model(|cx| ChannelList::new(client_a.user_store.clone(), client_a.clone(), cx)); channels_a - .condition(&mut cx_a, |list, _| list.available_channels().is_some()) + .condition(cx_a, |list, _| list.available_channels().is_some()) .await; - let channel_a = channels_a.update(&mut cx_a, |this, cx| { + let channel_a = channels_a.update(cx_a, |this, cx| { this.get_channel(channel_id.to_proto(), cx).unwrap() }); // Messages aren't allowed to be too long. channel_a - .update(&mut cx_a, |channel, cx| { + .update(cx_a, |channel, cx| { let long_body = "this is long.\n".repeat(1024); channel.send_message(long_body, cx).unwrap() }) @@ -3800,13 +3730,13 @@ mod tests { .unwrap_err(); // Messages aren't allowed to be blank. - channel_a.update(&mut cx_a, |channel, cx| { + channel_a.update(cx_a, |channel, cx| { channel.send_message(String::new(), cx).unwrap_err() }); // Leading and trailing whitespace are trimmed. 
channel_a - .update(&mut cx_a, |channel, cx| { + .update(cx_a, |channel, cx| { channel .send_message("\n surrounded by whitespace \n".to_string(), cx) .unwrap() @@ -3825,13 +3755,13 @@ mod tests { } #[gpui::test(iterations = 10)] - async fn test_chat_reconnection(mut cx_a: TestAppContext, mut cx_b: TestAppContext) { + async fn test_chat_reconnection(cx_a: &mut TestAppContext, cx_b: &mut TestAppContext) { cx_a.foreground().forbid_parking(); // Connect to a server as 2 clients. let mut server = TestServer::start(cx_a.foreground(), cx_a.background()).await; - let client_a = server.create_client(&mut cx_a, "user_a").await; - let client_b = server.create_client(&mut cx_b, "user_b").await; + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; let mut status_b = client_b.status(); // Create an org that includes these 2 users. @@ -3865,10 +3795,10 @@ mod tests { let channels_a = cx_a .add_model(|cx| ChannelList::new(client_a.user_store.clone(), client_a.clone(), cx)); channels_a - .condition(&mut cx_a, |list, _| list.available_channels().is_some()) + .condition(cx_a, |list, _| list.available_channels().is_some()) .await; - channels_a.read_with(&cx_a, |list, _| { + channels_a.read_with(cx_a, |list, _| { assert_eq!( list.available_channels().unwrap(), &[ChannelDetails { @@ -3877,10 +3807,10 @@ mod tests { }] ) }); - let channel_a = channels_a.update(&mut cx_a, |this, cx| { + let channel_a = channels_a.update(cx_a, |this, cx| { this.get_channel(channel_id.to_proto(), cx).unwrap() }); - channel_a.read_with(&cx_a, |channel, _| assert!(channel.messages().is_empty())); + channel_a.read_with(cx_a, |channel, _| assert!(channel.messages().is_empty())); channel_a .condition(&cx_a, |channel, _| { channel_messages(channel) @@ -3891,9 +3821,9 @@ mod tests { let channels_b = cx_b .add_model(|cx| ChannelList::new(client_b.user_store.clone(), client_b.clone(), cx)); channels_b - .condition(&mut cx_b, |list, _| 
list.available_channels().is_some()) + .condition(cx_b, |list, _| list.available_channels().is_some()) .await; - channels_b.read_with(&cx_b, |list, _| { + channels_b.read_with(cx_b, |list, _| { assert_eq!( list.available_channels().unwrap(), &[ChannelDetails { @@ -3903,10 +3833,10 @@ mod tests { ) }); - let channel_b = channels_b.update(&mut cx_b, |this, cx| { + let channel_b = channels_b.update(cx_b, |this, cx| { this.get_channel(channel_id.to_proto(), cx).unwrap() }); - channel_b.read_with(&cx_b, |channel, _| assert!(channel.messages().is_empty())); + channel_b.read_with(cx_b, |channel, _| assert!(channel.messages().is_empty())); channel_b .condition(&cx_b, |channel, _| { channel_messages(channel) @@ -3922,7 +3852,7 @@ mod tests { Some(client::Status::ReconnectionError { .. }) ) {} - channels_b.read_with(&cx_b, |channels, _| { + channels_b.read_with(cx_b, |channels, _| { assert_eq!( channels.available_channels().unwrap(), [ChannelDetails { @@ -3931,7 +3861,7 @@ mod tests { }] ) }); - channel_b.read_with(&cx_b, |channel, _| { + channel_b.read_with(cx_b, |channel, _| { assert_eq!( channel_messages(channel), [("user_b".to_string(), "hello A, it's B.".to_string(), false)] @@ -3940,7 +3870,7 @@ mod tests { // Send a message from client B while it is disconnected. channel_b - .update(&mut cx_b, |channel, cx| { + .update(cx_b, |channel, cx| { let task = channel .send_message("can you see this?".to_string(), cx) .unwrap(); @@ -3958,7 +3888,7 @@ mod tests { // Send a message from client A while B is disconnected. channel_a - .update(&mut cx_a, |channel, cx| { + .update(cx_a, |channel, cx| { channel .send_message("oh, hi B.".to_string(), cx) .unwrap() @@ -3997,7 +3927,7 @@ mod tests { // Ensure client A and B can communicate normally after reconnection. 
channel_a - .update(&mut cx_a, |channel, cx| { + .update(cx_a, |channel, cx| { channel.send_message("you online?".to_string(), cx).unwrap() }) .await @@ -4016,7 +3946,7 @@ mod tests { .await; channel_b - .update(&mut cx_b, |channel, cx| { + .update(cx_b, |channel, cx| { channel.send_message("yep".to_string(), cx).unwrap() }) .await @@ -4038,9 +3968,9 @@ mod tests { #[gpui::test(iterations = 10)] async fn test_contacts( - mut cx_a: TestAppContext, - mut cx_b: TestAppContext, - mut cx_c: TestAppContext, + cx_a: &mut TestAppContext, + cx_b: &mut TestAppContext, + cx_c: &mut TestAppContext, ) { cx_a.foreground().forbid_parking(); let lang_registry = Arc::new(LanguageRegistry::new()); @@ -4048,9 +3978,9 @@ mod tests { // Connect to a server as 3 clients. let mut server = TestServer::start(cx_a.foreground(), cx_a.background()).await; - let client_a = server.create_client(&mut cx_a, "user_a").await; - let client_b = server.create_client(&mut cx_b, "user_b").await; - let client_c = server.create_client(&mut cx_c, "user_c").await; + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; + let client_c = server.create_client(cx_c, "user_c").await; // Share a worktree as client A. 
fs.insert_tree( @@ -4071,13 +4001,13 @@ mod tests { ) }); let (worktree_a, _) = project_a - .update(&mut cx_a, |p, cx| { + .update(cx_a, |p, cx| { p.find_or_create_local_worktree("/a", false, cx) }) .await .unwrap(); worktree_a - .read_with(&cx_a, |tree, _| tree.as_local().unwrap().scan_complete()) + .read_with(cx_a, |tree, _| tree.as_local().unwrap().scan_complete()) .await; client_a @@ -4100,10 +4030,10 @@ mod tests { .await; let project_id = project_a - .update(&mut cx_a, |project, _| project.next_remote_id()) + .update(cx_a, |project, _| project.next_remote_id()) .await; project_a - .update(&mut cx_a, |project, cx| project.share(cx)) + .update(cx_a, |project, cx| project.share(cx)) .await .unwrap(); @@ -4179,7 +4109,7 @@ mod tests { } #[gpui::test(iterations = 100)] - async fn test_random_collaboration(cx: TestAppContext, rng: StdRng) { + async fn test_random_collaboration(cx: &mut TestAppContext, rng: StdRng) { cx.foreground().forbid_parking(); let max_peers = env::var("MAX_PEERS") .map(|i| i.parse().expect("invalid `MAX_PEERS` variable")) diff --git a/crates/zed/src/zed.rs b/crates/zed/src/zed.rs index 02430aae1ae7e6eda89ec57c6ac7b47aa13386f4..558bf87fcbee8c4fa4dc3fd35612f80b4ca54e1d 100644 --- a/crates/zed/src/zed.rs +++ b/crates/zed/src/zed.rs @@ -152,7 +152,7 @@ mod tests { }; #[gpui::test] - async fn test_open_paths_action(mut cx: TestAppContext) { + async fn test_open_paths_action(cx: &mut TestAppContext) { let app_state = cx.update(test_app_state); let dir = temp_tree(json!({ "a": { @@ -186,7 +186,7 @@ mod tests { .await; assert_eq!(cx.window_ids().len(), 1); let workspace_1 = cx.root_view::(cx.window_ids()[0]).unwrap(); - workspace_1.read_with(&cx, |workspace, cx| { + workspace_1.read_with(cx, |workspace, cx| { assert_eq!(workspace.worktrees(cx).count(), 2) }); @@ -205,7 +205,7 @@ mod tests { } #[gpui::test] - async fn test_new_empty_workspace(mut cx: TestAppContext) { + async fn test_new_empty_workspace(cx: &mut TestAppContext) { let app_state = 
cx.update(test_app_state); cx.update(|cx| { workspace::init(cx); @@ -213,7 +213,7 @@ mod tests { cx.dispatch_global_action(workspace::OpenNew(app_state.clone())); let window_id = *cx.window_ids().first().unwrap(); let workspace = cx.root_view::(window_id).unwrap(); - let editor = workspace.update(&mut cx, |workspace, cx| { + let editor = workspace.update(cx, |workspace, cx| { workspace .active_item(cx) .unwrap() @@ -221,22 +221,22 @@ mod tests { .unwrap() }); - editor.update(&mut cx, |editor, cx| { + editor.update(cx, |editor, cx| { assert!(editor.text(cx).is_empty()); }); - let save_task = workspace.update(&mut cx, |workspace, cx| workspace.save_active_item(cx)); + let save_task = workspace.update(cx, |workspace, cx| workspace.save_active_item(cx)); app_state.fs.as_fake().insert_dir("/root").await; cx.simulate_new_path_selection(|_| Some(PathBuf::from("/root/the-new-name"))); save_task.await.unwrap(); - editor.read_with(&cx, |editor, cx| { + editor.read_with(cx, |editor, cx| { assert!(!editor.is_dirty(cx)); assert_eq!(editor.title(cx), "the-new-name"); }); } #[gpui::test] - async fn test_open_entry(mut cx: TestAppContext) { + async fn test_open_entry(cx: &mut TestAppContext) { let app_state = cx.update(test_app_state); app_state .fs @@ -256,7 +256,7 @@ mod tests { let (_, workspace) = cx.add_window(|cx| Workspace::new(¶ms, cx)); params .project - .update(&mut cx, |project, cx| { + .update(cx, |project, cx| { project.find_or_create_local_worktree("/root", false, cx) }) .await @@ -271,7 +271,7 @@ mod tests { // Open the first entry let entry_1 = workspace - .update(&mut cx, |w, cx| w.open_path(file1.clone(), cx)) + .update(cx, |w, cx| w.open_path(file1.clone(), cx)) .await .unwrap(); cx.read(|cx| { @@ -285,7 +285,7 @@ mod tests { // Open the second entry workspace - .update(&mut cx, |w, cx| w.open_path(file2.clone(), cx)) + .update(cx, |w, cx| w.open_path(file2.clone(), cx)) .await .unwrap(); cx.read(|cx| { @@ -299,7 +299,7 @@ mod tests { // Open the first entry 
again. The existing pane item is activated. let entry_1b = workspace - .update(&mut cx, |w, cx| w.open_path(file1.clone(), cx)) + .update(cx, |w, cx| w.open_path(file1.clone(), cx)) .await .unwrap(); assert_eq!(entry_1.id(), entry_1b.id()); @@ -315,14 +315,14 @@ mod tests { // Split the pane with the first entry, then open the second entry again. workspace - .update(&mut cx, |w, cx| { + .update(cx, |w, cx| { w.split_pane(w.active_pane().clone(), SplitDirection::Right, cx); w.open_path(file2.clone(), cx) }) .await .unwrap(); - workspace.read_with(&cx, |w, cx| { + workspace.read_with(cx, |w, cx| { assert_eq!( w.active_pane() .read(cx) @@ -334,7 +334,7 @@ mod tests { }); // Open the third entry twice concurrently. Only one pane item is added. - let (t1, t2) = workspace.update(&mut cx, |w, cx| { + let (t1, t2) = workspace.update(cx, |w, cx| { ( w.open_path(file3.clone(), cx), w.open_path(file3.clone(), cx), @@ -357,7 +357,7 @@ mod tests { } #[gpui::test] - async fn test_open_paths(mut cx: TestAppContext) { + async fn test_open_paths(cx: &mut TestAppContext) { let app_state = cx.update(test_app_state); let fs = app_state.fs.as_fake(); fs.insert_dir("/dir1").await; @@ -369,7 +369,7 @@ mod tests { let (_, workspace) = cx.add_window(|cx| Workspace::new(¶ms, cx)); params .project - .update(&mut cx, |project, cx| { + .update(cx, |project, cx| { project.find_or_create_local_worktree("/dir1", false, cx) }) .await @@ -435,7 +435,7 @@ mod tests { } #[gpui::test] - async fn test_save_conflicting_item(mut cx: TestAppContext) { + async fn test_save_conflicting_item(cx: &mut TestAppContext) { let app_state = cx.update(test_app_state); let fs = app_state.fs.as_fake(); fs.insert_tree("/root", json!({ "a.txt": "" })).await; @@ -444,7 +444,7 @@ mod tests { let (window_id, workspace) = cx.add_window(|cx| Workspace::new(¶ms, cx)); params .project - .update(&mut cx, |project, cx| { + .update(cx, |project, cx| { project.find_or_create_local_worktree("/root", false, cx) }) .await @@ -474,24 
+474,24 @@ mod tests { .await; cx.read(|cx| assert!(editor.is_dirty(cx))); - let save_task = workspace.update(&mut cx, |workspace, cx| workspace.save_active_item(cx)); + let save_task = workspace.update(cx, |workspace, cx| workspace.save_active_item(cx)); cx.simulate_prompt_answer(window_id, 0); save_task.await.unwrap(); - editor.read_with(&cx, |editor, cx| { + editor.read_with(cx, |editor, cx| { assert!(!editor.is_dirty(cx)); assert!(!editor.has_conflict(cx)); }); } #[gpui::test] - async fn test_open_and_save_new_file(mut cx: TestAppContext) { + async fn test_open_and_save_new_file(cx: &mut TestAppContext) { let app_state = cx.update(test_app_state); app_state.fs.as_fake().insert_dir("/root").await; let params = cx.update(|cx| WorkspaceParams::local(&app_state, cx)); let (window_id, workspace) = cx.add_window(|cx| Workspace::new(¶ms, cx)); params .project - .update(&mut cx, |project, cx| { + .update(cx, |project, cx| { project.find_or_create_local_worktree("/root", false, cx) }) .await @@ -500,7 +500,7 @@ mod tests { // Create a new untitled buffer cx.dispatch_action(window_id, vec![workspace.id()], OpenNew(app_state.clone())); - let editor = workspace.read_with(&cx, |workspace, cx| { + let editor = workspace.read_with(cx, |workspace, cx| { workspace .active_item(cx) .unwrap() @@ -508,7 +508,7 @@ mod tests { .unwrap() }); - editor.update(&mut cx, |editor, cx| { + editor.update(cx, |editor, cx| { assert!(!editor.is_dirty(cx)); assert_eq!(editor.title(cx), "untitled"); assert!(Arc::ptr_eq( @@ -520,7 +520,7 @@ mod tests { }); // Save the buffer. This prompts for a filename. 
- let save_task = workspace.update(&mut cx, |workspace, cx| workspace.save_active_item(cx)); + let save_task = workspace.update(cx, |workspace, cx| workspace.save_active_item(cx)); cx.simulate_new_path_selection(|parent_dir| { assert_eq!(parent_dir, Path::new("/root")); Some(parent_dir.join("the-new-name.rs")) @@ -533,21 +533,21 @@ mod tests { // When the save completes, the buffer's title is updated and the language is assigned based // on the path. save_task.await.unwrap(); - editor.read_with(&cx, |editor, cx| { + editor.read_with(cx, |editor, cx| { assert!(!editor.is_dirty(cx)); assert_eq!(editor.title(cx), "the-new-name.rs"); assert_eq!(editor.language(cx).unwrap().name().as_ref(), "Rust"); }); // Edit the file and save it again. This time, there is no filename prompt. - editor.update(&mut cx, |editor, cx| { + editor.update(cx, |editor, cx| { editor.handle_input(&editor::Input(" there".into()), cx); assert_eq!(editor.is_dirty(cx.as_ref()), true); }); - let save_task = workspace.update(&mut cx, |workspace, cx| workspace.save_active_item(cx)); + let save_task = workspace.update(cx, |workspace, cx| workspace.save_active_item(cx)); save_task.await.unwrap(); assert!(!cx.did_prompt_for_new_path()); - editor.read_with(&cx, |editor, cx| { + editor.read_with(cx, |editor, cx| { assert!(!editor.is_dirty(cx)); assert_eq!(editor.title(cx), "the-new-name.rs") }); @@ -556,7 +556,7 @@ mod tests { // the same buffer. 
cx.dispatch_action(window_id, vec![workspace.id()], OpenNew(app_state.clone())); workspace - .update(&mut cx, |workspace, cx| { + .update(cx, |workspace, cx| { workspace.split_pane(workspace.active_pane().clone(), SplitDirection::Right, cx); workspace.open_path( ProjectPath { @@ -568,7 +568,7 @@ mod tests { }) .await .unwrap(); - let editor2 = workspace.update(&mut cx, |workspace, cx| { + let editor2 = workspace.update(cx, |workspace, cx| { workspace .active_item(cx) .unwrap() @@ -584,7 +584,7 @@ mod tests { } #[gpui::test] - async fn test_setting_language_when_saving_as_single_file_worktree(mut cx: TestAppContext) { + async fn test_setting_language_when_saving_as_single_file_worktree(cx: &mut TestAppContext) { let app_state = cx.update(test_app_state); app_state.fs.as_fake().insert_dir("/root").await; let params = cx.update(|cx| WorkspaceParams::local(&app_state, cx)); @@ -592,7 +592,7 @@ mod tests { // Create a new untitled buffer cx.dispatch_action(window_id, vec![workspace.id()], OpenNew(app_state.clone())); - let editor = workspace.read_with(&cx, |workspace, cx| { + let editor = workspace.read_with(cx, |workspace, cx| { workspace .active_item(cx) .unwrap() @@ -600,7 +600,7 @@ mod tests { .unwrap() }); - editor.update(&mut cx, |editor, cx| { + editor.update(cx, |editor, cx| { assert!(Arc::ptr_eq( editor.language(cx).unwrap(), &language::PLAIN_TEXT @@ -610,18 +610,18 @@ mod tests { }); // Save the buffer. This prompts for a filename. - let save_task = workspace.update(&mut cx, |workspace, cx| workspace.save_active_item(cx)); + let save_task = workspace.update(cx, |workspace, cx| workspace.save_active_item(cx)); cx.simulate_new_path_selection(|_| Some(PathBuf::from("/root/the-new-name.rs"))); save_task.await.unwrap(); // The buffer is not dirty anymore and the language is assigned based on the path. 
- editor.read_with(&cx, |editor, cx| { + editor.read_with(cx, |editor, cx| { assert!(!editor.is_dirty(cx)); assert_eq!(editor.language(cx).unwrap().name().as_ref(), "Rust") }); } #[gpui::test] - async fn test_pane_actions(mut cx: TestAppContext) { + async fn test_pane_actions(cx: &mut TestAppContext) { cx.update(|cx| pane::init(cx)); let app_state = cx.update(test_app_state); app_state @@ -643,7 +643,7 @@ mod tests { let (window_id, workspace) = cx.add_window(|cx| Workspace::new(¶ms, cx)); params .project - .update(&mut cx, |project, cx| { + .update(cx, |project, cx| { project.find_or_create_local_worktree("/root", false, cx) }) .await @@ -656,7 +656,7 @@ mod tests { let pane_1 = cx.read(|cx| workspace.read(cx).active_pane().clone()); workspace - .update(&mut cx, |w, cx| w.open_path(file1.clone(), cx)) + .update(cx, |w, cx| w.open_path(file1.clone(), cx)) .await .unwrap(); cx.read(|cx| { @@ -686,7 +686,7 @@ mod tests { } #[gpui::test] - async fn test_navigation(mut cx: TestAppContext) { + async fn test_navigation(cx: &mut TestAppContext) { let app_state = cx.update(test_app_state); app_state .fs @@ -706,7 +706,7 @@ mod tests { let (_, workspace) = cx.add_window(|cx| Workspace::new(¶ms, cx)); params .project - .update(&mut cx, |project, cx| { + .update(cx, |project, cx| { project.find_or_create_local_worktree("/root", false, cx) }) .await @@ -719,110 +719,94 @@ mod tests { let file3 = entries[2].clone(); let editor1 = workspace - .update(&mut cx, |w, cx| w.open_path(file1.clone(), cx)) + .update(cx, |w, cx| w.open_path(file1.clone(), cx)) .await .unwrap() .downcast::() .unwrap(); - editor1.update(&mut cx, |editor, cx| { + editor1.update(cx, |editor, cx| { editor.select_display_ranges(&[DisplayPoint::new(10, 0)..DisplayPoint::new(10, 0)], cx); }); let editor2 = workspace - .update(&mut cx, |w, cx| w.open_path(file2.clone(), cx)) + .update(cx, |w, cx| w.open_path(file2.clone(), cx)) .await .unwrap() .downcast::() .unwrap(); let editor3 = workspace - .update(&mut cx, 
|w, cx| w.open_path(file3.clone(), cx)) + .update(cx, |w, cx| w.open_path(file3.clone(), cx)) .await .unwrap() .downcast::() .unwrap(); - editor3.update(&mut cx, |editor, cx| { + editor3.update(cx, |editor, cx| { editor.select_display_ranges(&[DisplayPoint::new(15, 0)..DisplayPoint::new(15, 0)], cx); }); assert_eq!( - active_location(&workspace, &mut cx), + active_location(&workspace, cx), (file3.clone(), DisplayPoint::new(15, 0)) ); - workspace - .update(&mut cx, |w, cx| Pane::go_back(w, cx)) - .await; + workspace.update(cx, |w, cx| Pane::go_back(w, cx)).await; assert_eq!( - active_location(&workspace, &mut cx), + active_location(&workspace, cx), (file3.clone(), DisplayPoint::new(0, 0)) ); - workspace - .update(&mut cx, |w, cx| Pane::go_back(w, cx)) - .await; + workspace.update(cx, |w, cx| Pane::go_back(w, cx)).await; assert_eq!( - active_location(&workspace, &mut cx), + active_location(&workspace, cx), (file2.clone(), DisplayPoint::new(0, 0)) ); - workspace - .update(&mut cx, |w, cx| Pane::go_back(w, cx)) - .await; + workspace.update(cx, |w, cx| Pane::go_back(w, cx)).await; assert_eq!( - active_location(&workspace, &mut cx), + active_location(&workspace, cx), (file1.clone(), DisplayPoint::new(10, 0)) ); - workspace - .update(&mut cx, |w, cx| Pane::go_back(w, cx)) - .await; + workspace.update(cx, |w, cx| Pane::go_back(w, cx)).await; assert_eq!( - active_location(&workspace, &mut cx), + active_location(&workspace, cx), (file1.clone(), DisplayPoint::new(0, 0)) ); // Go back one more time and ensure we don't navigate past the first item in the history. 
- workspace - .update(&mut cx, |w, cx| Pane::go_back(w, cx)) - .await; + workspace.update(cx, |w, cx| Pane::go_back(w, cx)).await; assert_eq!( - active_location(&workspace, &mut cx), + active_location(&workspace, cx), (file1.clone(), DisplayPoint::new(0, 0)) ); - workspace - .update(&mut cx, |w, cx| Pane::go_forward(w, cx)) - .await; + workspace.update(cx, |w, cx| Pane::go_forward(w, cx)).await; assert_eq!( - active_location(&workspace, &mut cx), + active_location(&workspace, cx), (file1.clone(), DisplayPoint::new(10, 0)) ); - workspace - .update(&mut cx, |w, cx| Pane::go_forward(w, cx)) - .await; + workspace.update(cx, |w, cx| Pane::go_forward(w, cx)).await; assert_eq!( - active_location(&workspace, &mut cx), + active_location(&workspace, cx), (file2.clone(), DisplayPoint::new(0, 0)) ); // Go forward to an item that has been closed, ensuring it gets re-opened at the same // location. - workspace.update(&mut cx, |workspace, cx| { + workspace.update(cx, |workspace, cx| { workspace .active_pane() .update(cx, |pane, cx| pane.close_item(editor3.id(), cx)); drop(editor3); }); - workspace - .update(&mut cx, |w, cx| Pane::go_forward(w, cx)) - .await; + workspace.update(cx, |w, cx| Pane::go_forward(w, cx)).await; assert_eq!( - active_location(&workspace, &mut cx), + active_location(&workspace, cx), (file3.clone(), DisplayPoint::new(0, 0)) ); // Go back to an item that has been closed and removed from disk, ensuring it gets skipped. 
workspace - .update(&mut cx, |workspace, cx| { + .update(cx, |workspace, cx| { workspace .active_pane() .update(cx, |pane, cx| pane.close_item(editor2.id(), cx)); @@ -834,18 +818,14 @@ mod tests { }) .await .unwrap(); - workspace - .update(&mut cx, |w, cx| Pane::go_back(w, cx)) - .await; + workspace.update(cx, |w, cx| Pane::go_back(w, cx)).await; assert_eq!( - active_location(&workspace, &mut cx), + active_location(&workspace, cx), (file1.clone(), DisplayPoint::new(10, 0)) ); - workspace - .update(&mut cx, |w, cx| Pane::go_forward(w, cx)) - .await; + workspace.update(cx, |w, cx| Pane::go_forward(w, cx)).await; assert_eq!( - active_location(&workspace, &mut cx), + active_location(&workspace, cx), (file3.clone(), DisplayPoint::new(0, 0)) ); From ce59e57e6d64f9bdbce7b9bdf4bb0d19a9f90e63 Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Tue, 1 Mar 2022 12:18:55 +0100 Subject: [PATCH 13/45] Remove project from host connection when unregistering it --- crates/server/src/rpc/store.rs | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/crates/server/src/rpc/store.rs b/crates/server/src/rpc/store.rs index 3d68836b572c54c2e2be278afc72bcc7bca4a472..a7e89f3bf531e2c946a43997665cf6f4a4531d33 100644 --- a/crates/server/src/rpc/store.rs +++ b/crates/server/src/rpc/store.rs @@ -297,6 +297,11 @@ impl Store { } let project = e.remove(); + + if let Some(host_connection) = self.connections.get_mut(&connection_id) { + host_connection.projects.remove(&project_id); + } + if let Some(share) = &project.share { for guest_connection in share.guests.keys() { if let Some(connection) = self.connections.get_mut(&guest_connection) { @@ -305,6 +310,8 @@ impl Store { } } + #[cfg(test)] + self.check_invariants(); Ok(project) } else { Err(anyhow!("no such project"))? 
From 8d078ed4e22f8198a1aa63bcb5c73b15592a27d6 Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Tue, 1 Mar 2022 13:08:25 +0100 Subject: [PATCH 14/45] Don't block when emitting fs events Blocking could happen while processing events, which would cause the background scanner to never make any further progress. --- crates/project/src/fs.rs | 19 ++++++++++--------- crates/project/src/worktree.rs | 2 +- crates/server/src/rpc.rs | 4 +++- 3 files changed, 14 insertions(+), 11 deletions(-) diff --git a/crates/project/src/fs.rs b/crates/project/src/fs.rs index 9660312d5b05ed05089e9ed7fd7baf564a9078c8..ec7925685d69173609d7344edad9521d363183ce 100644 --- a/crates/project/src/fs.rs +++ b/crates/project/src/fs.rs @@ -225,7 +225,7 @@ struct FakeFsEntry { struct FakeFsState { entries: std::collections::BTreeMap, next_inode: u64, - events_tx: postage::broadcast::Sender>, + event_txs: Vec>>, } #[cfg(any(test, feature = "test-support"))] @@ -248,8 +248,6 @@ impl FakeFsState { I: IntoIterator, T: Into, { - use postage::prelude::Sink as _; - let events = paths .into_iter() .map(|path| fsevent::Event { @@ -257,9 +255,12 @@ impl FakeFsState { flags: fsevent::StreamFlags::empty(), path: path.into(), }) - .collect(); + .collect::>(); - let _ = self.events_tx.send(events).await; + self.event_txs.retain(|tx| { + let _ = tx.try_send(events.clone()); + !tx.is_closed() + }); } } @@ -273,7 +274,6 @@ pub struct FakeFs { #[cfg(any(test, feature = "test-support"))] impl FakeFs { pub fn new(executor: std::sync::Arc) -> std::sync::Arc { - let (events_tx, _) = postage::broadcast::channel(2); let mut entries = std::collections::BTreeMap::new(); entries.insert( Path::new("/").to_path_buf(), @@ -292,7 +292,7 @@ impl FakeFs { state: futures::lock::Mutex::new(FakeFsState { entries, next_inode: 1, - events_tx, + event_txs: Default::default(), }), }) } @@ -642,9 +642,10 @@ impl Fs for FakeFs { path: &Path, _: Duration, ) -> Pin>>> { - let state = self.state.lock().await; + let mut state = 
self.state.lock().await; self.simulate_random_delay().await; - let rx = state.events_tx.subscribe(); + let (tx, rx) = smol::channel::unbounded(); + state.event_txs.push(tx); let path = path.to_path_buf(); Box::pin(futures::StreamExt::filter(rx, move |events| { let result = events.iter().any(|event| event.path.starts_with(&path)); diff --git a/crates/project/src/worktree.rs b/crates/project/src/worktree.rs index fe9958b9cde5b2c1f3d174140e5d7176ac01a66b..3e2ad5ce20991316feda3006eb168b228803c7c4 100644 --- a/crates/project/src/worktree.rs +++ b/crates/project/src/worktree.rs @@ -386,7 +386,7 @@ impl Worktree { if worktree.poll_task.is_none() { worktree.poll_task = Some(cx.spawn(|this, mut cx| async move { if is_fake_fs { - smol::future::yield_now().await; + cx.background().simulate_random_delay().await; } else { smol::Timer::after(Duration::from_millis(100)).await; } diff --git a/crates/server/src/rpc.rs b/crates/server/src/rpc.rs index db324a30fc3ece0e09887dfd70e8a40da96a43de..500e99ef7cf393581b334075afddb85a6c8a0381 100644 --- a/crates/server/src/rpc.rs +++ b/crates/server/src/rpc.rs @@ -4185,7 +4185,9 @@ mod tests { while operations.get() < max_operations { cx.background().simulate_random_delay().await; - if clients.len() < max_peers && rng.lock().gen_bool(0.05) { + if clients.len() >= max_peers { + break; + } else if rng.lock().gen_bool(0.05) { operations.set(operations.get() + 1); let guest_id = clients.len(); From 3e9dbe10d6a51e530bcd734b3a860a718c9720a9 Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Tue, 1 Mar 2022 13:34:03 +0100 Subject: [PATCH 15/45] Capture a weak reference to the `Project` in fake LSP --- crates/server/src/rpc.rs | 76 ++++++++++++++++++++++------------------ 1 file changed, 42 insertions(+), 34 deletions(-) diff --git a/crates/server/src/rpc.rs b/crates/server/src/rpc.rs index 500e99ef7cf393581b334075afddb85a6c8a0381..67ca86c154deedf175170326be5bc6268679da00 100644 --- a/crates/server/src/rpc.rs +++ b/crates/server/src/rpc.rs @@ 
-4175,7 +4175,7 @@ mod tests { .unwrap(); clients.push(cx.foreground().spawn(host.simulate_host( - host_project.clone(), + host_project, language_server_config, operations.clone(), max_operations, @@ -4231,7 +4231,8 @@ mod tests { let mut clients = futures::future::join_all(clients).await; cx.foreground().run_until_parked(); - let (_, host_cx) = clients.remove(0); + let (host_client, host_cx) = clients.remove(0); + let host_project = host_client.project.as_ref().unwrap(); let host_worktree_snapshots = host_project.read_with(&host_cx, |project, cx| { project .worktrees(cx) @@ -4519,7 +4520,7 @@ mod tests { language_server_config.set_fake_initializer({ let rng = rng.clone(); let files = files.clone(); - let project = project.clone(); + let project = project.downgrade(); move |fake_server| { fake_server.handle_request::(|_, _| { Some(lsp::CompletionResponse::Array(vec![lsp::CompletionItem { @@ -4579,37 +4580,44 @@ mod tests { let rng = rng.clone(); let project = project.clone(); move |params, mut cx| { - project.update(&mut cx, |project, cx| { - let path = params - .text_document_position_params - .text_document - .uri - .to_file_path() - .unwrap(); - let (worktree, relative_path) = - project.find_local_worktree(&path, cx)?; - let project_path = - ProjectPath::from((worktree.read(cx).id(), relative_path)); - let buffer = project.get_open_buffer(&project_path, cx)?.read(cx); - - let mut highlights = Vec::new(); - let highlight_count = rng.lock().gen_range(1..=5); - let mut prev_end = 0; - for _ in 0..highlight_count { - let range = - buffer.random_byte_range(prev_end, &mut *rng.lock()); - let start = - buffer.offset_to_point_utf16(range.start).to_lsp_position(); - let end = - buffer.offset_to_point_utf16(range.end).to_lsp_position(); - highlights.push(lsp::DocumentHighlight { - range: lsp::Range::new(start, end), - kind: Some(lsp::DocumentHighlightKind::READ), - }); - prev_end = range.end; - } - Some(highlights) - }) + if let Some(project) = project.upgrade(&cx) { + 
project.update(&mut cx, |project, cx| { + let path = params + .text_document_position_params + .text_document + .uri + .to_file_path() + .unwrap(); + let (worktree, relative_path) = + project.find_local_worktree(&path, cx)?; + let project_path = + ProjectPath::from((worktree.read(cx).id(), relative_path)); + let buffer = + project.get_open_buffer(&project_path, cx)?.read(cx); + + let mut highlights = Vec::new(); + let highlight_count = rng.lock().gen_range(1..=5); + let mut prev_end = 0; + for _ in 0..highlight_count { + let range = + buffer.random_byte_range(prev_end, &mut *rng.lock()); + let start = buffer + .offset_to_point_utf16(range.start) + .to_lsp_position(); + let end = buffer + .offset_to_point_utf16(range.end) + .to_lsp_position(); + highlights.push(lsp::DocumentHighlight { + range: lsp::Range::new(start, end), + kind: Some(lsp::DocumentHighlightKind::READ), + }); + prev_end = range.end; + } + Some(highlights) + }) + } else { + None + } } }); } From 4cfd345f9d91ebd8e76a668f3494ecf2e45c4b9d Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Tue, 1 Mar 2022 15:48:15 +0100 Subject: [PATCH 16/45] Use `async_broadcast` to emit fake FS events Co-Authored-By: Nathan Sobo --- Cargo.lock | 1 + crates/project/Cargo.toml | 1 + crates/project/src/fs.rs | 77 ++++++++++++++++++++++----------------- 3 files changed, 45 insertions(+), 34 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 71fe0871c4a6198a30ba1b4bb6e92093f08ef53a..b495bbd7c641937eed4987c48f47157431f11e73 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3568,6 +3568,7 @@ version = "0.1.0" dependencies = [ "aho-corasick", "anyhow", + "async-broadcast", "async-trait", "client", "clock", diff --git a/crates/project/Cargo.toml b/crates/project/Cargo.toml index dea5a10279a8f6e3dfe1c62d9526bbea0e797fac..5e58f4530a3f9f4c62d3cc865f6b68f0f9c552c7 100644 --- a/crates/project/Cargo.toml +++ b/crates/project/Cargo.toml @@ -28,6 +28,7 @@ sum_tree = { path = "../sum_tree" } util = { path = "../util" } aho-corasick 
= "0.7" anyhow = "1.0.38" +async-broadcast = "0.3.4" async-trait = "0.1" futures = "0.3" ignore = "0.4" diff --git a/crates/project/src/fs.rs b/crates/project/src/fs.rs index ec7925685d69173609d7344edad9521d363183ce..d436e5e9faf20d92101cfd8a8d9f015afa7f25ba 100644 --- a/crates/project/src/fs.rs +++ b/crates/project/src/fs.rs @@ -225,7 +225,6 @@ struct FakeFsEntry { struct FakeFsState { entries: std::collections::BTreeMap, next_inode: u64, - event_txs: Vec>>, } #[cfg(any(test, feature = "test-support"))] @@ -242,26 +241,6 @@ impl FakeFsState { Err(anyhow!("invalid path {:?}", path)) } } - - async fn emit_event(&mut self, paths: I) - where - I: IntoIterator, - T: Into, - { - let events = paths - .into_iter() - .map(|path| fsevent::Event { - event_id: 0, - flags: fsevent::StreamFlags::empty(), - path: path.into(), - }) - .collect::>(); - - self.event_txs.retain(|tx| { - let _ = tx.try_send(events.clone()); - !tx.is_closed() - }); - } } #[cfg(any(test, feature = "test-support"))] @@ -269,6 +248,10 @@ pub struct FakeFs { // Use an unfair lock to ensure tests are deterministic. 
state: futures::lock::Mutex, executor: std::sync::Weak, + events: ( + async_broadcast::Sender>, + async_broadcast::Receiver>, + ), } #[cfg(any(test, feature = "test-support"))] @@ -292,8 +275,8 @@ impl FakeFs { state: futures::lock::Mutex::new(FakeFsState { entries, next_inode: 1, - event_txs: Default::default(), }), + events: async_broadcast::broadcast(16), }) } @@ -316,7 +299,9 @@ impl FakeFs { content: None, }, ); - state.emit_event(&[path]).await; + + drop(state); + self.emit_event(&[path]).await; } pub async fn insert_file(&self, path: impl AsRef, content: String) { @@ -338,7 +323,9 @@ impl FakeFs { content: Some(content), }, ); - state.emit_event(&[path]).await; + + drop(state); + self.emit_event(&[path]).await; } #[must_use] @@ -383,6 +370,23 @@ impl FakeFs { .simulate_random_delay() .await; } + + async fn emit_event(&self, paths: I) + where + I: IntoIterator, + T: Into, + { + let events = paths + .into_iter() + .map(|path| fsevent::Event { + event_id: 0, + flags: fsevent::StreamFlags::empty(), + path: path.into(), + }) + .collect::>(); + + let _ = self.events.0.broadcast(events).await; + } } #[cfg(any(test, feature = "test-support"))] @@ -420,7 +424,8 @@ impl Fs for FakeFs { )); } } - state.emit_event(&created_dir_paths).await; + drop(state); + self.emit_event(&created_dir_paths).await; Ok(()) } @@ -461,7 +466,8 @@ impl Fs for FakeFs { }; state.entries.insert(path.to_path_buf(), entry); } - state.emit_event(&[path]).await; + drop(state); + self.emit_event(&[path]).await; Ok(()) } @@ -497,7 +503,8 @@ impl Fs for FakeFs { state.entries.insert(new_path, entry); } - state.emit_event(&[source, target]).await; + drop(state); + self.emit_event(&[source, target]).await; Ok(()) } @@ -522,7 +529,8 @@ impl Fs for FakeFs { } state.entries.retain(|path, _| !path.starts_with(path)); - state.emit_event(&[path]).await; + drop(state); + self.emit_event(&[path]).await; } else if !options.ignore_if_not_exists { return Err(anyhow!("{path:?} does not exist")); } @@ -540,7 
+548,8 @@ impl Fs for FakeFs { } state.entries.remove(&path); - state.emit_event(&[path]).await; + drop(state); + self.emit_event(&[path]).await; } else if !options.ignore_if_not_exists { return Err(anyhow!("{path:?} does not exist")); } @@ -575,7 +584,8 @@ impl Fs for FakeFs { } else { entry.content = Some(text.chunks().collect()); entry.metadata.mtime = SystemTime::now(); - state.emit_event(&[path]).await; + drop(state); + self.emit_event(&[path]).await; Ok(()) } } else { @@ -591,7 +601,8 @@ impl Fs for FakeFs { content: Some(text.chunks().collect()), }; state.entries.insert(path.to_path_buf(), entry); - state.emit_event(&[path]).await; + drop(state); + self.emit_event(&[path]).await; Ok(()) } } @@ -642,10 +653,8 @@ impl Fs for FakeFs { path: &Path, _: Duration, ) -> Pin>>> { - let mut state = self.state.lock().await; self.simulate_random_delay().await; - let (tx, rx) = smol::channel::unbounded(); - state.event_txs.push(tx); + let rx = self.events.1.clone(); let path = path.to_path_buf(); Box::pin(futures::StreamExt::filter(rx, move |events| { let result = events.iter().any(|event| event.path.starts_with(&path)); From aa03ebce0e3d7a45f4a02e1cdc4793a9e0b48750 Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Tue, 1 Mar 2022 16:00:40 +0100 Subject: [PATCH 17/45] Enable leak backtraces by setting `LEAK_BACKTRACE=1` Co-Authored-By: Nathan Sobo --- crates/gpui/src/app.rs | 29 ++++++++++++++++++++++++----- 1 file changed, 24 insertions(+), 5 deletions(-) diff --git a/crates/gpui/src/app.rs b/crates/gpui/src/app.rs index 90eeccdaf6bc65aa0979f9a8d429a7dc148002eb..855090e2f0642d60e6457becd118c38dc3c56e42 100644 --- a/crates/gpui/src/app.rs +++ b/crates/gpui/src/app.rs @@ -10,6 +10,7 @@ use crate::{ use anyhow::{anyhow, Result}; use backtrace::Backtrace; use keymap::MatchResult; +use lazy_static::lazy_static; use parking_lot::Mutex; use platform::Event; use postage::{mpsc, oneshot, sink::Sink as _, stream::Stream as _}; @@ -3767,20 +3768,30 @@ impl Drop for 
Subscription { } } +lazy_static! { + static ref LEAK_BACKTRACE: bool = + std::env::var("LEAK_BACKTRACE").map_or(false, |b| !b.is_empty()); +} + #[derive(Default)] pub struct LeakDetector { next_handle_id: usize, - handle_backtraces: HashMap, HashMap)>, + handle_backtraces: HashMap, HashMap>)>, } impl LeakDetector { fn handle_created(&mut self, type_name: Option<&'static str>, entity_id: usize) -> usize { let handle_id = post_inc(&mut self.next_handle_id); let entry = self.handle_backtraces.entry(entity_id).or_default(); + let backtrace = if *LEAK_BACKTRACE { + Some(Backtrace::new_unresolved()) + } else { + None + }; if let Some(type_name) = type_name { entry.0.get_or_insert(type_name); } - entry.1.insert(handle_id, Backtrace::new_unresolved()); + entry.1.insert(handle_id, backtrace); handle_id } @@ -3803,12 +3814,20 @@ impl LeakDetector { id ); for trace in backtraces.values_mut() { - trace.resolve(); - eprintln!("{:?}", CwdBacktrace(trace)); + if let Some(trace) = trace { + trace.resolve(); + eprintln!("{:?}", CwdBacktrace(trace)); + } } found_leaks = true; } - assert!(!found_leaks, "detected leaked handles"); + + let hint = if *LEAK_BACKTRACE { + "" + } else { + " – set LEAK_BACKTRACE=1 for more information" + }; + assert!(!found_leaks, "detected leaked handles{}", hint); } } From 0d6f6bf5bb78c082b8b487729489aed3ea76bf28 Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Tue, 1 Mar 2022 16:11:12 +0100 Subject: [PATCH 18/45] Detect when view handles are leaked Co-Authored-By: Nathan Sobo --- crates/gpui/src/app.rs | 106 +++++++++++++++++++++++++++++------------ 1 file changed, 76 insertions(+), 30 deletions(-) diff --git a/crates/gpui/src/app.rs b/crates/gpui/src/app.rs index 855090e2f0642d60e6457becd118c38dc3c56e42..3aa628e253db2f00e38e138596e26c80d155b865 100644 --- a/crates/gpui/src/app.rs +++ b/crates/gpui/src/app.rs @@ -16,7 +16,7 @@ use platform::Event; use postage::{mpsc, oneshot, sink::Sink as _, stream::Stream as _}; use smol::prelude::*; use std::{ - 
any::{type_name, Any, TypeId}, + any::{self, type_name, Any, TypeId}, cell::RefCell, collections::{hash_map::Entry, BTreeMap, HashMap, HashSet, VecDeque}, fmt::{self, Debug}, @@ -3149,16 +3149,28 @@ pub struct ViewHandle { view_id: usize, view_type: PhantomData, ref_counts: Arc>, + #[cfg(feature = "test-support")] + handle_id: usize, } impl ViewHandle { fn new(window_id: usize, view_id: usize, ref_counts: &Arc>) -> Self { ref_counts.lock().inc_view(window_id, view_id); + #[cfg(feature = "test-support")] + let handle_id = ref_counts + .lock() + .leak_detector + .lock() + .handle_created(Some(type_name::()), view_id); + Self { window_id, view_id, view_type: PhantomData, ref_counts: ref_counts.clone(), + + #[cfg(feature = "test-support")] + handle_id, } } @@ -3304,17 +3316,9 @@ impl ViewHandle { } } -impl Clone for ViewHandle { +impl Clone for ViewHandle { fn clone(&self) -> Self { - self.ref_counts - .lock() - .inc_view(self.window_id, self.view_id); - Self { - window_id: self.window_id, - view_id: self.view_id, - view_type: PhantomData, - ref_counts: self.ref_counts.clone(), - } + ViewHandle::new(self.window_id, self.view_id, &self.ref_counts) } } @@ -3340,6 +3344,12 @@ impl Drop for ViewHandle { self.ref_counts .lock() .dec_view(self.window_id, self.view_id); + #[cfg(feature = "test-support")] + self.ref_counts + .lock() + .leak_detector + .lock() + .handle_dropped(self.view_id, self.handle_id); } } @@ -3370,10 +3380,40 @@ pub struct AnyViewHandle { window_id: usize, view_id: usize, view_type: TypeId, + type_name: &'static str, ref_counts: Arc>, + #[cfg(feature = "test-support")] + handle_id: usize, } impl AnyViewHandle { + fn new( + window_id: usize, + view_id: usize, + view_type: TypeId, + type_name: &'static str, + ref_counts: Arc>, + ) -> Self { + ref_counts.lock().inc_view(window_id, view_id); + + #[cfg(feature = "test-support")] + let handle_id = ref_counts + .lock() + .leak_detector + .lock() + .handle_created(Some(type_name), view_id); + + Self { + 
window_id, + view_id, + view_type, + type_name, + ref_counts, + #[cfg(feature = "test-support")] + handle_id, + } + } + pub fn id(&self) -> usize { self.view_id } @@ -3394,6 +3434,8 @@ impl AnyViewHandle { view_id: self.view_id, ref_counts: self.ref_counts.clone(), view_type: PhantomData, + #[cfg(feature = "test-support")] + handle_id: self.handle_id, }); unsafe { Arc::decrement_strong_count(&self.ref_counts); @@ -3408,15 +3450,13 @@ impl AnyViewHandle { impl Clone for AnyViewHandle { fn clone(&self) -> Self { - self.ref_counts - .lock() - .inc_view(self.window_id, self.view_id); - Self { - window_id: self.window_id, - view_id: self.view_id, - view_type: self.view_type, - ref_counts: self.ref_counts.clone(), - } + Self::new( + self.window_id, + self.view_id, + self.view_type, + self.type_name, + self.ref_counts.clone(), + ) } } @@ -3428,16 +3468,13 @@ impl From<&AnyViewHandle> for AnyViewHandle { impl From<&ViewHandle> for AnyViewHandle { fn from(handle: &ViewHandle) -> Self { - handle - .ref_counts - .lock() - .inc_view(handle.window_id, handle.view_id); - AnyViewHandle { - window_id: handle.window_id, - view_id: handle.view_id, - view_type: TypeId::of::(), - ref_counts: handle.ref_counts.clone(), - } + Self::new( + handle.window_id, + handle.view_id, + TypeId::of::(), + any::type_name::(), + handle.ref_counts.clone(), + ) } } @@ -3447,7 +3484,10 @@ impl From> for AnyViewHandle { window_id: handle.window_id, view_id: handle.view_id, view_type: TypeId::of::(), + type_name: any::type_name::(), ref_counts: handle.ref_counts.clone(), + #[cfg(feature = "test-support")] + handle_id: handle.handle_id, }; unsafe { Arc::decrement_strong_count(&handle.ref_counts); @@ -3462,6 +3502,12 @@ impl Drop for AnyViewHandle { self.ref_counts .lock() .dec_view(self.window_id, self.view_id); + #[cfg(feature = "test-support")] + self.ref_counts + .lock() + .leak_detector + .lock() + .handle_dropped(self.view_id, self.handle_id); } } From 83a34022353fcaac5e53071e24b1d6e4da00bc74 Mon Sep 
17 00:00:00 2001 From: Antonio Scandurra Date: Tue, 1 Mar 2022 17:01:52 +0100 Subject: [PATCH 19/45] Make `TestAppContext` and its dependencies available only in tests Co-Authored-By: Nathan Sobo --- crates/gpui/Cargo.toml | 5 +- crates/gpui/src/app.rs | 130 +++++++++++++++----------- crates/gpui/src/executor.rs | 95 ++++++++++++------- crates/gpui/src/platform/test.rs | 1 + crates/gpui/src/test.rs | 5 +- crates/gpui/src/util.rs | 15 +-- crates/gpui_macros/src/gpui_macros.rs | 8 +- 7 files changed, 161 insertions(+), 98 deletions(-) diff --git a/crates/gpui/Cargo.toml b/crates/gpui/Cargo.toml index 7a0e356d2a908dc47f45ea01c137a674ca11f09f..95769cd470c3214ac191a379757ba93ceb51c91c 100644 --- a/crates/gpui/Cargo.toml +++ b/crates/gpui/Cargo.toml @@ -8,14 +8,14 @@ version = "0.1.0" path = "src/gpui.rs" [features] -test-support = ["env_logger", "collections/test-support"] +test-support = ["backtrace", "env_logger", "collections/test-support"] [dependencies] collections = { path = "../collections" } gpui_macros = { path = "../gpui_macros" } sum_tree = { path = "../sum_tree" } async-task = "4.0.3" -backtrace = "0.3" +backtrace = { version = "0.3", optional = true } ctor = "0.1" dhat = "0.3" env_logger = { version = "0.8", optional = true } @@ -49,6 +49,7 @@ bindgen = "0.58.1" cc = "1.0.67" [dev-dependencies] +backtrace = "0.3" collections = { path = "../collections", features = ["test-support"] } env_logger = "0.8" png = "0.16" diff --git a/crates/gpui/src/app.rs b/crates/gpui/src/app.rs index 3aa628e253db2f00e38e138596e26c80d155b865..ffe198a503bba8b41abdd3ae936bf8e50698a338 100644 --- a/crates/gpui/src/app.rs +++ b/crates/gpui/src/app.rs @@ -4,16 +4,15 @@ use crate::{ keymap::{self, Keystroke}, platform::{self, CursorStyle, Platform, PromptLevel, WindowOptions}, presenter::Presenter, - util::{post_inc, timeout, CwdBacktrace}, + util::post_inc, AssetCache, AssetSource, ClipboardItem, FontCache, PathPromptOptions, TextLayoutCache, }; use anyhow::{anyhow, Result}; -use 
backtrace::Backtrace; use keymap::MatchResult; use lazy_static::lazy_static; use parking_lot::Mutex; use platform::Event; -use postage::{mpsc, oneshot, sink::Sink as _, stream::Stream as _}; +use postage::oneshot; use smol::prelude::*; use std::{ any::{self, type_name, Any, TypeId}, @@ -237,6 +236,7 @@ pub struct App(Rc>); #[derive(Clone)] pub struct AsyncAppContext(Rc>); +#[cfg(any(test, feature = "test-support"))] pub struct TestAppContext { cx: Rc>, foreground_platform: Rc, @@ -384,6 +384,7 @@ impl App { } } +#[cfg(any(test, feature = "test-support"))] impl TestAppContext { pub fn new( foreground_platform: Rc, @@ -401,7 +402,7 @@ impl TestAppContext { foreground_platform.clone(), font_cache, RefCounts { - #[cfg(feature = "test-support")] + #[cfg(any(test, feature = "test-support"))] leak_detector, ..Default::default() }, @@ -544,6 +545,8 @@ impl TestAppContext { } pub fn simulate_prompt_answer(&self, window_id: usize, answer: usize) { + use postage::prelude::Sink as _; + let mut state = self.cx.borrow_mut(); let (_, window) = state .presenters_and_platform_windows @@ -560,18 +563,12 @@ impl TestAppContext { let _ = done_tx.try_send(answer); } - #[cfg(feature = "test-support")] + #[cfg(any(test, feature = "test-support"))] pub fn leak_detector(&self) -> Arc> { self.cx.borrow().leak_detector() } } -impl Drop for TestAppContext { - fn drop(&mut self) { - self.cx.borrow_mut().remove_all_windows(); - } -} - impl AsyncAppContext { pub fn spawn(&self, f: F) -> Task where @@ -684,6 +681,7 @@ impl ReadViewWith for AsyncAppContext { } } +#[cfg(any(test, feature = "test-support"))] impl UpdateModel for TestAppContext { fn update_model( &mut self, @@ -694,6 +692,7 @@ impl UpdateModel for TestAppContext { } } +#[cfg(any(test, feature = "test-support"))] impl ReadModelWith for TestAppContext { fn read_model_with( &self, @@ -706,6 +705,7 @@ impl ReadModelWith for TestAppContext { } } +#[cfg(any(test, feature = "test-support"))] impl UpdateView for TestAppContext { fn 
update_view( &mut self, @@ -719,6 +719,7 @@ impl UpdateView for TestAppContext { } } +#[cfg(any(test, feature = "test-support"))] impl ReadViewWith for TestAppContext { fn read_view_with( &self, @@ -848,7 +849,7 @@ impl MutableAppContext { } } - fn remove_all_windows(&mut self) { + pub fn remove_all_windows(&mut self) { for (window_id, _) in self.cx.windows.drain() { self.presenters_and_platform_windows.remove(&window_id); } @@ -1827,7 +1828,7 @@ impl MutableAppContext { self.cx.platform.read_from_clipboard() } - #[cfg(feature = "test-support")] + #[cfg(any(test, feature = "test-support"))] pub fn leak_detector(&self) -> Arc> { self.cx.ref_counts.lock().leak_detector.clone() } @@ -2841,7 +2842,7 @@ pub struct ModelHandle { model_type: PhantomData, ref_counts: Arc>, - #[cfg(feature = "test-support")] + #[cfg(any(test, feature = "test-support"))] handle_id: usize, } @@ -2849,7 +2850,7 @@ impl ModelHandle { fn new(model_id: usize, ref_counts: &Arc>) -> Self { ref_counts.lock().inc_model(model_id); - #[cfg(feature = "test-support")] + #[cfg(any(test, feature = "test-support"))] let handle_id = ref_counts .lock() .leak_detector @@ -2861,7 +2862,7 @@ impl ModelHandle { model_type: PhantomData, ref_counts: ref_counts.clone(), - #[cfg(feature = "test-support")] + #[cfg(any(test, feature = "test-support"))] handle_id, } } @@ -2902,8 +2903,11 @@ impl ModelHandle { }) } + #[cfg(any(test, feature = "test-support"))] pub fn next_notification(&self, cx: &TestAppContext) -> impl Future { - let (mut tx, mut rx) = mpsc::channel(1); + use postage::prelude::{Sink as _, Stream as _}; + + let (mut tx, mut rx) = postage::mpsc::channel(1); let mut cx = cx.cx.borrow_mut(); let subscription = cx.observe(self, move |_, _| { tx.try_send(()).ok(); @@ -2916,7 +2920,7 @@ impl ModelHandle { }; async move { - let notification = timeout(duration, rx.recv()) + let notification = crate::util::timeout(duration, rx.recv()) .await .expect("next notification timed out"); drop(subscription); @@ -2924,11 
+2928,14 @@ impl ModelHandle { } } + #[cfg(any(test, feature = "test-support"))] pub fn next_event(&self, cx: &TestAppContext) -> impl Future where T::Event: Clone, { - let (mut tx, mut rx) = mpsc::channel(1); + use postage::prelude::{Sink as _, Stream as _}; + + let (mut tx, mut rx) = postage::mpsc::channel(1); let mut cx = cx.cx.borrow_mut(); let subscription = cx.subscribe(self, move |_, event, _| { tx.blocking_send(event.clone()).ok(); @@ -2941,7 +2948,7 @@ impl ModelHandle { }; async move { - let event = timeout(duration, rx.recv()) + let event = crate::util::timeout(duration, rx.recv()) .await .expect("next event timed out"); drop(subscription); @@ -2949,12 +2956,15 @@ impl ModelHandle { } } + #[cfg(any(test, feature = "test-support"))] pub fn condition( &self, cx: &TestAppContext, mut predicate: impl FnMut(&T, &AppContext) -> bool, ) -> impl Future { - let (tx, mut rx) = mpsc::channel(1024); + use postage::prelude::{Sink as _, Stream as _}; + + let (tx, mut rx) = postage::mpsc::channel(1024); let mut cx = cx.cx.borrow_mut(); let subscriptions = ( @@ -2981,7 +2991,7 @@ impl ModelHandle { }; async move { - timeout(duration, async move { + crate::util::timeout(duration, async move { loop { { let cx = cx.borrow(); @@ -3059,7 +3069,7 @@ impl Drop for ModelHandle { let mut ref_counts = self.ref_counts.lock(); ref_counts.dec_model(self.model_id); - #[cfg(feature = "test-support")] + #[cfg(any(test, feature = "test-support"))] ref_counts .leak_detector .lock() @@ -3149,14 +3159,14 @@ pub struct ViewHandle { view_id: usize, view_type: PhantomData, ref_counts: Arc>, - #[cfg(feature = "test-support")] + #[cfg(any(test, feature = "test-support"))] handle_id: usize, } impl ViewHandle { fn new(window_id: usize, view_id: usize, ref_counts: &Arc>) -> Self { ref_counts.lock().inc_view(window_id, view_id); - #[cfg(feature = "test-support")] + #[cfg(any(test, feature = "test-support"))] let handle_id = ref_counts .lock() .leak_detector @@ -3169,7 +3179,7 @@ impl ViewHandle { 
view_type: PhantomData, ref_counts: ref_counts.clone(), - #[cfg(feature = "test-support")] + #[cfg(any(test, feature = "test-support"))] handle_id, } } @@ -3230,8 +3240,11 @@ impl ViewHandle { .map_or(false, |focused_id| focused_id == self.view_id) } + #[cfg(any(test, feature = "test-support"))] pub fn next_notification(&self, cx: &TestAppContext) -> impl Future { - let (mut tx, mut rx) = mpsc::channel(1); + use postage::prelude::{Sink as _, Stream as _}; + + let (mut tx, mut rx) = postage::mpsc::channel(1); let mut cx = cx.cx.borrow_mut(); let subscription = cx.observe(self, move |_, _| { tx.try_send(()).ok(); @@ -3244,7 +3257,7 @@ impl ViewHandle { }; async move { - let notification = timeout(duration, rx.recv()) + let notification = crate::util::timeout(duration, rx.recv()) .await .expect("next notification timed out"); drop(subscription); @@ -3252,12 +3265,15 @@ impl ViewHandle { } } + #[cfg(any(test, feature = "test-support"))] pub fn condition( &self, cx: &TestAppContext, mut predicate: impl FnMut(&T, &AppContext) -> bool, ) -> impl Future { - let (tx, mut rx) = mpsc::channel(1024); + use postage::prelude::{Sink as _, Stream as _}; + + let (tx, mut rx) = postage::mpsc::channel(1024); let mut cx = cx.cx.borrow_mut(); let subscriptions = self.update(&mut *cx, |_, cx| { @@ -3286,7 +3302,7 @@ impl ViewHandle { }; async move { - timeout(duration, async move { + crate::util::timeout(duration, async move { loop { { let cx = cx.borrow(); @@ -3344,7 +3360,7 @@ impl Drop for ViewHandle { self.ref_counts .lock() .dec_view(self.window_id, self.view_id); - #[cfg(feature = "test-support")] + #[cfg(any(test, feature = "test-support"))] self.ref_counts .lock() .leak_detector @@ -3382,7 +3398,7 @@ pub struct AnyViewHandle { view_type: TypeId, type_name: &'static str, ref_counts: Arc>, - #[cfg(feature = "test-support")] + #[cfg(any(test, feature = "test-support"))] handle_id: usize, } @@ -3396,7 +3412,7 @@ impl AnyViewHandle { ) -> Self { ref_counts.lock().inc_view(window_id, 
view_id); - #[cfg(feature = "test-support")] + #[cfg(any(test, feature = "test-support"))] let handle_id = ref_counts .lock() .leak_detector @@ -3409,7 +3425,7 @@ impl AnyViewHandle { view_type, type_name, ref_counts, - #[cfg(feature = "test-support")] + #[cfg(any(test, feature = "test-support"))] handle_id, } } @@ -3434,7 +3450,7 @@ impl AnyViewHandle { view_id: self.view_id, ref_counts: self.ref_counts.clone(), view_type: PhantomData, - #[cfg(feature = "test-support")] + #[cfg(any(test, feature = "test-support"))] handle_id: self.handle_id, }); unsafe { @@ -3486,7 +3502,7 @@ impl From> for AnyViewHandle { view_type: TypeId::of::(), type_name: any::type_name::(), ref_counts: handle.ref_counts.clone(), - #[cfg(feature = "test-support")] + #[cfg(any(test, feature = "test-support"))] handle_id: handle.handle_id, }; unsafe { @@ -3502,7 +3518,7 @@ impl Drop for AnyViewHandle { self.ref_counts .lock() .dec_view(self.window_id, self.view_id); - #[cfg(feature = "test-support")] + #[cfg(any(test, feature = "test-support"))] self.ref_counts .lock() .leak_detector @@ -3516,7 +3532,7 @@ pub struct AnyModelHandle { model_type: TypeId, ref_counts: Arc>, - #[cfg(feature = "test-support")] + #[cfg(any(test, feature = "test-support"))] handle_id: usize, } @@ -3524,7 +3540,7 @@ impl AnyModelHandle { fn new(model_id: usize, model_type: TypeId, ref_counts: Arc>) -> Self { ref_counts.lock().inc_model(model_id); - #[cfg(feature = "test-support")] + #[cfg(any(test, feature = "test-support"))] let handle_id = ref_counts .lock() .leak_detector @@ -3536,7 +3552,7 @@ impl AnyModelHandle { model_type, ref_counts, - #[cfg(feature = "test-support")] + #[cfg(any(test, feature = "test-support"))] handle_id, } } @@ -3548,7 +3564,7 @@ impl AnyModelHandle { model_type: PhantomData, ref_counts: self.ref_counts.clone(), - #[cfg(feature = "test-support")] + #[cfg(any(test, feature = "test-support"))] handle_id: self.handle_id, }); unsafe { @@ -3594,7 +3610,7 @@ impl Drop for AnyModelHandle { let mut 
ref_counts = self.ref_counts.lock(); ref_counts.dec_model(self.model_id); - #[cfg(feature = "test-support")] + #[cfg(any(test, feature = "test-support"))] ref_counts .leak_detector .lock() @@ -3819,18 +3835,26 @@ lazy_static! { std::env::var("LEAK_BACKTRACE").map_or(false, |b| !b.is_empty()); } +#[cfg(any(test, feature = "test-support"))] #[derive(Default)] pub struct LeakDetector { next_handle_id: usize, - handle_backtraces: HashMap, HashMap>)>, + handle_backtraces: HashMap< + usize, + ( + Option<&'static str>, + HashMap>, + ), + >, } +#[cfg(any(test, feature = "test-support"))] impl LeakDetector { fn handle_created(&mut self, type_name: Option<&'static str>, entity_id: usize) -> usize { let handle_id = post_inc(&mut self.next_handle_id); let entry = self.handle_backtraces.entry(entity_id).or_default(); let backtrace = if *LEAK_BACKTRACE { - Some(Backtrace::new_unresolved()) + Some(backtrace::Backtrace::new_unresolved()) } else { None }; @@ -3862,7 +3886,7 @@ impl LeakDetector { for trace in backtraces.values_mut() { if let Some(trace) = trace { trace.resolve(); - eprintln!("{:?}", CwdBacktrace(trace)); + eprintln!("{:?}", crate::util::CwdBacktrace(trace)); } } found_leaks = true; @@ -3885,7 +3909,7 @@ struct RefCounts { dropped_views: HashSet<(usize, usize)>, dropped_element_states: HashSet, - #[cfg(feature = "test-support")] + #[cfg(any(test, feature = "test-support"))] leak_detector: Arc>, } @@ -4067,13 +4091,11 @@ mod tests { let handle_1 = cx.add_model(|_| Model::default()); let handle_2 = cx.add_model(|_| Model::default()); - let handle_2b = handle_2.clone(); - handle_1.update(cx, |_, c| { - c.subscribe(&handle_2, move |model: &mut Model, _, event, c| { + c.subscribe(&handle_2, move |model: &mut Model, emitter, event, c| { model.events.push(*event); - c.subscribe(&handle_2b, |model, _, event, _| { + c.subscribe(&emitter, |model, _, event, _| { model.events.push(*event * 2); }) .detach(); @@ -4102,12 +4124,11 @@ mod tests { let handle_1 = cx.add_model(|_| 
Model::default()); let handle_2 = cx.add_model(|_| Model::default()); - let handle_2b = handle_2.clone(); handle_1.update(cx, |_, c| { c.observe(&handle_2, move |model, observed, c| { model.events.push(observed.read(c).count); - c.observe(&handle_2b, |model, observed, c| { + c.observe(&observed, |model, observed, c| { model.events.push(observed.read(c).count * 2); }) .detach(); @@ -4341,14 +4362,13 @@ mod tests { let (window_id, handle_1) = cx.add_window(Default::default(), |_| View::default()); let handle_2 = cx.add_view(window_id, |_| View::default()); - let handle_2b = handle_2.clone(); let handle_3 = cx.add_model(|_| Model); handle_1.update(cx, |_, c| { - c.subscribe(&handle_2, move |me, _, event, c| { + c.subscribe(&handle_2, move |me, emitter, event, c| { me.events.push(*event); - c.subscribe(&handle_2b, |me, _, event, _| { + c.subscribe(&emitter, |me, _, event, _| { me.events.push(*event * 2); }) .detach(); diff --git a/crates/gpui/src/executor.rs b/crates/gpui/src/executor.rs index bfe3eff3d4a6eca93b5671749dc04fe7b02c947f..e73f403dc39478ce03522f8be1cafe76e4263742 100644 --- a/crates/gpui/src/executor.rs +++ b/crates/gpui/src/executor.rs @@ -1,28 +1,18 @@ use anyhow::{anyhow, Result}; use async_task::Runnable; -use backtrace::Backtrace; -use collections::HashMap; -use parking_lot::Mutex; -use postage::{barrier, prelude::Stream as _}; -use rand::prelude::*; -use smol::{channel, future::yield_now, prelude::*, Executor, Timer}; +use smol::{channel, prelude::*, Executor, Timer}; use std::{ any::Any, fmt::{self, Display}, marker::PhantomData, mem, - ops::RangeInclusive, pin::Pin, rc::Rc, - sync::{ - atomic::{AtomicBool, Ordering::SeqCst}, - Arc, - }, + sync::Arc, task::{Context, Poll}, thread, - time::{Duration, Instant}, + time::Duration, }; -use waker_fn::waker_fn; use crate::{ platform::{self, Dispatcher}, @@ -34,6 +24,7 @@ pub enum Foreground { dispatcher: Arc, _not_send_or_sync: PhantomData>, }, + #[cfg(any(test, feature = "test-support"))] Deterministic { 
cx_id: usize, executor: Arc, @@ -41,9 +32,8 @@ pub enum Foreground { } pub enum Background { - Deterministic { - executor: Arc, - }, + #[cfg(any(test, feature = "test-support"))] + Deterministic { executor: Arc }, Production { executor: Arc>, _stop: channel::Sender<()>, @@ -70,39 +60,45 @@ pub enum Task { unsafe impl Send for Task {} +#[cfg(any(test, feature = "test-support"))] struct DeterministicState { - rng: StdRng, + rng: rand::prelude::StdRng, seed: u64, - scheduled_from_foreground: HashMap>, + scheduled_from_foreground: collections::HashMap>, scheduled_from_background: Vec, forbid_parking: bool, - block_on_ticks: RangeInclusive, - now: Instant, - pending_timers: Vec<(Instant, barrier::Sender)>, - waiting_backtrace: Option, + block_on_ticks: std::ops::RangeInclusive, + now: std::time::Instant, + pending_timers: Vec<(std::time::Instant, postage::barrier::Sender)>, + waiting_backtrace: Option, } +#[cfg(any(test, feature = "test-support"))] struct ForegroundRunnable { runnable: Runnable, main: bool, } +#[cfg(any(test, feature = "test-support"))] pub struct Deterministic { - state: Arc>, - parker: Mutex, + state: Arc>, + parker: parking_lot::Mutex, } +#[cfg(any(test, feature = "test-support"))] impl Deterministic { pub fn new(seed: u64) -> Arc { + use rand::prelude::*; + Arc::new(Self { - state: Arc::new(Mutex::new(DeterministicState { + state: Arc::new(parking_lot::Mutex::new(DeterministicState { rng: StdRng::seed_from_u64(seed), seed, scheduled_from_foreground: Default::default(), scheduled_from_background: Default::default(), forbid_parking: false, block_on_ticks: 0..=1000, - now: Instant::now(), + now: std::time::Instant::now(), pending_timers: Default::default(), waiting_backtrace: None, })), @@ -161,6 +157,8 @@ impl Deterministic { cx_id: usize, main_future: Pin>>>, ) -> Box { + use std::sync::atomic::{AtomicBool, Ordering::SeqCst}; + let woken = Arc::new(AtomicBool::new(false)); let state = self.state.clone(); @@ -195,18 +193,22 @@ impl Deterministic { } } 
- pub(crate) fn run_until_parked(&self) { + pub fn run_until_parked(&self) { + use std::sync::atomic::AtomicBool; let woken = Arc::new(AtomicBool::new(false)); self.run_internal(woken, None); } fn run_internal( &self, - woken: Arc, + woken: Arc, mut main_task: Option<&mut AnyLocalTask>, ) -> Option> { + use rand::prelude::*; + use std::sync::atomic::Ordering::SeqCst; + let unparker = self.parker.lock().unparker(); - let waker = waker_fn(move || { + let waker = waker_fn::waker_fn(move || { woken.store(true, SeqCst); unparker.unpark(); }); @@ -261,8 +263,10 @@ impl Deterministic { where F: Unpin + Future, { + use rand::prelude::*; + let unparker = self.parker.lock().unparker(); - let waker = waker_fn(move || { + let waker = waker_fn::waker_fn(move || { unparker.unpark(); }); @@ -295,10 +299,12 @@ impl Deterministic { } } +#[cfg(any(test, feature = "test-support"))] impl DeterministicState { fn will_park(&mut self) { if self.forbid_parking { let mut backtrace_message = String::new(); + #[cfg(any(test, feature = "test-support"))] if let Some(backtrace) = self.waiting_backtrace.as_mut() { backtrace.resolve(); backtrace_message = format!( @@ -330,6 +336,7 @@ impl Foreground { pub fn spawn(&self, future: impl Future + 'static) -> Task { let future = any_local_future(future); let any_task = match self { + #[cfg(any(test, feature = "test-support"))] Self::Deterministic { cx_id, executor } => { executor.spawn_from_foreground(*cx_id, future, false) } @@ -351,6 +358,7 @@ impl Foreground { Task::local(any_task) } + #[cfg(any(test, feature = "test-support"))] pub fn run(&self, future: impl Future) -> T { let future = async move { Box::new(future.await) as Box }.boxed_local(); let result = match self { @@ -360,6 +368,7 @@ impl Foreground { *result.downcast().unwrap() } + #[cfg(any(test, feature = "test-support"))] pub fn run_until_parked(&self) { match self { Self::Deterministic { executor, .. 
} => executor.run_until_parked(), @@ -367,6 +376,7 @@ impl Foreground { } } + #[cfg(any(test, feature = "test-support"))] pub fn parking_forbidden(&self) -> bool { match self { Self::Deterministic { executor, .. } => executor.state.lock().forbid_parking, @@ -374,15 +384,18 @@ impl Foreground { } } + #[cfg(any(test, feature = "test-support"))] pub fn start_waiting(&self) { match self { Self::Deterministic { executor, .. } => { - executor.state.lock().waiting_backtrace = Some(Backtrace::new_unresolved()); + executor.state.lock().waiting_backtrace = + Some(backtrace::Backtrace::new_unresolved()); } _ => panic!("this method can only be called on a deterministic executor"), } } + #[cfg(any(test, feature = "test-support"))] pub fn finish_waiting(&self) { match self { Self::Deterministic { executor, .. } => { @@ -392,7 +405,10 @@ impl Foreground { } } + #[cfg(any(test, feature = "test-support"))] pub fn forbid_parking(&self) { + use rand::prelude::*; + match self { Self::Deterministic { executor, .. } => { let mut state = executor.state.lock(); @@ -405,8 +421,11 @@ impl Foreground { pub async fn timer(&self, duration: Duration) { match self { + #[cfg(any(test, feature = "test-support"))] Self::Deterministic { executor, .. } => { - let (tx, mut rx) = barrier::channel(); + use postage::prelude::Stream as _; + + let (tx, mut rx) = postage::barrier::channel(); { let mut state = executor.state.lock(); let wakeup_at = state.now + duration; @@ -420,6 +439,7 @@ impl Foreground { } } + #[cfg(any(test, feature = "test-support"))] pub fn advance_clock(&self, duration: Duration) { match self { Self::Deterministic { executor, .. } => { @@ -438,7 +458,8 @@ impl Foreground { } } - pub fn set_block_on_ticks(&self, range: RangeInclusive) { + #[cfg(any(test, feature = "test-support"))] + pub fn set_block_on_ticks(&self, range: std::ops::RangeInclusive) { match self { Self::Deterministic { executor, .. 
} => executor.state.lock().block_on_ticks = range, _ => panic!("this method can only be called on a deterministic executor"), @@ -478,6 +499,7 @@ impl Background { let future = any_future(future); let any_task = match self { Self::Production { executor, .. } => executor.spawn(future), + #[cfg(any(test, feature = "test-support"))] Self::Deterministic { executor } => executor.spawn(future), }; Task::send(any_task) @@ -490,6 +512,7 @@ impl Background { smol::pin!(future); match self { Self::Production { .. } => smol::block_on(&mut future), + #[cfg(any(test, feature = "test-support"))] Self::Deterministic { executor, .. } => { executor.block(&mut future, usize::MAX).unwrap() } @@ -509,7 +532,9 @@ impl Background { if !timeout.is_zero() { let output = match self { Self::Production { .. } => smol::block_on(util::timeout(timeout, &mut future)).ok(), + #[cfg(any(test, feature = "test-support"))] Self::Deterministic { executor, .. } => { + use rand::prelude::*; let max_ticks = { let mut state = executor.state.lock(); let range = state.block_on_ticks.clone(); @@ -544,7 +569,11 @@ impl Background { } } + #[cfg(any(test, feature = "test-support"))] pub async fn simulate_random_delay(&self) { + use rand::prelude::*; + use smol::future::yield_now; + match self { Self::Deterministic { executor, .. 
} => { if executor.state.lock().rng.gen_bool(0.2) { diff --git a/crates/gpui/src/platform/test.rs b/crates/gpui/src/platform/test.rs index 3e3ec60acb10fc4d67b66ba5f6064b8e67c9f21f..706439a955d6e96cd3d01c8bfb44a634786780df 100644 --- a/crates/gpui/src/platform/test.rs +++ b/crates/gpui/src/platform/test.rs @@ -39,6 +39,7 @@ pub struct Window { pub(crate) last_prompt: Cell>>, } +#[cfg(any(test, feature = "test-support"))] impl ForegroundPlatform { pub(crate) fn simulate_new_path_selection( &self, diff --git a/crates/gpui/src/test.rs b/crates/gpui/src/test.rs index 788084067dce985c8c265868a37f7c8506848890..c8a69c4e7d7b0ee46caac66b041e4fa769ef3e5a 100644 --- a/crates/gpui/src/test.rs +++ b/crates/gpui/src/test.rs @@ -86,10 +86,13 @@ pub fn run_test( deterministic.clone(), seed, is_last_iteration, - ) + ); }); + cx.update(|cx| cx.remove_all_windows()); deterministic.run_until_parked(); + cx.update(|_| {}); // flush effects + leak_detector.lock().detect(); if is_last_iteration { break; diff --git a/crates/gpui/src/util.rs b/crates/gpui/src/util.rs index 9677be56bec9abd84224415f0eb9e34b2b5b8954..9e59c387e8c2beee95c5b3692d486b89e68bd771 100644 --- a/crates/gpui/src/util.rs +++ b/crates/gpui/src/util.rs @@ -1,6 +1,5 @@ -use backtrace::{Backtrace, BacktraceFmt, BytesOrWideString}; use smol::future::FutureExt; -use std::{fmt, future::Future, time::Duration}; +use std::{future::Future, time::Duration}; pub fn post_inc(value: &mut usize) -> usize { let prev = *value; @@ -20,14 +19,18 @@ where timer.race(future).await } -pub struct CwdBacktrace<'a>(pub &'a Backtrace); +#[cfg(any(test, feature = "test-support"))] +pub struct CwdBacktrace<'a>(pub &'a backtrace::Backtrace); +#[cfg(any(test, feature = "test-support"))] impl<'a> std::fmt::Debug for CwdBacktrace<'a> { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> std::fmt::Result { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + use backtrace::{BacktraceFmt, BytesOrWideString}; + let cwd = 
std::env::current_dir().unwrap(); let cwd = cwd.parent().unwrap(); - let mut print_path = |fmt: &mut fmt::Formatter<'_>, path: BytesOrWideString<'_>| { - fmt::Display::fmt(&path, fmt) + let mut print_path = |fmt: &mut std::fmt::Formatter<'_>, path: BytesOrWideString<'_>| { + std::fmt::Display::fmt(&path, fmt) }; let mut fmt = BacktraceFmt::new(f, backtrace::PrintFmt::Full, &mut print_path); for frame in self.0.frames() { diff --git a/crates/gpui_macros/src/gpui_macros.rs b/crates/gpui_macros/src/gpui_macros.rs index 83bb33b756e4aa7ed1536f7a53f9e4e59a0c4339..c107175dca71fcec76e32a08dd8ff4e6579dadaf 100644 --- a/crates/gpui_macros/src/gpui_macros.rs +++ b/crates/gpui_macros/src/gpui_macros.rs @@ -66,6 +66,7 @@ pub fn test(args: TokenStream, function: TokenStream) -> TokenStream { // Pass to the test function the number of app contexts that it needs, // based on its parameter list. let mut cx_vars = proc_macro2::TokenStream::new(); + let mut cx_teardowns = proc_macro2::TokenStream::new(); let mut inner_fn_args = proc_macro2::TokenStream::new(); for (ix, arg) in inner_fn.sig.inputs.iter().enumerate() { if let FnArg::Typed(arg) = arg { @@ -104,6 +105,11 @@ pub fn test(args: TokenStream, function: TokenStream) -> TokenStream { #first_entity_id, ); )); + cx_teardowns.extend(quote!( + #cx_varname.update(|cx| cx.remove_all_windows()); + deterministic.run_until_parked(); + #cx_varname.update(|_| {}); // flush effects + )); inner_fn_args.extend(quote!(&mut #cx_varname,)); } _ => { @@ -145,7 +151,7 @@ pub fn test(args: TokenStream, function: TokenStream) -> TokenStream { &mut |cx, foreground_platform, deterministic, seed, is_last_iteration| { #cx_vars cx.foreground().run(#inner_fn_name(#inner_fn_args)); - cx.foreground().run_until_parked(); + #cx_teardowns } ); } From 0bb7189842aa8926e84fb3a166a5c6fa9f36e272 Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Tue, 1 Mar 2022 17:08:10 +0100 Subject: [PATCH 20/45] Use a weak handle to poll local worktree snapshot 
Co-Authored-By: Nathan Sobo --- crates/project/src/worktree.rs | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/crates/project/src/worktree.rs b/crates/project/src/worktree.rs index 3e2ad5ce20991316feda3006eb168b228803c7c4..33a564585acfaa71fbe154d1c0d33548c5234035 100644 --- a/crates/project/src/worktree.rs +++ b/crates/project/src/worktree.rs @@ -384,16 +384,18 @@ impl Worktree { worktree.snapshot = worktree.background_snapshot.lock().clone(); if worktree.is_scanning() { if worktree.poll_task.is_none() { - worktree.poll_task = Some(cx.spawn(|this, mut cx| async move { + worktree.poll_task = Some(cx.spawn_weak(|this, mut cx| async move { if is_fake_fs { cx.background().simulate_random_delay().await; } else { smol::Timer::after(Duration::from_millis(100)).await; } - this.update(&mut cx, |this, cx| { - this.as_local_mut().unwrap().poll_task = None; - this.poll_snapshot(cx); - }) + if let Some(this) = this.upgrade(&cx) { + this.update(&mut cx, |this, cx| { + this.as_local_mut().unwrap().poll_task = None; + this.poll_snapshot(cx); + }); + } })); } } else { From 992328a6ee3b99827309ce4d1341e64ecf2af823 Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Tue, 1 Mar 2022 17:14:18 +0100 Subject: [PATCH 21/45] Forbid parking in `project_panel::tests::test_visible_list` Co-Authored-By: Nathan Sobo --- crates/project_panel/src/project_panel.rs | 2 ++ 1 file changed, 2 insertions(+) diff --git a/crates/project_panel/src/project_panel.rs b/crates/project_panel/src/project_panel.rs index 8407cc3c26d6cf9a27c247fac9f878c114d2b2a6..1317ceb7ecacd764561698d7f94dece257985311 100644 --- a/crates/project_panel/src/project_panel.rs +++ b/crates/project_panel/src/project_panel.rs @@ -590,6 +590,8 @@ mod tests { #[gpui::test] async fn test_visible_list(cx: &mut gpui::TestAppContext) { + cx.foreground().forbid_parking(); + let params = cx.update(WorkspaceParams::test); let settings = params.settings.clone(); let fs = params.fs.as_fake(); From 
c661ff251d54526d97af4abf2021a2a2d71da0ce Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Tue, 1 Mar 2022 17:14:40 +0100 Subject: [PATCH 22/45] Revert "Use `async_broadcast` to emit fake FS events" This reverts commit 4cfd345f9d91ebd8e76a668f3494ecf2e45c4b9d, because having a bounded broadcast introduces the possibility of waiting forever when there isn't yet a receiver processing those events. Co-Authored-By: Nathan Sobo --- Cargo.lock | 1 - crates/project/Cargo.toml | 1 - crates/project/src/fs.rs | 77 +++++++++++++++++---------------------- 3 files changed, 34 insertions(+), 45 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index b495bbd7c641937eed4987c48f47157431f11e73..71fe0871c4a6198a30ba1b4bb6e92093f08ef53a 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3568,7 +3568,6 @@ version = "0.1.0" dependencies = [ "aho-corasick", "anyhow", - "async-broadcast", "async-trait", "client", "clock", diff --git a/crates/project/Cargo.toml b/crates/project/Cargo.toml index 5e58f4530a3f9f4c62d3cc865f6b68f0f9c552c7..dea5a10279a8f6e3dfe1c62d9526bbea0e797fac 100644 --- a/crates/project/Cargo.toml +++ b/crates/project/Cargo.toml @@ -28,7 +28,6 @@ sum_tree = { path = "../sum_tree" } util = { path = "../util" } aho-corasick = "0.7" anyhow = "1.0.38" -async-broadcast = "0.3.4" async-trait = "0.1" futures = "0.3" ignore = "0.4" diff --git a/crates/project/src/fs.rs b/crates/project/src/fs.rs index d436e5e9faf20d92101cfd8a8d9f015afa7f25ba..ec7925685d69173609d7344edad9521d363183ce 100644 --- a/crates/project/src/fs.rs +++ b/crates/project/src/fs.rs @@ -225,6 +225,7 @@ struct FakeFsEntry { struct FakeFsState { entries: std::collections::BTreeMap, next_inode: u64, + event_txs: Vec>>, } #[cfg(any(test, feature = "test-support"))] @@ -241,6 +242,26 @@ impl FakeFsState { Err(anyhow!("invalid path {:?}", path)) } } + + async fn emit_event(&mut self, paths: I) + where + I: IntoIterator, + T: Into, + { + let events = paths + .into_iter() + .map(|path| fsevent::Event { + event_id: 0, + 
flags: fsevent::StreamFlags::empty(), + path: path.into(), + }) + .collect::>(); + + self.event_txs.retain(|tx| { + let _ = tx.try_send(events.clone()); + !tx.is_closed() + }); + } } #[cfg(any(test, feature = "test-support"))] @@ -248,10 +269,6 @@ pub struct FakeFs { // Use an unfair lock to ensure tests are deterministic. state: futures::lock::Mutex, executor: std::sync::Weak, - events: ( - async_broadcast::Sender>, - async_broadcast::Receiver>, - ), } #[cfg(any(test, feature = "test-support"))] @@ -275,8 +292,8 @@ impl FakeFs { state: futures::lock::Mutex::new(FakeFsState { entries, next_inode: 1, + event_txs: Default::default(), }), - events: async_broadcast::broadcast(16), }) } @@ -299,9 +316,7 @@ impl FakeFs { content: None, }, ); - - drop(state); - self.emit_event(&[path]).await; + state.emit_event(&[path]).await; } pub async fn insert_file(&self, path: impl AsRef, content: String) { @@ -323,9 +338,7 @@ impl FakeFs { content: Some(content), }, ); - - drop(state); - self.emit_event(&[path]).await; + state.emit_event(&[path]).await; } #[must_use] @@ -370,23 +383,6 @@ impl FakeFs { .simulate_random_delay() .await; } - - async fn emit_event(&self, paths: I) - where - I: IntoIterator, - T: Into, - { - let events = paths - .into_iter() - .map(|path| fsevent::Event { - event_id: 0, - flags: fsevent::StreamFlags::empty(), - path: path.into(), - }) - .collect::>(); - - let _ = self.events.0.broadcast(events).await; - } } #[cfg(any(test, feature = "test-support"))] @@ -424,8 +420,7 @@ impl Fs for FakeFs { )); } } - drop(state); - self.emit_event(&created_dir_paths).await; + state.emit_event(&created_dir_paths).await; Ok(()) } @@ -466,8 +461,7 @@ impl Fs for FakeFs { }; state.entries.insert(path.to_path_buf(), entry); } - drop(state); - self.emit_event(&[path]).await; + state.emit_event(&[path]).await; Ok(()) } @@ -503,8 +497,7 @@ impl Fs for FakeFs { state.entries.insert(new_path, entry); } - drop(state); - self.emit_event(&[source, target]).await; + 
state.emit_event(&[source, target]).await; Ok(()) } @@ -529,8 +522,7 @@ impl Fs for FakeFs { } state.entries.retain(|path, _| !path.starts_with(path)); - drop(state); - self.emit_event(&[path]).await; + state.emit_event(&[path]).await; } else if !options.ignore_if_not_exists { return Err(anyhow!("{path:?} does not exist")); } @@ -548,8 +540,7 @@ impl Fs for FakeFs { } state.entries.remove(&path); - drop(state); - self.emit_event(&[path]).await; + state.emit_event(&[path]).await; } else if !options.ignore_if_not_exists { return Err(anyhow!("{path:?} does not exist")); } @@ -584,8 +575,7 @@ impl Fs for FakeFs { } else { entry.content = Some(text.chunks().collect()); entry.metadata.mtime = SystemTime::now(); - drop(state); - self.emit_event(&[path]).await; + state.emit_event(&[path]).await; Ok(()) } } else { @@ -601,8 +591,7 @@ impl Fs for FakeFs { content: Some(text.chunks().collect()), }; state.entries.insert(path.to_path_buf(), entry); - drop(state); - self.emit_event(&[path]).await; + state.emit_event(&[path]).await; Ok(()) } } @@ -653,8 +642,10 @@ impl Fs for FakeFs { path: &Path, _: Duration, ) -> Pin>>> { + let mut state = self.state.lock().await; self.simulate_random_delay().await; - let rx = self.events.1.clone(); + let (tx, rx) = smol::channel::unbounded(); + state.event_txs.push(tx); let path = path.to_path_buf(); Box::pin(futures::StreamExt::filter(rx, move |events| { let result = events.iter().any(|event| event.path.starts_with(&path)); From bc9c034baadbbbd225bb6cff81706332895adc79 Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Tue, 1 Mar 2022 18:50:05 +0100 Subject: [PATCH 23/45] Fix `Presenter` leak when removing windows --- crates/chat_panel/src/chat_panel.rs | 8 ++++++-- crates/gpui/src/app.rs | 2 ++ 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/crates/chat_panel/src/chat_panel.rs b/crates/chat_panel/src/chat_panel.rs index 5b3df14f330b0766365d2b2b383d5ec73aabe99a..93c51a1b40e75d6db1b8528aff6f8048509eab71 100644 --- 
a/crates/chat_panel/src/chat_panel.rs +++ b/crates/chat_panel/src/chat_panel.rs @@ -101,11 +101,15 @@ impl ChatPanel { cx.dispatch_action(LoadMoreMessages); } }); - let _observe_status = cx.spawn(|this, mut cx| { + let _observe_status = cx.spawn_weak(|this, mut cx| { let mut status = rpc.status(); async move { while let Some(_) = status.recv().await { - this.update(&mut cx, |_, cx| cx.notify()); + if let Some(this) = this.upgrade(&cx) { + this.update(&mut cx, |_, cx| cx.notify()); + } else { + break; + } } } }); diff --git a/crates/gpui/src/app.rs b/crates/gpui/src/app.rs index ffe198a503bba8b41abdd3ae936bf8e50698a338..017eae0d7218a03c362e15509a3ad65c6e7fe61c 100644 --- a/crates/gpui/src/app.rs +++ b/crates/gpui/src/app.rs @@ -852,6 +852,7 @@ impl MutableAppContext { pub fn remove_all_windows(&mut self) { for (window_id, _) in self.cx.windows.drain() { self.presenters_and_platform_windows.remove(&window_id); + self.debug_elements_callbacks.remove(&window_id); } self.flush_effects(); } @@ -1403,6 +1404,7 @@ impl MutableAppContext { pub fn remove_window(&mut self, window_id: usize) { self.cx.windows.remove(&window_id); self.presenters_and_platform_windows.remove(&window_id); + self.debug_elements_callbacks.remove(&window_id); self.flush_effects(); } From a25f21df393967239cf3c1e5ca0cbee2692eb972 Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Tue, 1 Mar 2022 19:16:58 +0100 Subject: [PATCH 24/45] Remove `debug_elements_callbacks` from `App` Co-Authored-By: Nathan Sobo Co-Authored-By: Max Brunsfeld --- crates/gpui/src/app.rs | 20 ++------------------ 1 file changed, 2 insertions(+), 18 deletions(-) diff --git a/crates/gpui/src/app.rs b/crates/gpui/src/app.rs index 017eae0d7218a03c362e15509a3ad65c6e7fe61c..3f6146d34822e4baf997aa3f2fb44ca32056b869 100644 --- a/crates/gpui/src/app.rs +++ b/crates/gpui/src/app.rs @@ -761,7 +761,6 @@ pub struct MutableAppContext { release_observations: Arc>>>, presenters_and_platform_windows: HashMap>, Box)>, - 
debug_elements_callbacks: HashMap crate::json::Value>>, foreground: Rc, pending_effects: VecDeque, pending_notifications: HashSet, @@ -808,7 +807,6 @@ impl MutableAppContext { observations: Default::default(), release_observations: Default::default(), presenters_and_platform_windows: HashMap::new(), - debug_elements_callbacks: HashMap::new(), foreground, pending_effects: VecDeque::new(), pending_notifications: HashSet::new(), @@ -852,7 +850,6 @@ impl MutableAppContext { pub fn remove_all_windows(&mut self) { for (window_id, _) in self.cx.windows.drain() { self.presenters_and_platform_windows.remove(&window_id); - self.debug_elements_callbacks.remove(&window_id); } self.flush_effects(); } @@ -873,18 +870,10 @@ impl MutableAppContext { &self.cx.background } - pub fn on_debug_elements(&mut self, window_id: usize, callback: F) - where - F: 'static + Fn(&AppContext) -> crate::json::Value, - { - self.debug_elements_callbacks - .insert(window_id, Box::new(callback)); - } - pub fn debug_elements(&self, window_id: usize) -> Option { - self.debug_elements_callbacks + self.presenters_and_platform_windows .get(&window_id) - .map(|debug_elements| debug_elements(&self.cx)) + .and_then(|(presenter, _)| presenter.borrow().debug_elements(self)) } pub fn add_action(&mut self, handler: F) @@ -1404,7 +1393,6 @@ impl MutableAppContext { pub fn remove_window(&mut self, window_id: usize) { self.cx.windows.remove(&window_id); self.presenters_and_platform_windows.remove(&window_id); - self.debug_elements_callbacks.remove(&window_id); self.flush_effects(); } @@ -1456,10 +1444,6 @@ impl MutableAppContext { self.presenters_and_platform_windows .insert(window_id, (presenter.clone(), window)); - - self.on_debug_elements(window_id, move |cx| { - presenter.borrow().debug_elements(cx).unwrap() - }); } pub fn build_presenter(&mut self, window_id: usize, titlebar_height: f32) -> Presenter { From efe7f611283c525a5bebdc0f43a21c241a028b41 Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Tue, 1 
Mar 2022 19:17:38 +0100 Subject: [PATCH 25/45] Use `simulate_random_delay` when polling snapshot only in tests Co-Authored-By: Nathan Sobo Co-Authored-By: Max Brunsfeld --- crates/project/src/worktree.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/crates/project/src/worktree.rs b/crates/project/src/worktree.rs index 33a564585acfaa71fbe154d1c0d33548c5234035..59498a3d75b33f0b4e4025f3b2daaa3716828b94 100644 --- a/crates/project/src/worktree.rs +++ b/crates/project/src/worktree.rs @@ -386,6 +386,7 @@ impl Worktree { if worktree.poll_task.is_none() { worktree.poll_task = Some(cx.spawn_weak(|this, mut cx| async move { if is_fake_fs { + #[cfg(any(test, feature = "test-support"))] cx.background().simulate_random_delay().await; } else { smol::Timer::after(Duration::from_millis(100)).await; From 2b64e8d4a2d858e2d34aa9e322f98cc4af72d39e Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Tue, 1 Mar 2022 10:50:58 -0800 Subject: [PATCH 26/45] Fix failure in test_unshare_project due to dropping handle outside of an update block Co-Authored-By: Antonio Scandurra Co-Authored-By: Nathan Sobo --- crates/server/src/rpc.rs | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/crates/server/src/rpc.rs b/crates/server/src/rpc.rs index 67ca86c154deedf175170326be5bc6268679da00..a3cabe3f99711d1baa9e747d71eaa2e827c07627 100644 --- a/crates/server/src/rpc.rs +++ b/crates/server/src/rpc.rs @@ -1238,7 +1238,9 @@ mod tests { .condition(cx_b, |project, _| project.is_read_only()) .await; assert!(worktree_a.read_with(cx_a, |tree, _| !tree.as_local().unwrap().is_shared())); - drop(project_b); + cx_b.update(|_| { + drop(project_b); + }); // Share the project again and ensure guests can still join. 
project_a @@ -1247,7 +1249,7 @@ mod tests { .unwrap(); assert!(worktree_a.read_with(cx_a, |tree, _| tree.as_local().unwrap().is_shared())); - let project_c = Project::remote( + let project_b2 = Project::remote( project_id, client_b.clone(), client_b.user_store.clone(), @@ -1257,7 +1259,7 @@ mod tests { ) .await .unwrap(); - project_c + project_b2 .update(cx_b, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx)) .await .unwrap(); From 43f856c56859c96bede115f98276b29d1b66183e Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Tue, 1 Mar 2022 12:09:35 -0800 Subject: [PATCH 27/45] Make integration tests depend only on a few core crates, not all of zed --- Cargo.lock | 7 ++++++- crates/server/Cargo.toml | 7 ++++++- crates/server/src/rpc.rs | 39 ++++++++++++++++++++------------------- 3 files changed, 32 insertions(+), 21 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 71fe0871c4a6198a30ba1b4bb6e92093f08ef53a..4f5299198612e24287109283e8b6d8b2b02fc796 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -5953,9 +5953,11 @@ dependencies = [ "async-tungstenite", "base64 0.13.0", "clap 3.0.0-beta.2", + "client", "collections", "comrak", "ctor", + "editor", "either", "env_logger", "envy", @@ -5964,12 +5966,15 @@ dependencies = [ "handlebars", "http-auth-basic", "jwt-simple", + "language", "lazy_static", "lipsum", + "lsp", "oauth2", "oauth2-surf", "parking_lot", "postage", + "project", "rand 0.8.3", "rpc", "rust-embed", @@ -5984,7 +5989,7 @@ dependencies = [ "time 0.2.27", "toml", "util", - "zed", + "workspace", ] [[package]] diff --git a/crates/server/Cargo.toml b/crates/server/Cargo.toml index 15f218178b5cd50b87b7ce24d710976c8afa5b7b..d7984dad04062ec6854262fb21ad0ea0c21f7840 100644 --- a/crates/server/Cargo.toml +++ b/crates/server/Cargo.toml @@ -58,7 +58,12 @@ features = ["runtime-async-std-rustls", "postgres", "time", "uuid"] collections = { path = "../collections", features = ["test-support"] } gpui = { path = "../gpui", features = ["test-support"] } rpc = { path = 
"../rpc", features = ["test-support"] } -zed = { path = "../zed", features = ["test-support"] } +client = { path = "../client", features = ["test-support"] } +editor = { path = "../editor", features = ["test-support"] } +language = { path = "../language", features = ["test-support"] } +lsp = { path = "../lsp", features = ["test-support"] } +project = { path = "../project", features = ["test-support"] } +workspace = { path = "../workspace", features = ["test-support"] } ctor = "0.1" env_logger = "0.8" util = { path = "../util" } diff --git a/crates/server/src/rpc.rs b/crates/server/src/rpc.rs index a3cabe3f99711d1baa9e747d71eaa2e827c07627..09cbc82ec81be6af3b576b6f6e507e66ff489b63 100644 --- a/crates/server/src/rpc.rs +++ b/crates/server/src/rpc.rs @@ -989,10 +989,28 @@ mod tests { github, AppState, Config, }; use ::rpc::Peer; + use client::{ + self, test::FakeHttpClient, Channel, ChannelDetails, ChannelList, Client, Credentials, + EstablishConnectionError, UserStore, + }; use collections::BTreeMap; + use editor::{ + self, ConfirmCodeAction, ConfirmCompletion, ConfirmRename, Editor, Input, MultiBuffer, + Redo, Rename, ToOffset, ToggleCodeActions, Undo, + }; use gpui::{executor, ModelHandle, TestAppContext}; + use language::{ + tree_sitter_rust, AnchorRangeExt, Diagnostic, DiagnosticEntry, Language, LanguageConfig, + LanguageRegistry, LanguageServerConfig, Point, ToLspPosition, + }; + use lsp; use parking_lot::Mutex; use postage::{sink::Sink, watch}; + use project::{ + fs::{FakeFs, Fs as _}, + search::SearchQuery, + DiagnosticSummary, Project, ProjectPath, + }; use rand::prelude::*; use rpc::PeerId; use serde_json::json; @@ -1009,24 +1027,7 @@ mod tests { }, time::Duration, }; - use zed::{ - client::{ - self, test::FakeHttpClient, Channel, ChannelDetails, ChannelList, Client, Credentials, - EstablishConnectionError, UserStore, - }, - editor::{ - self, ConfirmCodeAction, ConfirmCompletion, ConfirmRename, Editor, Input, MultiBuffer, - Redo, Rename, ToOffset, 
ToggleCodeActions, Undo, - }, - fs::{FakeFs, Fs as _}, - language::{ - tree_sitter_rust, AnchorRangeExt, Diagnostic, DiagnosticEntry, Language, - LanguageConfig, LanguageRegistry, LanguageServerConfig, Point, ToLspPosition, - }, - lsp, - project::{search::SearchQuery, DiagnosticSummary, Project, ProjectPath}, - workspace::{Settings, Workspace, WorkspaceParams}, - }; + use workspace::{Settings, Workspace, WorkspaceParams}; #[cfg(test)] #[ctor::ctor] @@ -4488,7 +4489,7 @@ mod tests { pub peer_id: PeerId, pub user_store: ModelHandle, project: Option>, - buffers: HashSet>, + buffers: HashSet>, } impl Deref for TestClient { From 0e6686916c9dd0a1d65606d684e3d2bacbd7c910 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Tue, 1 Mar 2022 12:10:35 -0800 Subject: [PATCH 28/45] Remove stray printing in db tests --- crates/server/src/db.rs | 1 - 1 file changed, 1 deletion(-) diff --git a/crates/server/src/db.rs b/crates/server/src/db.rs index 53a9c7d25971d609c3cce3b94c55a44654082b3c..222b06e6c4347da0a2dc76ad33c97a649cd5c0a2 100644 --- a/crates/server/src/db.rs +++ b/crates/server/src/db.rs @@ -1112,7 +1112,6 @@ pub mod tests { .take(count) .cloned() .collect::>(); - dbg!(count, before_id, &messages); messages.sort_unstable_by_key(|message| message.id); Ok(messages) } From 74469a46ba312da8249b3b31711bcbdadfc1c7cd Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Tue, 1 Mar 2022 13:26:59 -0800 Subject: [PATCH 29/45] Clean up tasks properly when dropping a FakeLanguageServer * Make sure the fake's IO tasks are stopped * Ensure that the fake's stdout is closed, so that the corresponding language server's IO tasks are woken up and halted. 
--- crates/lsp/src/lsp.rs | 166 ++++++++++++++++++++++++++---------------- 1 file changed, 102 insertions(+), 64 deletions(-) diff --git a/crates/lsp/src/lsp.rs b/crates/lsp/src/lsp.rs index ed3502208e0b13978de2d86b4be632627240f11b..0510388381d240a6958338e5178688ecf70753db 100644 --- a/crates/lsp/src/lsp.rs +++ b/crates/lsp/src/lsp.rs @@ -476,18 +476,22 @@ impl Drop for Subscription { #[cfg(any(test, feature = "test-support"))] pub struct FakeLanguageServer { - handlers: Arc< - Mutex< - HashMap< - &'static str, - Box Vec>, - >, - >, - >, + handlers: FakeLanguageServerHandlers, outgoing_tx: futures::channel::mpsc::UnboundedSender>, incoming_rx: futures::channel::mpsc::UnboundedReceiver>, + _input_task: Task>, + _output_task: Task>, } +type FakeLanguageServerHandlers = Arc< + Mutex< + HashMap< + &'static str, + Box Vec>, + >, + >, +>; + #[cfg(any(test, feature = "test-support"))] impl LanguageServer { pub fn fake(cx: &mut gpui::MutableAppContext) -> (Arc, FakeLanguageServer) { @@ -533,59 +537,69 @@ impl FakeLanguageServer { let (incoming_tx, incoming_rx) = futures::channel::mpsc::unbounded(); let (outgoing_tx, mut outgoing_rx) = futures::channel::mpsc::unbounded(); - let this = Self { - outgoing_tx: outgoing_tx.clone(), - incoming_rx, - handlers: Default::default(), - }; + let handlers = FakeLanguageServerHandlers::default(); - // Receive incoming messages - let handlers = this.handlers.clone(); - cx.spawn(|cx| async move { - let mut buffer = Vec::new(); - let mut stdin = smol::io::BufReader::new(stdin); - while Self::receive(&mut stdin, &mut buffer).await.is_ok() { - cx.background().simulate_random_delay().await; - if let Ok(request) = serde_json::from_slice::(&buffer) { - assert_eq!(request.jsonrpc, JSON_RPC_VERSION); - - if let Some(handler) = handlers.lock().get_mut(request.method) { - let response = - handler(request.id, request.params.get().as_bytes(), cx.clone()); - log::debug!("handled lsp request. 
method:{}", request.method); - outgoing_tx.unbounded_send(response)?; - } else { - log::debug!("unhandled lsp request. method:{}", request.method); - outgoing_tx.unbounded_send( - serde_json::to_vec(&AnyResponse { + let input_task = cx.spawn(|cx| { + let handlers = handlers.clone(); + let outgoing_tx = outgoing_tx.clone(); + async move { + let mut buffer = Vec::new(); + let mut stdin = smol::io::BufReader::new(stdin); + while Self::receive(&mut stdin, &mut buffer).await.is_ok() { + cx.background().simulate_random_delay().await; + if let Ok(request) = serde_json::from_slice::(&buffer) { + assert_eq!(request.jsonrpc, JSON_RPC_VERSION); + + let response; + if let Some(handler) = handlers.lock().get_mut(request.method) { + response = + handler(request.id, request.params.get().as_bytes(), cx.clone()); + log::debug!("handled lsp request. method:{}", request.method); + } else { + response = serde_json::to_vec(&AnyResponse { id: request.id, error: Some(Error { message: "no handler".to_string(), }), result: None, }) - .unwrap(), - )?; + .unwrap(); + log::debug!("unhandled lsp request. 
method:{}", request.method); + } + outgoing_tx.unbounded_send(response)?; + } else { + incoming_tx.unbounded_send(buffer.clone())?; } - } else { - incoming_tx.unbounded_send(buffer.clone())?; } + Ok::<_, anyhow::Error>(()) } - Ok::<_, anyhow::Error>(()) - }) - .detach(); - - // Send outgoing messages - cx.background() - .spawn(async move { - let mut stdout = smol::io::BufWriter::new(stdout); - while let Some(notification) = outgoing_rx.next().await { - Self::send(&mut stdout, ¬ification).await; - } - }) - .detach(); + }); - this + let output_task = cx.background().spawn(async move { + let mut stdout = smol::io::BufWriter::new(PipeWriterCloseOnDrop(stdout)); + while let Some(message) = outgoing_rx.next().await { + stdout + .write_all(CONTENT_LEN_HEADER.as_bytes()) + .await + .unwrap(); + stdout + .write_all((format!("{}", message.len())).as_bytes()) + .await + .unwrap(); + stdout.write_all("\r\n\r\n".as_bytes()).await.unwrap(); + stdout.write_all(&message).await.unwrap(); + stdout.flush().await.unwrap(); + } + Ok(()) + }); + + Self { + outgoing_tx, + incoming_rx, + handlers, + _input_task: input_task, + _output_task: output_task, + } } pub async fn notify(&mut self, params: T::Params) { @@ -665,20 +679,6 @@ impl FakeLanguageServer { .await; } - async fn send(stdout: &mut smol::io::BufWriter, message: &[u8]) { - stdout - .write_all(CONTENT_LEN_HEADER.as_bytes()) - .await - .unwrap(); - stdout - .write_all((format!("{}", message.len())).as_bytes()) - .await - .unwrap(); - stdout.write_all("\r\n\r\n".as_bytes()).await.unwrap(); - stdout.write_all(&message).await.unwrap(); - stdout.flush().await.unwrap(); - } - async fn receive( stdin: &mut smol::io::BufReader, buffer: &mut Vec, @@ -699,6 +699,44 @@ impl FakeLanguageServer { } } +struct PipeWriterCloseOnDrop(async_pipe::PipeWriter); + +impl Drop for PipeWriterCloseOnDrop { + fn drop(&mut self) { + self.0.close().ok(); + } +} + +impl AsyncWrite for PipeWriterCloseOnDrop { + fn poll_write( + mut self: std::pin::Pin<&mut 
Self>, + cx: &mut std::task::Context<'_>, + buf: &[u8], + ) -> std::task::Poll> { + let pipe = &mut self.0; + smol::pin!(pipe); + pipe.poll_write(cx, buf) + } + + fn poll_flush( + mut self: std::pin::Pin<&mut Self>, + cx: &mut std::task::Context<'_>, + ) -> std::task::Poll> { + let pipe = &mut self.0; + smol::pin!(pipe); + pipe.poll_flush(cx) + } + + fn poll_close( + mut self: std::pin::Pin<&mut Self>, + cx: &mut std::task::Context<'_>, + ) -> std::task::Poll> { + let pipe = &mut self.0; + smol::pin!(pipe); + pipe.poll_close(cx) + } +} + #[cfg(test)] mod tests { use super::*; From f2f1a52c7eb2b232f644d6b92670ea52edb86882 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Tue, 1 Mar 2022 13:36:49 -0800 Subject: [PATCH 30/45] Prevent hangs in lsp requests made while server is shutting down * Avoid postage::oneshot, since receiver is not woken when sender is dropped. * Clear the response channels when an IO task exits. --- crates/lsp/src/lsp.rs | 21 +++++++++++++++++---- 1 file changed, 17 insertions(+), 4 deletions(-) diff --git a/crates/lsp/src/lsp.rs b/crates/lsp/src/lsp.rs index 0510388381d240a6958338e5178688ecf70753db..ac0603cd4dfe2b62c0a19f4013171c78bf728e20 100644 --- a/crates/lsp/src/lsp.rs +++ b/crates/lsp/src/lsp.rs @@ -1,8 +1,9 @@ use anyhow::{anyhow, Context, Result}; +use futures::channel::oneshot; use futures::{io::BufWriter, AsyncRead, AsyncWrite}; use gpui::{executor, Task}; use parking_lot::{Mutex, RwLock}; -use postage::{barrier, oneshot, prelude::Stream, sink::Sink, watch}; +use postage::{barrier, prelude::Stream, watch}; use serde::{Deserialize, Serialize}; use serde_json::{json, value::RawValue, Value}; use smol::{ @@ -135,6 +136,7 @@ impl LanguageServer { let notification_handlers = notification_handlers.clone(); let response_handlers = response_handlers.clone(); async move { + let _clear_response_channels = ClearResponseChannels(response_handlers.clone()); let mut buffer = Vec::new(); loop { buffer.clear(); @@ -323,9 +325,12 @@ impl 
LanguageServer { outbound_tx.close(); Some( async move { + log::debug!("language server shutdown started"); shutdown_request.await?; + response_handlers.lock().clear(); exit?; output_done.recv().await; + log::debug!("language server shutdown finished"); drop(tasks); Ok(()) } @@ -404,7 +409,7 @@ impl LanguageServer { }) .unwrap(); let mut response_handlers = response_handlers.lock(); - let (mut tx, mut rx) = oneshot::channel(); + let (tx, rx) = oneshot::channel(); response_handlers.insert( id, Box::new(move |result| { @@ -414,7 +419,7 @@ impl LanguageServer { } Err(error) => Err(anyhow!("{}", error.message)), }; - let _ = tx.try_send(response); + let _ = tx.send(response); }), ); @@ -423,7 +428,7 @@ impl LanguageServer { .context("failed to write to language server's stdin"); async move { send?; - rx.recv().await.unwrap() + rx.await? } } @@ -737,6 +742,14 @@ impl AsyncWrite for PipeWriterCloseOnDrop { } } +struct ClearResponseChannels(Arc>>); + +impl Drop for ClearResponseChannels { + fn drop(&mut self) { + self.0.lock().clear(); + } +} + #[cfg(test)] mod tests { use super::*; From 917543cc32e63c70f64d54a588deba2871e2b14c Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Tue, 1 Mar 2022 13:37:33 -0800 Subject: [PATCH 31/45] Handle Peer responses using a futures::oneshot instead of postage::mpsc --- crates/rpc/src/peer.rs | 22 +++++++--------------- 1 file changed, 7 insertions(+), 15 deletions(-) diff --git a/crates/rpc/src/peer.rs b/crates/rpc/src/peer.rs index a1d5ab8e9677a3161e4a6fa5a7842845fecb0a21..0a00f6d8015107320dd6fc73006e02334803ff81 100644 --- a/crates/rpc/src/peer.rs +++ b/crates/rpc/src/peer.rs @@ -1,8 +1,7 @@ use super::proto::{self, AnyTypedEnvelope, EnvelopedMessage, MessageStream, RequestMessage}; use super::Connection; use anyhow::{anyhow, Context, Result}; -use futures::stream::BoxStream; -use futures::{FutureExt as _, StreamExt}; +use futures::{channel::oneshot, stream::BoxStream, FutureExt as _, StreamExt}; use parking_lot::{Mutex, RwLock}; use 
postage::{ barrier, mpsc, @@ -92,7 +91,7 @@ pub struct ConnectionState { outgoing_tx: futures::channel::mpsc::UnboundedSender, next_message_id: Arc, response_channels: - Arc>>>>, + Arc>>>>, } const WRITE_TIMEOUT: Duration = Duration::from_secs(10); @@ -177,18 +176,14 @@ impl Peer { async move { if let Some(responding_to) = incoming.responding_to { let channel = response_channels.lock().as_mut()?.remove(&responding_to); - if let Some(mut tx) = channel { + if let Some(tx) = channel { let mut requester_resumed = barrier::channel(); - if let Err(error) = tx.send((incoming, requester_resumed.0)).await { + if let Err(error) = tx.send((incoming, requester_resumed.0)) { log::debug!( "received RPC but request future was dropped {:?}", - error.0 .0 + error.0 ); } - // Drop response channel before awaiting on the barrier. This allows the - // barrier to get dropped even if the request's future is dropped before it - // has a chance to observe the response. - drop(tx); requester_resumed.1.recv().await; } else { log::warn!("received RPC response to unknown request {}", responding_to); @@ -239,7 +234,7 @@ impl Peer { receiver_id: ConnectionId, request: T, ) -> impl Future> { - let (tx, mut rx) = mpsc::channel(1); + let (tx, rx) = oneshot::channel(); let send = self.connection_state(receiver_id).and_then(|connection| { let message_id = connection.next_message_id.fetch_add(1, SeqCst); connection @@ -260,10 +255,7 @@ impl Peer { }); async move { send?; - let (response, _barrier) = rx - .recv() - .await - .ok_or_else(|| anyhow!("connection was closed"))?; + let (response, _barrier) = rx.await.map_err(|_| anyhow!("connection was closed"))?; if let Some(proto::envelope::Payload::Error(error)) = &response.payload { Err(anyhow!("RPC request failed - {}", error.message)) } else { From 95b2f4fb1614df67ffd834d93902b4254033c54f Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Tue, 1 Mar 2022 15:02:04 -0800 Subject: [PATCH 32/45] Fix remaining language server hangs on shutdown * Use fork 
of async-pipe library that handles closed pipes correctly. * Clear response handlers map when terminating output task, so as to wake any pending request futures. Co-Authored-By: Nathan Sobo --- Cargo.lock | 3 +- crates/lsp/Cargo.toml | 5 +-- crates/lsp/src/lsp.rs | 77 ++++++++++++------------------------------- 3 files changed, 26 insertions(+), 59 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 4f5299198612e24287109283e8b6d8b2b02fc796..68ebe25d81d5e1a08a9c2042f654f644e6dbcc87 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -336,7 +336,7 @@ dependencies = [ [[package]] name = "async-pipe" version = "0.1.3" -source = "git+https://github.com/routerify/async-pipe-rs?rev=feeb77e83142a9ff837d0767652ae41bfc5d8e47#feeb77e83142a9ff837d0767652ae41bfc5d8e47" +source = "git+https://github.com/zed-industries/async-pipe-rs?rev=82d00a04211cf4e1236029aa03e6b6ce2a74c553#82d00a04211cf4e1236029aa03e6b6ce2a74c553" dependencies = [ "futures", "log", @@ -2827,6 +2827,7 @@ version = "0.1.0" dependencies = [ "anyhow", "async-pipe", + "collections", "ctor", "env_logger", "futures", diff --git a/crates/lsp/Cargo.toml b/crates/lsp/Cargo.toml index fca48970d21712284f129e0b260492cabfe75365..e463ba020da8f519bae4d84ccc9ef683f09b3f58 100644 --- a/crates/lsp/Cargo.toml +++ b/crates/lsp/Cargo.toml @@ -10,10 +10,11 @@ path = "src/lsp.rs" test-support = ["async-pipe"] [dependencies] +collections = { path = "../collections" } gpui = { path = "../gpui" } util = { path = "../util" } anyhow = "1.0" -async-pipe = { git = "https://github.com/routerify/async-pipe-rs", rev = "feeb77e83142a9ff837d0767652ae41bfc5d8e47", optional = true } +async-pipe = { git = "https://github.com/zed-industries/async-pipe-rs", rev = "82d00a04211cf4e1236029aa03e6b6ce2a74c553", optional = true } futures = "0.3" log = "0.4" lsp-types = "0.91" @@ -26,7 +27,7 @@ smol = "1.2" [dev-dependencies] gpui = { path = "../gpui", features = ["test-support"] } util = { path = "../util", features = ["test-support"] } -async-pipe = { git = 
"https://github.com/routerify/async-pipe-rs", rev = "feeb77e83142a9ff837d0767652ae41bfc5d8e47" } +async-pipe = { git = "https://github.com/zed-industries/async-pipe-rs", rev = "82d00a04211cf4e1236029aa03e6b6ce2a74c553" } ctor = "0.1" env_logger = "0.8" unindent = "0.1.7" diff --git a/crates/lsp/src/lsp.rs b/crates/lsp/src/lsp.rs index ac0603cd4dfe2b62c0a19f4013171c78bf728e20..4a988f90b5ecfe39c58793a9eaa61f52be172d0c 100644 --- a/crates/lsp/src/lsp.rs +++ b/crates/lsp/src/lsp.rs @@ -1,6 +1,6 @@ use anyhow::{anyhow, Context, Result}; -use futures::channel::oneshot; -use futures::{io::BufWriter, AsyncRead, AsyncWrite}; +use collections::HashMap; +use futures::{channel::oneshot, io::BufWriter, AsyncRead, AsyncWrite}; use gpui::{executor, Task}; use parking_lot::{Mutex, RwLock}; use postage::{barrier, prelude::Stream, watch}; @@ -12,7 +12,6 @@ use smol::{ process::Command, }; use std::{ - collections::HashMap, future::Future, io::Write, str::FromStr, @@ -129,14 +128,15 @@ impl LanguageServer { let mut stdin = BufWriter::new(stdin); let mut stdout = BufReader::new(stdout); let (outbound_tx, outbound_rx) = channel::unbounded::>(); - let notification_handlers = Arc::new(RwLock::new(HashMap::<_, NotificationHandler>::new())); - let response_handlers = Arc::new(Mutex::new(HashMap::<_, ResponseHandler>::new())); + let notification_handlers = + Arc::new(RwLock::new(HashMap::<_, NotificationHandler>::default())); + let response_handlers = Arc::new(Mutex::new(HashMap::<_, ResponseHandler>::default())); let input_task = executor.spawn( { let notification_handlers = notification_handlers.clone(); let response_handlers = response_handlers.clone(); async move { - let _clear_response_channels = ClearResponseChannels(response_handlers.clone()); + let _clear_response_handlers = ClearResponseHandlers(response_handlers.clone()); let mut buffer = Vec::new(); loop { buffer.clear(); @@ -190,8 +190,10 @@ impl LanguageServer { .log_err(), ); let (output_done_tx, output_done_rx) = 
barrier::channel(); - let output_task = executor.spawn( + let output_task = executor.spawn({ + let response_handlers = response_handlers.clone(); async move { + let _clear_response_handlers = ClearResponseHandlers(response_handlers); let mut content_len_buffer = Vec::new(); while let Ok(message) = outbound_rx.recv().await { content_len_buffer.clear(); @@ -205,8 +207,8 @@ impl LanguageServer { drop(output_done_tx); Ok(()) } - .log_err(), - ); + .log_err() + }); let (initialized_tx, initialized_rx) = barrier::channel(); let (mut capabilities_tx, capabilities_rx) = watch::channel(); @@ -408,9 +410,13 @@ impl LanguageServer { params, }) .unwrap(); - let mut response_handlers = response_handlers.lock(); + + let send = outbound_tx + .try_send(message) + .context("failed to write to language server's stdin"); + let (tx, rx) = oneshot::channel(); - response_handlers.insert( + response_handlers.lock().insert( id, Box::new(move |result| { let response = match result { @@ -423,9 +429,6 @@ impl LanguageServer { }), ); - let send = outbound_tx - .try_send(message) - .context("failed to write to language server's stdin"); async move { send?; rx.await? @@ -581,7 +584,7 @@ impl FakeLanguageServer { }); let output_task = cx.background().spawn(async move { - let mut stdout = smol::io::BufWriter::new(PipeWriterCloseOnDrop(stdout)); + let mut stdout = smol::io::BufWriter::new(stdout); while let Some(message) = outgoing_rx.next().await { stdout .write_all(CONTENT_LEN_HEADER.as_bytes()) @@ -694,7 +697,7 @@ impl FakeLanguageServer { let message_len: usize = std::str::from_utf8(buffer) .unwrap() .strip_prefix(CONTENT_LEN_HEADER) - .unwrap() + .ok_or_else(|| anyhow!("invalid content length header"))? 
.trim_end() .parse() .unwrap(); @@ -704,47 +707,9 @@ impl FakeLanguageServer { } } -struct PipeWriterCloseOnDrop(async_pipe::PipeWriter); - -impl Drop for PipeWriterCloseOnDrop { - fn drop(&mut self) { - self.0.close().ok(); - } -} - -impl AsyncWrite for PipeWriterCloseOnDrop { - fn poll_write( - mut self: std::pin::Pin<&mut Self>, - cx: &mut std::task::Context<'_>, - buf: &[u8], - ) -> std::task::Poll> { - let pipe = &mut self.0; - smol::pin!(pipe); - pipe.poll_write(cx, buf) - } - - fn poll_flush( - mut self: std::pin::Pin<&mut Self>, - cx: &mut std::task::Context<'_>, - ) -> std::task::Poll> { - let pipe = &mut self.0; - smol::pin!(pipe); - pipe.poll_flush(cx) - } - - fn poll_close( - mut self: std::pin::Pin<&mut Self>, - cx: &mut std::task::Context<'_>, - ) -> std::task::Poll> { - let pipe = &mut self.0; - smol::pin!(pipe); - pipe.poll_close(cx) - } -} - -struct ClearResponseChannels(Arc>>); +struct ClearResponseHandlers(Arc>>); -impl Drop for ClearResponseChannels { +impl Drop for ClearResponseHandlers { fn drop(&mut self) { self.0.lock().clear(); } From acf7ef3d61dddc502c01902667bb4afb7575b8cb Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Tue, 1 Mar 2022 18:02:12 -0800 Subject: [PATCH 33/45] Avoid retaining executor when using Connection::in_memory --- crates/rpc/src/conn.rs | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/crates/rpc/src/conn.rs b/crates/rpc/src/conn.rs index 70c59d013b43d8bbeb9bce93c6a10ba59ee9139b..fb91b72d9f3898ec42b35b0387dc9f27951fc8f8 100644 --- a/crates/rpc/src/conn.rs +++ b/crates/rpc/src/conn.rs @@ -59,18 +59,21 @@ impl Connection { ) { use futures::channel::mpsc; use io::{Error, ErrorKind}; + use std::sync::Arc; let (tx, rx) = mpsc::unbounded::(); let tx = tx .sink_map_err(|e| WebSocketError::from(Error::new(ErrorKind::Other, e))) .with({ + let executor = Arc::downgrade(&executor); let kill_rx = kill_rx.clone(); - let executor = executor.clone(); move |msg| { let kill_rx = kill_rx.clone(); let 
executor = executor.clone(); Box::pin(async move { - executor.simulate_random_delay().await; + if let Some(executor) = executor.upgrade() { + executor.simulate_random_delay().await; + } if kill_rx.borrow().is_none() { Ok(msg) } else { @@ -80,9 +83,11 @@ impl Connection { } }); let rx = rx.then(move |msg| { - let executor = executor.clone(); + let executor = Arc::downgrade(&executor); Box::pin(async move { - executor.simulate_random_delay().await; + if let Some(executor) = executor.upgrade() { + executor.simulate_random_delay().await; + } msg }) }); From 3cf5329450722a55ae8e54a2c5783eaf21115821 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Tue, 1 Mar 2022 18:17:25 -0800 Subject: [PATCH 34/45] Flush effects when dropping test clients in random collaboration test --- crates/server/src/rpc.rs | 16 ++++++++++------ 1 file changed, 10 insertions(+), 6 deletions(-) diff --git a/crates/server/src/rpc.rs b/crates/server/src/rpc.rs index 09cbc82ec81be6af3b576b6f6e507e66ff489b63..52c2e43832aa0dc90260d5bc696f4f94950943ad 100644 --- a/crates/server/src/rpc.rs +++ b/crates/server/src/rpc.rs @@ -4234,7 +4234,7 @@ mod tests { let mut clients = futures::future::join_all(clients).await; cx.foreground().run_until_parked(); - let (host_client, host_cx) = clients.remove(0); + let (host_client, mut host_cx) = clients.remove(0); let host_project = host_client.project.as_ref().unwrap(); let host_worktree_snapshots = host_project.read_with(&host_cx, |project, cx| { project @@ -4246,14 +4246,14 @@ mod tests { .collect::>() }); - for (guest_client, guest_cx) in clients.iter() { + for (guest_client, mut guest_cx) in clients.into_iter() { let guest_id = guest_client.client.id(); let worktree_snapshots = guest_client .project .as_ref() .unwrap() - .read_with(guest_cx, |project, cx| { + .read_with(&guest_cx, |project, cx| { project .worktrees(cx) .map(|worktree| { @@ -4291,7 +4291,7 @@ mod tests { .project .as_ref() .unwrap() - .read_with(guest_cx, |project, cx| { + .read_with(&guest_cx, 
|project, cx| { assert!( !project.has_deferred_operations(cx), "guest {} has deferred operations", @@ -4300,7 +4300,7 @@ mod tests { }); for guest_buffer in &guest_client.buffers { - let buffer_id = guest_buffer.read_with(guest_cx, |buffer, _| buffer.remote_id()); + let buffer_id = guest_buffer.read_with(&guest_cx, |buffer, _| buffer.remote_id()); let host_buffer = host_project.read_with(&host_cx, |project, cx| { project.buffer_for_id(buffer_id, cx).expect(&format!( "host does not have buffer for guest:{}, peer:{}, id:{}", @@ -4308,7 +4308,7 @@ mod tests { )) }); assert_eq!( - guest_buffer.read_with(guest_cx, |buffer, _| buffer.text()), + guest_buffer.read_with(&guest_cx, |buffer, _| buffer.text()), host_buffer.read_with(&host_cx, |buffer, _| buffer.text()), "guest {}, buffer {}, path {:?}, differs from the host's buffer", guest_id, @@ -4317,7 +4317,11 @@ mod tests { .read_with(&host_cx, |buffer, cx| buffer.file().unwrap().full_path(cx)) ); } + + guest_cx.update(|_| drop(guest_client)); } + + host_cx.update(|_| drop(host_client)); } struct TestServer { From ae93cfed50c59806c04f2a082a48158515f6d43c Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Tue, 1 Mar 2022 18:17:41 -0800 Subject: [PATCH 35/45] Tear down client's connection states when dropping test clients --- crates/client/src/client.rs | 2 ++ 1 file changed, 2 insertions(+) diff --git a/crates/client/src/client.rs b/crates/client/src/client.rs index 5126c9384ce0ade3d5c4e7f2b4feaa09d3d4bba5..e338b72e740385a7738af80e9634e61e0887680a 100644 --- a/crates/client/src/client.rs +++ b/crates/client/src/client.rs @@ -230,10 +230,12 @@ impl Client { #[cfg(any(test, feature = "test-support"))] pub fn tear_down(&self) { let mut state = self.state.write(); + state._maintain_connection.take(); state.message_handlers.clear(); state.models_by_message_type.clear(); state.models_by_entity_type_and_remote_id.clear(); state.entity_id_extractors.clear(); + self.peer.reset(); } #[cfg(any(test, feature = "test-support"))] From 
8b7a9367fa8cf137f2986f622557d1e81cf1d8e9 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Tue, 1 Mar 2022 18:21:53 -0800 Subject: [PATCH 36/45] Avoid storing type name string on AnyViewHandle It won't be needed for leak error messages, because the typed view handle will typically be created first. And this avoids increasing the size of the handle used in production. --- crates/gpui/src/app.rs | 11 +++-------- 1 file changed, 3 insertions(+), 8 deletions(-) diff --git a/crates/gpui/src/app.rs b/crates/gpui/src/app.rs index 3f6146d34822e4baf997aa3f2fb44ca32056b869..59be4e7a6dbfec14dc7fcd1031ef2989c3b7647c 100644 --- a/crates/gpui/src/app.rs +++ b/crates/gpui/src/app.rs @@ -15,7 +15,7 @@ use platform::Event; use postage::oneshot; use smol::prelude::*; use std::{ - any::{self, type_name, Any, TypeId}, + any::{type_name, Any, TypeId}, cell::RefCell, collections::{hash_map::Entry, BTreeMap, HashMap, HashSet, VecDeque}, fmt::{self, Debug}, @@ -3382,8 +3382,8 @@ pub struct AnyViewHandle { window_id: usize, view_id: usize, view_type: TypeId, - type_name: &'static str, ref_counts: Arc>, + #[cfg(any(test, feature = "test-support"))] handle_id: usize, } @@ -3393,7 +3393,6 @@ impl AnyViewHandle { window_id: usize, view_id: usize, view_type: TypeId, - type_name: &'static str, ref_counts: Arc>, ) -> Self { ref_counts.lock().inc_view(window_id, view_id); @@ -3403,13 +3402,12 @@ impl AnyViewHandle { .lock() .leak_detector .lock() - .handle_created(Some(type_name), view_id); + .handle_created(None, view_id); Self { window_id, view_id, view_type, - type_name, ref_counts, #[cfg(any(test, feature = "test-support"))] handle_id, @@ -3456,7 +3454,6 @@ impl Clone for AnyViewHandle { self.window_id, self.view_id, self.view_type, - self.type_name, self.ref_counts.clone(), ) } @@ -3474,7 +3471,6 @@ impl From<&ViewHandle> for AnyViewHandle { handle.window_id, handle.view_id, TypeId::of::(), - any::type_name::(), handle.ref_counts.clone(), ) } @@ -3486,7 +3482,6 @@ impl From> for 
AnyViewHandle { window_id: handle.window_id, view_id: handle.view_id, view_type: TypeId::of::(), - type_name: any::type_name::(), ref_counts: handle.ref_counts.clone(), #[cfg(any(test, feature = "test-support"))] handle_id: handle.handle_id, From be7a4770fb9f3a652ca1d3e422a7da35c878982a Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Wed, 2 Mar 2022 10:08:38 +0100 Subject: [PATCH 37/45] Delete timer state when the future that's awaiting it gets dropped --- crates/gpui/src/executor.rs | 31 +++++++++++++++++++++++++++---- 1 file changed, 27 insertions(+), 4 deletions(-) diff --git a/crates/gpui/src/executor.rs b/crates/gpui/src/executor.rs index e73f403dc39478ce03522f8be1cafe76e4263742..61a4dee0bb65b9573be7268891dddb050a8f22bc 100644 --- a/crates/gpui/src/executor.rs +++ b/crates/gpui/src/executor.rs @@ -16,7 +16,8 @@ use std::{ use crate::{ platform::{self, Dispatcher}, - util, MutableAppContext, + util::{self, post_inc}, + MutableAppContext, }; pub enum Foreground { @@ -69,7 +70,8 @@ struct DeterministicState { forbid_parking: bool, block_on_ticks: std::ops::RangeInclusive, now: std::time::Instant, - pending_timers: Vec<(std::time::Instant, postage::barrier::Sender)>, + next_timer_id: usize, + pending_timers: Vec<(usize, std::time::Instant, postage::barrier::Sender)>, waiting_backtrace: Option, } @@ -99,6 +101,7 @@ impl Deterministic { forbid_parking: false, block_on_ticks: 0..=1000, now: std::time::Instant::now(), + next_timer_id: Default::default(), pending_timers: Default::default(), waiting_backtrace: None, })), @@ -426,11 +429,31 @@ impl Foreground { use postage::prelude::Stream as _; let (tx, mut rx) = postage::barrier::channel(); + let timer_id; { let mut state = executor.state.lock(); let wakeup_at = state.now + duration; - state.pending_timers.push((wakeup_at, tx)); + timer_id = post_inc(&mut state.next_timer_id); + state.pending_timers.push((timer_id, wakeup_at, tx)); } + + struct DropTimer<'a>(usize, &'a Foreground); + impl<'a> Drop for 
DropTimer<'a> { + fn drop(&mut self) { + match self.1 { + Foreground::Deterministic { executor, .. } => { + executor + .state + .lock() + .pending_timers + .retain(|(timer_id, _, _)| *timer_id != self.0); + } + _ => unreachable!(), + } + } + } + + let _guard = DropTimer(timer_id, self); rx.recv().await; } _ => { @@ -451,7 +474,7 @@ impl Foreground { let mut pending_timers = mem::take(&mut state.pending_timers); drop(state); - pending_timers.retain(|(wakeup, _)| *wakeup > now); + pending_timers.retain(|(_, wakeup, _)| *wakeup > now); executor.state.lock().pending_timers.extend(pending_timers); } _ => panic!("this method can only be called on a deterministic executor"), From 51345cf1e175185ebfdbde61fdcee4c76e81b25b Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Wed, 2 Mar 2022 10:39:46 +0100 Subject: [PATCH 38/45] Advance clock when simulating random delay --- crates/gpui/src/executor.rs | 30 +++++++++++++++++++++--------- 1 file changed, 21 insertions(+), 9 deletions(-) diff --git a/crates/gpui/src/executor.rs b/crates/gpui/src/executor.rs index 61a4dee0bb65b9573be7268891dddb050a8f22bc..c828fe3ed49221ec5f75967080702c5a4624b82d 100644 --- a/crates/gpui/src/executor.rs +++ b/crates/gpui/src/executor.rs @@ -223,6 +223,12 @@ impl Deterministic { if state.scheduled_from_foreground.is_empty() && state.scheduled_from_background.is_empty() { + if let Some(main_task) = main_task { + if let Poll::Ready(result) = main_task.poll(&mut cx) { + return Some(result); + } + } + return None; } @@ -300,6 +306,17 @@ impl Deterministic { None } + + pub fn advance_clock(&self, duration: Duration) { + let mut state = self.state.lock(); + state.now += duration; + let now = state.now; + let mut pending_timers = mem::take(&mut state.pending_timers); + drop(state); + + pending_timers.retain(|(_, wakeup, _)| *wakeup > now); + self.state.lock().pending_timers.extend(pending_timers); + } } #[cfg(any(test, feature = "test-support"))] @@ -467,15 +484,7 @@ impl Foreground { match self { 
Self::Deterministic { executor, .. } => { executor.run_until_parked(); - - let mut state = executor.state.lock(); - state.now += duration; - let now = state.now; - let mut pending_timers = mem::take(&mut state.pending_timers); - drop(state); - - pending_timers.retain(|(_, wakeup, _)| *wakeup > now); - executor.state.lock().pending_timers.extend(pending_timers); + executor.advance_clock(duration); } _ => panic!("this method can only be called on a deterministic executor"), } @@ -604,6 +613,9 @@ impl Background { for _ in 0..yields { yield_now().await; } + + let delay = Duration::from_millis(executor.state.lock().rng.gen_range(0..100)); + executor.advance_clock(delay); } } _ => panic!("this method can only be called on a deterministic executor"), From 3ec76b63d30f149857b61044606a98c8f0dddc5d Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Wed, 2 Mar 2022 11:02:10 +0100 Subject: [PATCH 39/45] Make `dhat` optional --- crates/gpui/Cargo.toml | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/crates/gpui/Cargo.toml b/crates/gpui/Cargo.toml index 95769cd470c3214ac191a379757ba93ceb51c91c..46ff7e8bc5319672e47dc4207f2614797ad9f687 100644 --- a/crates/gpui/Cargo.toml +++ b/crates/gpui/Cargo.toml @@ -8,7 +8,7 @@ version = "0.1.0" path = "src/gpui.rs" [features] -test-support = ["backtrace", "env_logger", "collections/test-support"] +test-support = ["backtrace", "dhat", "env_logger", "collections/test-support"] [dependencies] collections = { path = "../collections" } @@ -17,7 +17,7 @@ sum_tree = { path = "../sum_tree" } async-task = "4.0.3" backtrace = { version = "0.3", optional = true } ctor = "0.1" -dhat = "0.3" +dhat = { version = "0.3", optional = true } env_logger = { version = "0.8", optional = true } etagere = "0.2" futures = "0.3" @@ -51,6 +51,7 @@ cc = "1.0.67" [dev-dependencies] backtrace = "0.3" collections = { path = "../collections", features = ["test-support"] } +dhat = "0.3" env_logger = "0.8" png = "0.16" simplelog = "0.9" From 
141e0559a560a0d05f716bef5ea56d27c85b56e7 Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Wed, 2 Mar 2022 12:45:49 +0100 Subject: [PATCH 40/45] Fix warnings --- crates/gpui/src/executor.rs | 5 ++--- crates/lsp/src/lsp.rs | 1 + 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/crates/gpui/src/executor.rs b/crates/gpui/src/executor.rs index c828fe3ed49221ec5f75967080702c5a4624b82d..9c4b9e90e0b5dbac1d35985be391f6ae04dd2c57 100644 --- a/crates/gpui/src/executor.rs +++ b/crates/gpui/src/executor.rs @@ -16,8 +16,7 @@ use std::{ use crate::{ platform::{self, Dispatcher}, - util::{self, post_inc}, - MutableAppContext, + util, MutableAppContext, }; pub enum Foreground { @@ -450,7 +449,7 @@ impl Foreground { { let mut state = executor.state.lock(); let wakeup_at = state.now + duration; - timer_id = post_inc(&mut state.next_timer_id); + timer_id = util::post_inc(&mut state.next_timer_id); state.pending_timers.push((timer_id, wakeup_at, tx)); } diff --git a/crates/lsp/src/lsp.rs b/crates/lsp/src/lsp.rs index 4a988f90b5ecfe39c58793a9eaa61f52be172d0c..dc57c85213c564fbb47648a093c6a805fa0c88bd 100644 --- a/crates/lsp/src/lsp.rs +++ b/crates/lsp/src/lsp.rs @@ -491,6 +491,7 @@ pub struct FakeLanguageServer { _output_task: Task>, } +#[cfg(any(test, feature = "test-support"))] type FakeLanguageServerHandlers = Arc< Mutex< HashMap< From 9ff1af3adb75b2b955e90399d012214bbc39d209 Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Wed, 2 Mar 2022 14:30:00 +0100 Subject: [PATCH 41/45] Always run tests and bundle creation in separate runners This is better for caching and, thus, yields faster CI runs. 
--- .github/workflows/ci.yml | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 4050d4e120140b64ccfc7745cba12b0940a216f2..ae30bb79e544d1d22389886f2937dde368b9d628 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -18,7 +18,9 @@ env: jobs: tests: name: Run tests - runs-on: self-hosted + runs-on: + - self-hosted + - test env: RUSTFLAGS: -D warnings steps: @@ -39,7 +41,9 @@ jobs: bundle: name: Bundle app - runs-on: self-hosted + runs-on: + - self-hosted + - bundle env: MACOS_CERTIFICATE: ${{ secrets.MACOS_CERTIFICATE }} MACOS_CERTIFICATE_PASSWORD: ${{ secrets.MACOS_CERTIFICATE_PASSWORD }} From 432fef63161e253c9229907b364c642f5beb4454 Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Wed, 2 Mar 2022 14:41:40 +0100 Subject: [PATCH 42/45] :lipstick: --- .github/workflows/ci.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index ae30bb79e544d1d22389886f2937dde368b9d628..67d6fa62c45a7e643c7e7ded0c11e7fd752d11c7 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -19,8 +19,8 @@ jobs: tests: name: Run tests runs-on: - - self-hosted - - test + - self-hosted + - test env: RUSTFLAGS: -D warnings steps: @@ -42,8 +42,8 @@ jobs: bundle: name: Bundle app runs-on: - - self-hosted - - bundle + - self-hosted + - bundle env: MACOS_CERTIFICATE: ${{ secrets.MACOS_CERTIFICATE }} MACOS_CERTIFICATE_PASSWORD: ${{ secrets.MACOS_CERTIFICATE_PASSWORD }} From 65f53db6e7b172a67050a15f50e3d8e95e27888f Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Wed, 2 Mar 2022 14:41:59 +0100 Subject: [PATCH 43/45] WIP: investigate why npm is not there --- script/bundle | 56 +-------------------------------------------------- 1 file changed, 1 insertion(+), 55 deletions(-) diff --git a/script/bundle b/script/bundle index 
bcaa68c1e9c04c9d6aaa7379a5b2de5e82920a93..274ed0c883e927ab5775be516581165de6534e05 100755 --- a/script/bundle +++ b/script/bundle @@ -2,58 +2,4 @@ set -e -export ZED_BUNDLE=true - -# Install cargo-bundle 0.5.0 if it's not already installed -cargo install cargo-bundle --version 0.5.0 - -# Deal with versions of macOS that don't include libstdc++ headers -export CXXFLAGS="-stdlib=libc++" - -# Build the app bundle for x86_64 -pushd crates/zed > /dev/null -cargo bundle --release --target x86_64-apple-darwin -popd > /dev/null - -# Build the binary for aarch64 (Apple M1) -cargo build --release --target aarch64-apple-darwin - -# Replace the bundle's binary with a "fat binary" that combines the two architecture-specific binaries -lipo -create target/x86_64-apple-darwin/release/Zed target/aarch64-apple-darwin/release/Zed -output target/x86_64-apple-darwin/release/bundle/osx/Zed.app/Contents/MacOS/zed - -# Sign the app bundle with an ad-hoc signature so it runs on the M1. We need a real certificate but this works for now. -if [[ -n $MACOS_CERTIFICATE && -n $MACOS_CERTIFICATE_PASSWORD && -n $APPLE_NOTARIZATION_USERNAME && -n $APPLE_NOTARIZATION_PASSWORD ]]; then - echo "Signing bundle with Apple-issued certificate" - security create-keychain -p $MACOS_CERTIFICATE_PASSWORD zed.keychain || echo "" - security default-keychain -s zed.keychain - security unlock-keychain -p $MACOS_CERTIFICATE_PASSWORD zed.keychain - echo $MACOS_CERTIFICATE | base64 --decode > /tmp/zed-certificate.p12 - security import /tmp/zed-certificate.p12 -k zed.keychain -P $MACOS_CERTIFICATE_PASSWORD -T /usr/bin/codesign - rm /tmp/zed-certificate.p12 - security set-key-partition-list -S apple-tool:,apple:,codesign: -s -k $MACOS_CERTIFICATE_PASSWORD zed.keychain - /usr/bin/codesign --force --deep --timestamp --options runtime --sign "Zed Industries, Inc." 
target/x86_64-apple-darwin/release/bundle/osx/Zed.app -v - security default-keychain -s login.keychain -else - echo "One or more of the following variables are missing: MACOS_CERTIFICATE, MACOS_CERTIFICATE_PASSWORD, APPLE_NOTARIZATION_USERNAME, APPLE_NOTARIZATION_PASSWORD" - echo "Performing an ad-hoc signature, but this bundle should not be distributed" - codesign --force --deep --sign - target/x86_64-apple-darwin/release/bundle/osx/Zed.app -v -fi - -# Create a DMG -echo "Creating DMG" -mkdir -p target/release -hdiutil create -volname Zed -srcfolder target/x86_64-apple-darwin/release/bundle/osx -ov -format UDZO target/release/Zed.dmg - -if [[ -n $MACOS_CERTIFICATE && -n $MACOS_CERTIFICATE_PASSWORD && -n $APPLE_NOTARIZATION_USERNAME && -n $APPLE_NOTARIZATION_PASSWORD ]]; then - echo "Notarizing DMG with Apple" - npm install -g notarize-cli - npx notarize-cli --file target/release/Zed.dmg --bundle-id dev.zed.Zed --username $APPLE_NOTARIZATION_USERNAME --password $APPLE_NOTARIZATION_PASSWORD -fi - -# If -o option is specified, open the target/release directory in Finder to reveal the DMG -while getopts o flag -do - case "${flag}" in - o) open target/release;; - esac -done +echo $PATH From c97983d25ac46e7648ae00ce767c2a957b96ae49 Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Wed, 2 Mar 2022 14:58:24 +0100 Subject: [PATCH 44/45] Install node as part of ci.yml when bundling the app --- .github/workflows/ci.yml | 5 ++++ script/bundle | 56 +++++++++++++++++++++++++++++++++++++++- 2 files changed, 60 insertions(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 67d6fa62c45a7e643c7e7ded0c11e7fd752d11c7..f2749ebf6f4506cfb76fc53480018241f8b60903 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -64,6 +64,11 @@ jobs: target: aarch64-apple-darwin profile: minimal + - name: Install Node + uses: actions/setup-node@v2 + with: + node-version: '14' + - name: Checkout repo uses: actions/checkout@v2 with: diff --git 
a/script/bundle b/script/bundle index 274ed0c883e927ab5775be516581165de6534e05..bcaa68c1e9c04c9d6aaa7379a5b2de5e82920a93 100755 --- a/script/bundle +++ b/script/bundle @@ -2,4 +2,58 @@ set -e -echo $PATH +export ZED_BUNDLE=true + +# Install cargo-bundle 0.5.0 if it's not already installed +cargo install cargo-bundle --version 0.5.0 + +# Deal with versions of macOS that don't include libstdc++ headers +export CXXFLAGS="-stdlib=libc++" + +# Build the app bundle for x86_64 +pushd crates/zed > /dev/null +cargo bundle --release --target x86_64-apple-darwin +popd > /dev/null + +# Build the binary for aarch64 (Apple M1) +cargo build --release --target aarch64-apple-darwin + +# Replace the bundle's binary with a "fat binary" that combines the two architecture-specific binaries +lipo -create target/x86_64-apple-darwin/release/Zed target/aarch64-apple-darwin/release/Zed -output target/x86_64-apple-darwin/release/bundle/osx/Zed.app/Contents/MacOS/zed + +# Sign the app bundle with an ad-hoc signature so it runs on the M1. We need a real certificate but this works for now. +if [[ -n $MACOS_CERTIFICATE && -n $MACOS_CERTIFICATE_PASSWORD && -n $APPLE_NOTARIZATION_USERNAME && -n $APPLE_NOTARIZATION_PASSWORD ]]; then + echo "Signing bundle with Apple-issued certificate" + security create-keychain -p $MACOS_CERTIFICATE_PASSWORD zed.keychain || echo "" + security default-keychain -s zed.keychain + security unlock-keychain -p $MACOS_CERTIFICATE_PASSWORD zed.keychain + echo $MACOS_CERTIFICATE | base64 --decode > /tmp/zed-certificate.p12 + security import /tmp/zed-certificate.p12 -k zed.keychain -P $MACOS_CERTIFICATE_PASSWORD -T /usr/bin/codesign + rm /tmp/zed-certificate.p12 + security set-key-partition-list -S apple-tool:,apple:,codesign: -s -k $MACOS_CERTIFICATE_PASSWORD zed.keychain + /usr/bin/codesign --force --deep --timestamp --options runtime --sign "Zed Industries, Inc." 
target/x86_64-apple-darwin/release/bundle/osx/Zed.app -v + security default-keychain -s login.keychain +else + echo "One or more of the following variables are missing: MACOS_CERTIFICATE, MACOS_CERTIFICATE_PASSWORD, APPLE_NOTARIZATION_USERNAME, APPLE_NOTARIZATION_PASSWORD" + echo "Performing an ad-hoc signature, but this bundle should not be distributed" + codesign --force --deep --sign - target/x86_64-apple-darwin/release/bundle/osx/Zed.app -v +fi + +# Create a DMG +echo "Creating DMG" +mkdir -p target/release +hdiutil create -volname Zed -srcfolder target/x86_64-apple-darwin/release/bundle/osx -ov -format UDZO target/release/Zed.dmg + +if [[ -n $MACOS_CERTIFICATE && -n $MACOS_CERTIFICATE_PASSWORD && -n $APPLE_NOTARIZATION_USERNAME && -n $APPLE_NOTARIZATION_PASSWORD ]]; then + echo "Notarizing DMG with Apple" + npm install -g notarize-cli + npx notarize-cli --file target/release/Zed.dmg --bundle-id dev.zed.Zed --username $APPLE_NOTARIZATION_USERNAME --password $APPLE_NOTARIZATION_PASSWORD +fi + +# If -o option is specified, open the target/release directory in Finder to reveal the DMG +while getopts o flag +do + case "${flag}" in + o) open target/release;; + esac +done From 0581246690507c8ffea0d495877e162724d22e9e Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Wed, 2 Mar 2022 15:00:01 +0100 Subject: [PATCH 45/45] Use Node 16 on CI --- .github/workflows/ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index f2749ebf6f4506cfb76fc53480018241f8b60903..a773ef6c7cade1b16d058a898577c89d0801bb5d 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -67,7 +67,7 @@ jobs: - name: Install Node uses: actions/setup-node@v2 with: - node-version: '14' + node-version: '16' - name: Checkout repo uses: actions/checkout@v2