diff --git a/crates/gpui2/src/executor.rs b/crates/gpui2/src/executor.rs index 4b75bad504bbd3ed7fd512cb8bb3c54b807503c9..63f3b94c79a399984efa2a82cdd960e34e8d90be 100644 --- a/crates/gpui2/src/executor.rs +++ b/crates/gpui2/src/executor.rs @@ -88,7 +88,16 @@ impl BackgroundExecutor { #[cfg(any(test, feature = "test-support"))] pub fn block_test(&self, future: impl Future) -> R { - self.block_internal(false, future) + let (runnable, task) = unsafe { + async_task::spawn_unchecked(future, { + let dispatcher = self.dispatcher.clone(); + move |runnable| dispatcher.dispatch_on_main_thread(runnable) + }) + }; + + runnable.schedule(); + + self.block_internal(false, task) } pub fn block(&self, future: impl Future) -> R { @@ -100,17 +109,20 @@ impl BackgroundExecutor { background_only: bool, future: impl Future, ) -> R { + dbg!("block_internal"); pin_mut!(future); let (parker, unparker) = parking::pair(); let awoken = Arc::new(AtomicBool::new(false)); let awoken2 = awoken.clone(); let waker = waker_fn(move || { + dbg!("WAKING UP."); awoken2.store(true, SeqCst); unparker.unpark(); }); let mut cx = std::task::Context::from_waker(&waker); + dbg!("BOOOP"); loop { match future.as_mut().poll(&mut cx) { Poll::Ready(result) => return result, @@ -131,7 +143,9 @@ impl BackgroundExecutor { panic!("parked with nothing left to run\n{:?}", backtrace_message) } } + dbg!("PARKING!"); parker.park(); + dbg!("CONTINUING!"); } } } diff --git a/crates/project2/src/project2.rs b/crates/project2/src/project2.rs index 1457bd41ccd1eb9950cb8f99749d2dab2df39089..a598aac79ec28cc961783a9c4fda7a735f37ef5c 100644 --- a/crates/project2/src/project2.rs +++ b/crates/project2/src/project2.rs @@ -877,14 +877,17 @@ impl Project { ) }); for path in root_paths { + dbg!(&path); let (tree, _) = project .update(cx, |project, cx| { project.find_or_create_local_worktree(path, true, cx) }) .await .unwrap(); + dbg!("aaa"); tree.update(cx, |tree, _| tree.as_local().unwrap().scan_complete()) .await; + dbg!("bbb"); } project } @@ -5990,8 +5993,10 @@ impl Project { ) -> Task, PathBuf)>> { let abs_path = abs_path.as_ref(); if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) { + dbg!("shortcut"); Task::ready(Ok((tree, relative_path))) } else { + dbg!("long cut"); let worktree = self.create_local_worktree(abs_path, visible, cx); cx.background_executor() .spawn(async move { Ok((worktree.await?, PathBuf::new())) }) diff --git a/crates/project2/src/project_tests.rs b/crates/project2/src/project_tests.rs index 80126d82e4c9e5c2f858ec6ace473ea63edce3bf..fba2548451a310d36017444d5b000e41c81edf52 100644 --- a/crates/project2/src/project_tests.rs +++ b/crates/project2/src/project_tests.rs @@ -4,55 +4,63 @@ use futures::{future, StreamExt}; use gpui2::AppContext; use language2::{ language_settings::{AllLanguageSettings, LanguageSettingsContent}, - tree_sitter_rust, Diagnostic, FakeLspAdapter, LanguageConfig, LineEnding, OffsetRangeExt, - Point, ToPoint, + tree_sitter_rust, tree_sitter_typescript, Diagnostic, FakeLspAdapter, LanguageConfig, + LineEnding, OffsetRangeExt, Point, ToPoint, }; use lsp2::Url; use parking_lot::Mutex; use pretty_assertions::assert_eq; use serde_json::json; -use std::task::Poll; +use std::{os, task::Poll}; use unindent::Unindent as _; -use util::assert_set_eq; - -// #[gpui2::test] -// async fn test_symlinks(cx: &mut gpui2::TestAppContext) { -// init_test(cx); -// cx.executor().allow_parking(); - -// let dir = temp_tree(json!({ -// "root": { -// "apple": "", -// "banana": { -// "carrot": { -// "date": "", -// 
"endive": "", -// } -// }, -// "fennel": { -// "grape": "", -// } -// } -// })); - -// let root_link_path = dir.path().join("root_link"); -// unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap(); -// unix::fs::symlink( -// &dir.path().join("root/fennel"), -// &dir.path().join("root/finnochio"), -// ) -// .unwrap(); - -// let project = Project::test(Arc::new(RealFs), [root_link_path.as_ref()], cx).await; -// project.update(cx, |project, cx| { -// let tree = project.worktrees().next().unwrap().read(cx); -// assert_eq!(tree.file_count(), 5); -// assert_eq!( -// tree.inode_for_path("fennel/grape"), -// tree.inode_for_path("finnochio/grape") -// ); -// }); -// } +use util::{assert_set_eq, test::temp_tree}; + +#[gpui2::test] +async fn test_symlinks(cx: &mut gpui2::TestAppContext) { + init_test(cx); + cx.executor().allow_parking(); + + let dir = temp_tree(json!({ + "root": { + "apple": "", + "banana": { + "carrot": { + "date": "", + "endive": "", + } + }, + "fennel": { + "grape": "", + } + } + })); + + dbg!("GOT HERE"); + + let root_link_path = dir.path().join("root_link"); + os::unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap(); + os::unix::fs::symlink( + &dir.path().join("root/fennel"), + &dir.path().join("root/finnochio"), + ) + .unwrap(); + + dbg!("GOT HERE 2"); + + let project = Project::test(Arc::new(RealFs), [root_link_path.as_ref()], cx).await; + + dbg!("GOT HERE 2.5"); + project.update(cx, |project, cx| { + let tree = project.worktrees().next().unwrap().read(cx); + assert_eq!(tree.file_count(), 5); + assert_eq!( + tree.inode_for_path("fennel/grape"), + tree.inode_for_path("finnochio/grape") + ); + }); + + dbg!("GOT HERE 3"); +} #[gpui2::test] async fn test_managing_project_specific_settings(cx: &mut gpui2::TestAppContext) { @@ -2058,121 +2066,121 @@ async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui2::TestA }); } -// #[gpui2::test] -// async fn test_invalid_edits_from_lsp2(cx: &mut gpui2::TestAppContext) { -// init_test(cx); - -// let text = " -// use a::b; -// use a::c; - -// fn f() { -// b(); -// c(); -// } -// " -// .unindent(); - -// let fs = FakeFs::new(cx.executor().clone()); -// fs.insert_tree( -// "/dir", -// json!({ -// "a.rs": text.clone(), -// }), -// ) -// .await; - -// let project = Project::test(fs, ["/dir".as_ref()], cx).await; -// let buffer = project -// .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx)) -// .await -// .unwrap(); - -// // Simulate the language server sending us edits in a non-ordered fashion, -// // with ranges sometimes being inverted or pointing to invalid locations. 
-// let edits = project -// .update(cx, |project, cx| { -// project.edits_from_lsp( -// &buffer, -// [ -// lsp2::TextEdit { -// range: lsp2::Range::new( -// lsp2::Position::new(0, 9), -// lsp2::Position::new(0, 9), -// ), -// new_text: "\n\n".into(), -// }, -// lsp2::TextEdit { -// range: lsp2::Range::new( -// lsp2::Position::new(0, 8), -// lsp2::Position::new(0, 4), -// ), -// new_text: "a::{b, c}".into(), -// }, -// lsp2::TextEdit { -// range: lsp2::Range::new( -// lsp2::Position::new(1, 0), -// lsp2::Position::new(99, 0), -// ), -// new_text: "".into(), -// }, -// lsp2::TextEdit { -// range: lsp2::Range::new( -// lsp2::Position::new(0, 9), -// lsp2::Position::new(0, 9), -// ), -// new_text: " -// fn f() { -// b(); -// c(); -// }" -// .unindent(), -// }, -// ], -// LanguageServerId(0), -// None, -// cx, -// ) -// }) -// .await -// .unwrap(); - -// buffer.update(cx, |buffer, cx| { -// let edits = edits -// .into_iter() -// .map(|(range, text)| { -// ( -// range.start.to_point(buffer)..range.end.to_point(buffer), -// text, -// ) -// }) -// .collect::>(); - -// assert_eq!( -// edits, -// [ -// (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()), -// (Point::new(1, 0)..Point::new(2, 0), "".into()) -// ] -// ); - -// for (range, new_text) in edits { -// buffer.edit([(range, new_text)], None, cx); -// } -// assert_eq!( -// buffer.text(), -// " -// use a::{b, c}; - -// fn f() { -// b(); -// c(); -// } -// " -// .unindent() -// ); -// }); -// } +#[gpui2::test] +async fn test_invalid_edits_from_lsp2(cx: &mut gpui2::TestAppContext) { + init_test(cx); + + let text = " + use a::b; + use a::c; + + fn f() { + b(); + c(); + } + " + .unindent(); + + let fs = FakeFs::new(cx.executor().clone()); + fs.insert_tree( + "/dir", + json!({ + "a.rs": text.clone(), + }), + ) + .await; + + let project = Project::test(fs, ["/dir".as_ref()], cx).await; + let buffer = project + .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx)) + .await + .unwrap(); + + // Simulate the language server sending us edits in a non-ordered fashion, + // with ranges sometimes being inverted or pointing to invalid locations. 
+ let edits = project + .update(cx, |project, cx| { + project.edits_from_lsp( + &buffer, + [ + lsp2::TextEdit { + range: lsp2::Range::new( + lsp2::Position::new(0, 9), + lsp2::Position::new(0, 9), + ), + new_text: "\n\n".into(), + }, + lsp2::TextEdit { + range: lsp2::Range::new( + lsp2::Position::new(0, 8), + lsp2::Position::new(0, 4), + ), + new_text: "a::{b, c}".into(), + }, + lsp2::TextEdit { + range: lsp2::Range::new( + lsp2::Position::new(1, 0), + lsp2::Position::new(99, 0), + ), + new_text: "".into(), + }, + lsp2::TextEdit { + range: lsp2::Range::new( + lsp2::Position::new(0, 9), + lsp2::Position::new(0, 9), + ), + new_text: " + fn f() { + b(); + c(); + }" + .unindent(), + }, + ], + LanguageServerId(0), + None, + cx, + ) + }) + .await + .unwrap(); + + buffer.update(cx, |buffer, cx| { + let edits = edits + .into_iter() + .map(|(range, text)| { + ( + range.start.to_point(buffer)..range.end.to_point(buffer), + text, + ) + }) + .collect::>(); + + assert_eq!( + edits, + [ + (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()), + (Point::new(1, 0)..Point::new(2, 0), "".into()) + ] + ); + + for (range, new_text) in edits { + buffer.edit([(range, new_text)], None, cx); + } + assert_eq!( + buffer.text(), + " + use a::{b, c}; + + fn f() { + b(); + c(); + } + " + .unindent() + ); + }); +} fn chunks_with_diagnostics( buffer: &Buffer, @@ -2292,168 +2300,168 @@ async fn test_definition(cx: &mut gpui2::TestAppContext) { } } -// #[gpui2::test] -// async fn test_completions_without_edit_ranges(cx: &mut gpui2::TestAppContext) { -// init_test(cx); - -// let mut language = Language::new( -// LanguageConfig { -// name: "TypeScript".into(), -// path_suffixes: vec!["ts".to_string()], -// ..Default::default() -// }, -// Some(tree_sitter_typescript::language_typescript()), -// ); -// let mut fake_language_servers = language -// .set_fake_lsp_adapter(Arc::new(FakeLspAdapter { -// capabilities: lsp2::ServerCapabilities { -// completion_provider: Some(lsp2::CompletionOptions { -// trigger_characters: Some(vec![":".to_string()]), -// ..Default::default() -// }), -// ..Default::default() -// }, -// ..Default::default() -// })) -// .await; - -// let fs = FakeFs::new(cx.executor().clone()); -// fs.insert_tree( -// "/dir", -// json!({ -// "a.ts": "", -// }), -// ) -// .await; - -// let project = Project::test(fs, ["/dir".as_ref()], cx).await; -// project.update(cx, |project, _| project.languages.add(Arc::new(language))); -// let buffer = project -// .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx)) -// .await -// .unwrap(); - -// let fake_server = fake_language_servers.next().await.unwrap(); - -// let text = "let a = b.fqn"; -// buffer.update(cx, |buffer, cx| buffer.set_text(text, cx)); -// let completions = project.update(cx, |project, cx| { -// project.completions(&buffer, text.len(), cx) -// }); - -// fake_server -// .handle_request::(|_, _| async move { -// Ok(Some(lsp2::CompletionResponse::Array(vec![ -// lsp2::CompletionItem { -// label: "fullyQualifiedName?".into(), -// insert_text: Some("fullyQualifiedName".into()), -// ..Default::default() -// }, -// ]))) -// }) -// .next() -// .await; -// let completions = completions.await.unwrap(); -// let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot()); -// assert_eq!(completions.len(), 1); -// assert_eq!(completions[0].new_text, "fullyQualifiedName"); -// assert_eq!( -// completions[0].old_range.to_offset(&snapshot), -// text.len() - 3..text.len() -// ); - -// let text = "let a = \"atoms/cmp\""; -// buffer.update(cx, |buffer, cx| 
buffer.set_text(text, cx)); -// let completions = project.update(cx, |project, cx| { -// project.completions(&buffer, text.len() - 1, cx) -// }); - -// fake_server -// .handle_request::(|_, _| async move { -// Ok(Some(lsp2::CompletionResponse::Array(vec![ -// lsp2::CompletionItem { -// label: "component".into(), -// ..Default::default() -// }, -// ]))) -// }) -// .next() -// .await; -// let completions = completions.await.unwrap(); -// let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot()); -// assert_eq!(completions.len(), 1); -// assert_eq!(completions[0].new_text, "component"); -// assert_eq!( -// completions[0].old_range.to_offset(&snapshot), -// text.len() - 4..text.len() - 1 -// ); -// } - -// #[gpui2::test] -// async fn test_completions_with_carriage_returns(cx: &mut gpui2::TestAppContext) { -// init_test(cx); - -// let mut language = Language::new( -// LanguageConfig { -// name: "TypeScript".into(), -// path_suffixes: vec!["ts".to_string()], -// ..Default::default() -// }, -// Some(tree_sitter_typescript::language_typescript()), -// ); -// let mut fake_language_servers = language -// .set_fake_lsp_adapter(Arc::new(FakeLspAdapter { -// capabilities: lsp2::ServerCapabilities { -// completion_provider: Some(lsp2::CompletionOptions { -// trigger_characters: Some(vec![":".to_string()]), -// ..Default::default() -// }), -// ..Default::default() -// }, -// ..Default::default() -// })) -// .await; - -// let fs = FakeFs::new(cx.executor().clone()); -// fs.insert_tree( -// "/dir", -// json!({ -// "a.ts": "", -// }), -// ) -// .await; - -// let project = Project::test(fs, ["/dir".as_ref()], cx).await; -// project.update(cx, |project, _| project.languages.add(Arc::new(language))); -// let buffer = project -// .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx)) -// .await -// .unwrap(); - -// let fake_server = fake_language_servers.next().await.unwrap(); - -// let text = "let a = b.fqn"; -// buffer.update(cx, |buffer, cx| buffer.set_text(text, cx)); -// let completions = project.update(cx, |project, cx| { -// project.completions(&buffer, text.len(), cx) -// }); - -// fake_server -// .handle_request::(|_, _| async move { -// Ok(Some(lsp2::CompletionResponse::Array(vec![ -// lsp2::CompletionItem { -// label: "fullyQualifiedName?".into(), -// insert_text: Some("fully\rQualified\r\nName".into()), -// ..Default::default() -// }, -// ]))) -// }) -// .next() -// .await; -// let completions = completions.await.unwrap(); -// assert_eq!(completions.len(), 1); -// assert_eq!(completions[0].new_text, "fully\nQualified\nName"); -// } +#[gpui2::test] +async fn test_completions_without_edit_ranges(cx: &mut gpui2::TestAppContext) { + init_test(cx); + + let mut language = Language::new( + LanguageConfig { + name: "TypeScript".into(), + path_suffixes: vec!["ts".to_string()], + ..Default::default() + }, + Some(tree_sitter_typescript::language_typescript()), + ); + let mut fake_language_servers = language + .set_fake_lsp_adapter(Arc::new(FakeLspAdapter { + capabilities: lsp2::ServerCapabilities { + completion_provider: Some(lsp2::CompletionOptions { + trigger_characters: Some(vec![":".to_string()]), + ..Default::default() + }), + ..Default::default() + }, + ..Default::default() + })) + .await; + + let fs = FakeFs::new(cx.executor().clone()); + fs.insert_tree( + "/dir", + json!({ + "a.ts": "", + }), + ) + .await; + + let project = Project::test(fs, ["/dir".as_ref()], cx).await; + project.update(cx, |project, _| project.languages.add(Arc::new(language))); + let buffer = project + .update(cx, |p, cx| 
p.open_local_buffer("/dir/a.ts", cx)) + .await + .unwrap(); + + let fake_server = fake_language_servers.next().await.unwrap(); + + let text = "let a = b.fqn"; + buffer.update(cx, |buffer, cx| buffer.set_text(text, cx)); + let completions = project.update(cx, |project, cx| { + project.completions(&buffer, text.len(), cx) + }); + + fake_server + .handle_request::(|_, _| async move { + Ok(Some(lsp2::CompletionResponse::Array(vec![ + lsp2::CompletionItem { + label: "fullyQualifiedName?".into(), + insert_text: Some("fullyQualifiedName".into()), + ..Default::default() + }, + ]))) + }) + .next() + .await; + let completions = completions.await.unwrap(); + let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot()); + assert_eq!(completions.len(), 1); + assert_eq!(completions[0].new_text, "fullyQualifiedName"); + assert_eq!( + completions[0].old_range.to_offset(&snapshot), + text.len() - 3..text.len() + ); + + let text = "let a = \"atoms/cmp\""; + buffer.update(cx, |buffer, cx| buffer.set_text(text, cx)); + let completions = project.update(cx, |project, cx| { + project.completions(&buffer, text.len() - 1, cx) + }); + + fake_server + .handle_request::(|_, _| async move { + Ok(Some(lsp2::CompletionResponse::Array(vec![ + lsp2::CompletionItem { + label: "component".into(), + ..Default::default() + }, + ]))) + }) + .next() + .await; + let completions = completions.await.unwrap(); + let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot()); + assert_eq!(completions.len(), 1); + assert_eq!(completions[0].new_text, "component"); + assert_eq!( + completions[0].old_range.to_offset(&snapshot), + text.len() - 4..text.len() - 1 + ); +} + +#[gpui2::test] +async fn test_completions_with_carriage_returns(cx: &mut gpui2::TestAppContext) { + init_test(cx); + + let mut language = Language::new( + LanguageConfig { + name: "TypeScript".into(), + path_suffixes: vec!["ts".to_string()], + ..Default::default() + }, + Some(tree_sitter_typescript::language_typescript()), + ); + let mut fake_language_servers = language + .set_fake_lsp_adapter(Arc::new(FakeLspAdapter { + capabilities: lsp2::ServerCapabilities { + completion_provider: Some(lsp2::CompletionOptions { + trigger_characters: Some(vec![":".to_string()]), + ..Default::default() + }), + ..Default::default() + }, + ..Default::default() + })) + .await; + + let fs = FakeFs::new(cx.executor().clone()); + fs.insert_tree( + "/dir", + json!({ + "a.ts": "", + }), + ) + .await; + + let project = Project::test(fs, ["/dir".as_ref()], cx).await; + project.update(cx, |project, _| project.languages.add(Arc::new(language))); + let buffer = project + .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx)) + .await + .unwrap(); + + let fake_server = fake_language_servers.next().await.unwrap(); + + let text = "let a = b.fqn"; + buffer.update(cx, |buffer, cx| buffer.set_text(text, cx)); + let completions = project.update(cx, |project, cx| { + project.completions(&buffer, text.len(), cx) + }); + + fake_server + .handle_request::(|_, _| async move { + Ok(Some(lsp2::CompletionResponse::Array(vec![ + lsp2::CompletionItem { + label: "fullyQualifiedName?".into(), + insert_text: Some("fully\rQualified\r\nName".into()), + ..Default::default() + }, + ]))) + }) + .next() + .await; + let completions = completions.await.unwrap(); + assert_eq!(completions.len(), 1); + assert_eq!(completions[0].new_text, "fully\nQualified\nName"); +} #[gpui2::test(iterations = 10)] async fn test_apply_code_actions_with_commands(cx: &mut gpui2::TestAppContext) { @@ -2636,212 +2644,213 @@ async fn 
test_save_in_single_file_worktree(cx: &mut gpui2::TestAppContext) { assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text())); } -// #[gpui2::test] -// async fn test_save_as(cx: &mut gpui2::TestAppContext) { -// init_test(cx); - -// let fs = FakeFs::new(cx.executor().clone()); -// fs.insert_tree("/dir", json!({})).await; - -// let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await; - -// let languages = project.update(cx, |project, _| project.languages().clone()); -// languages.register( -// "/some/path", -// LanguageConfig { -// name: "Rust".into(), -// path_suffixes: vec!["rs".into()], -// ..Default::default() -// }, -// tree_sitter_rust::language(), -// vec![], -// |_| Default::default(), -// ); - -// let buffer = project.update(cx, |project, cx| { -// project.create_buffer("", None, cx).unwrap() -// }); -// buffer.update(cx, |buffer, cx| { -// buffer.edit([(0..0, "abc")], None, cx); -// assert!(buffer.is_dirty()); -// assert!(!buffer.has_conflict()); -// assert_eq!(buffer.language().unwrap().name().as_ref(), "Plain Text"); -// }); -// project -// .update(cx, |project, cx| { -// project.save_buffer_as(buffer.clone(), "/dir/file1.rs".into(), cx) -// }) -// .await -// .unwrap(); -// assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc"); - -// cx.executor().run_until_parked(); -// buffer.update(cx, |buffer, cx| { -// assert_eq!( -// buffer.file().unwrap().full_path(cx), -// Path::new("dir/file1.rs") -// ); -// assert!(!buffer.is_dirty()); -// assert!(!buffer.has_conflict()); -// assert_eq!(buffer.language().unwrap().name().as_ref(), "Rust"); -// }); - -// let opened_buffer = project -// .update(cx, |project, cx| { -// project.open_local_buffer("/dir/file1.rs", cx) -// }) -// .await -// .unwrap(); -// assert_eq!(opened_buffer, buffer); -// } +#[gpui2::test] +async fn test_save_as(cx: &mut gpui2::TestAppContext) { + init_test(cx); + + let fs = FakeFs::new(cx.executor().clone()); + fs.insert_tree("/dir", json!({})).await; + + let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await; + + let languages = project.update(cx, |project, _| project.languages().clone()); + languages.register( + "/some/path", + LanguageConfig { + name: "Rust".into(), + path_suffixes: vec!["rs".into()], + ..Default::default() + }, + tree_sitter_rust::language(), + vec![], + |_| Default::default(), + ); + + let buffer = project.update(cx, |project, cx| { + project.create_buffer("", None, cx).unwrap() + }); + buffer.update(cx, |buffer, cx| { + buffer.edit([(0..0, "abc")], None, cx); + assert!(buffer.is_dirty()); + assert!(!buffer.has_conflict()); + assert_eq!(buffer.language().unwrap().name().as_ref(), "Plain Text"); + }); + project + .update(cx, |project, cx| { + project.save_buffer_as(buffer.clone(), "/dir/file1.rs".into(), cx) + }) + .await + .unwrap(); + assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc"); + + cx.executor().run_until_parked(); + buffer.update(cx, |buffer, cx| { + assert_eq!( + buffer.file().unwrap().full_path(cx), + Path::new("dir/file1.rs") + ); + assert!(!buffer.is_dirty()); + assert!(!buffer.has_conflict()); + assert_eq!(buffer.language().unwrap().name().as_ref(), "Rust"); + }); + + let opened_buffer = project + .update(cx, |project, cx| { + project.open_local_buffer("/dir/file1.rs", cx) + }) + .await + .unwrap(); + assert_eq!(opened_buffer, buffer); +} #[gpui2::test(retries = 5)] -// async fn test_rescan_and_remote_updates(cx: &mut gpui2::TestAppContext) { -// init_test(cx); -// cx.executor().allow_parking(); - -// let dir = 
temp_tree(json!({ -// "a": { -// "file1": "", -// "file2": "", -// "file3": "", -// }, -// "b": { -// "c": { -// "file4": "", -// "file5": "", -// } -// } -// })); - -// let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await; -// let rpc = project.update(cx, |p, _| p.client.clone()); - -// let buffer_for_path = |path: &'static str, cx: &mut gpui2::TestAppContext| { -// let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx)); -// async move { buffer.await.unwrap() } -// }; -// let id_for_path = |path: &'static str, cx: &mut gpui2::TestAppContext| { -// project.update(cx, |project, cx| { -// let tree = project.worktrees().next().unwrap(); -// tree.read(cx) -// .entry_for_path(path) -// .unwrap_or_else(|| panic!("no entry for path {}", path)) -// .id -// }) -// }; - -// let buffer2 = buffer_for_path("a/file2", cx).await; -// let buffer3 = buffer_for_path("a/file3", cx).await; -// let buffer4 = buffer_for_path("b/c/file4", cx).await; -// let buffer5 = buffer_for_path("b/c/file5", cx).await; - -// let file2_id = id_for_path("a/file2", cx); -// let file3_id = id_for_path("a/file3", cx); -// let file4_id = id_for_path("b/c/file4", cx); - -// // Create a remote copy of this worktree. -// let tree = project.update(cx, |project, _| project.worktrees().next().unwrap()); - -// let metadata = tree.update(cx, |tree, _| tree.as_local().unwrap().metadata_proto()); - -// let updates = Arc::new(Mutex::new(Vec::new())); -// tree.update(cx, |tree, cx| { -// let _ = tree.as_local_mut().unwrap().observe_updates(0, cx, { -// let updates = updates.clone(); -// move |update| { -// updates.lock().push(update); -// async { true } -// } -// }); -// }); - -// let remote = cx.update(|cx| Worktree::remote(1, 1, metadata, rpc.clone(), cx)); -// cx.executor().run_until_parked(); - -// cx.update(|cx| { -// assert!(!buffer2.read(cx).is_dirty()); -// assert!(!buffer3.read(cx).is_dirty()); -// assert!(!buffer4.read(cx).is_dirty()); -// assert!(!buffer5.read(cx).is_dirty()); -// }); - -// // Rename and delete files and directories. 
-// tree.flush_fs_events(cx).await; -// std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap(); -// std::fs::remove_file(dir.path().join("b/c/file5")).unwrap(); -// std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap(); -// std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap(); -// tree.flush_fs_events(cx).await; - -// let expected_paths = vec![ -// "a", -// "a/file1", -// "a/file2.new", -// "b", -// "d", -// "d/file3", -// "d/file4", -// ]; - -// cx.update(|app| { -// assert_eq!( -// tree.read(app) -// .paths() -// .map(|p| p.to_str().unwrap()) -// .collect::>(), -// expected_paths -// ); -// }); - -// assert_eq!(id_for_path("a/file2.new", cx), file2_id); -// assert_eq!(id_for_path("d/file3", cx), file3_id); -// assert_eq!(id_for_path("d/file4", cx), file4_id); - -// cx.update(|cx| { -// assert_eq!( -// buffer2.read(cx).file().unwrap().path().as_ref(), -// Path::new("a/file2.new") -// ); -// assert_eq!( -// buffer3.read(cx).file().unwrap().path().as_ref(), -// Path::new("d/file3") -// ); -// assert_eq!( -// buffer4.read(cx).file().unwrap().path().as_ref(), -// Path::new("d/file4") -// ); -// assert_eq!( -// buffer5.read(cx).file().unwrap().path().as_ref(), -// Path::new("b/c/file5") -// ); - -// assert!(!buffer2.read(cx).file().unwrap().is_deleted()); -// assert!(!buffer3.read(cx).file().unwrap().is_deleted()); -// assert!(!buffer4.read(cx).file().unwrap().is_deleted()); -// assert!(buffer5.read(cx).file().unwrap().is_deleted()); -// }); - -// // Update the remote worktree. Check that it becomes consistent with the -// // local worktree. -// cx.executor().run_until_parked(); - -// remote.update(cx, |remote, _| { -// for update in updates.lock().drain(..) { -// remote.as_remote_mut().unwrap().update_from_remote(update); -// } -// }); -// cx.executor().run_until_parked(); -// remote.update(cx, |remote, _| { -// assert_eq!( -// remote -// .paths() -// .map(|p| p.to_str().unwrap()) -// .collect::>(), -// expected_paths -// ); -// }); -// } +async fn test_rescan_and_remote_updates(cx: &mut gpui2::TestAppContext) { + init_test(cx); + // cx.executor().allow_parking(); + + let dir = temp_tree(json!({ + "a": { + "file1": "", + "file2": "", + "file3": "", + }, + "b": { + "c": { + "file4": "", + "file5": "", + } + } + })); + + let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await; + let rpc = project.update(cx, |p, _| p.client.clone()); + + let buffer_for_path = |path: &'static str, cx: &mut gpui2::TestAppContext| { + let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx)); + async move { buffer.await.unwrap() } + }; + let id_for_path = |path: &'static str, cx: &mut gpui2::TestAppContext| { + project.update(cx, |project, cx| { + let tree = project.worktrees().next().unwrap(); + tree.read(cx) + .entry_for_path(path) + .unwrap_or_else(|| panic!("no entry for path {}", path)) + .id + }) + }; + + let buffer2 = buffer_for_path("a/file2", cx).await; + let buffer3 = buffer_for_path("a/file3", cx).await; + let buffer4 = buffer_for_path("b/c/file4", cx).await; + let buffer5 = buffer_for_path("b/c/file5", cx).await; + + let file2_id = id_for_path("a/file2", cx); + let file3_id = id_for_path("a/file3", cx); + let file4_id = id_for_path("b/c/file4", cx); + + // Create a remote copy of this worktree. 
+ let tree = project.update(cx, |project, _| project.worktrees().next().unwrap()); + + let metadata = tree.update(cx, |tree, _| tree.as_local().unwrap().metadata_proto()); + + let updates = Arc::new(Mutex::new(Vec::new())); + tree.update(cx, |tree, cx| { + let _ = tree.as_local_mut().unwrap().observe_updates(0, cx, { + let updates = updates.clone(); + move |update| { + updates.lock().push(update); + async { true } + } + }); + }); + + let remote = cx.update(|cx| Worktree::remote(1, 1, metadata, rpc.clone(), cx)); + cx.executor().run_until_parked(); + + cx.update(|cx| { + assert!(!buffer2.read(cx).is_dirty()); + assert!(!buffer3.read(cx).is_dirty()); + assert!(!buffer4.read(cx).is_dirty()); + assert!(!buffer5.read(cx).is_dirty()); + }); + + // Rename and delete files and directories. + tree.flush_fs_events(cx).await; + std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap(); + std::fs::remove_file(dir.path().join("b/c/file5")).unwrap(); + std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap(); + std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap(); + tree.flush_fs_events(cx).await; + + let expected_paths = vec![ + "a", + "a/file1", + "a/file2.new", + "b", + "d", + "d/file3", + "d/file4", + ]; + + cx.update(|app| { + assert_eq!( + tree.read(app) + .paths() + .map(|p| p.to_str().unwrap()) + .collect::>(), + expected_paths + ); + }); + + assert_eq!(id_for_path("a/file2.new", cx), file2_id); + assert_eq!(id_for_path("d/file3", cx), file3_id); + assert_eq!(id_for_path("d/file4", cx), file4_id); + + cx.update(|cx| { + assert_eq!( + buffer2.read(cx).file().unwrap().path().as_ref(), + Path::new("a/file2.new") + ); + assert_eq!( + buffer3.read(cx).file().unwrap().path().as_ref(), + Path::new("d/file3") + ); + assert_eq!( + buffer4.read(cx).file().unwrap().path().as_ref(), + Path::new("d/file4") + ); + assert_eq!( + buffer5.read(cx).file().unwrap().path().as_ref(), + Path::new("b/c/file5") + ); + + assert!(!buffer2.read(cx).file().unwrap().is_deleted()); + assert!(!buffer3.read(cx).file().unwrap().is_deleted()); + assert!(!buffer4.read(cx).file().unwrap().is_deleted()); + assert!(buffer5.read(cx).file().unwrap().is_deleted()); + }); + + // Update the remote worktree. Check that it becomes consistent with the + // local worktree. + cx.executor().run_until_parked(); + + remote.update(cx, |remote, _| { + for update in updates.lock().drain(..) { + remote.as_remote_mut().unwrap().update_from_remote(update); + } + }); + cx.executor().run_until_parked(); + remote.update(cx, |remote, _| { + assert_eq!( + remote + .paths() + .map(|p| p.to_str().unwrap()) + .collect::>(), + expected_paths + ); + }); +} + #[gpui2::test(iterations = 10)] async fn test_buffer_identity_across_renames(cx: &mut gpui2::TestAppContext) { init_test(cx); diff --git a/crates/project2/src/worktree.rs b/crates/project2/src/worktree.rs index 2718b5d8f0bc85bc9b619f90c8e49e4eccc7cd24..c15977c5e0145d73170509ad5fa6c2a77bafa82a 100644 --- a/crates/project2/src/worktree.rs +++ b/crates/project2/src/worktree.rs @@ -297,11 +297,15 @@ impl Worktree { // After determining whether the root entry is a file or a directory, populate the // snapshot's "root name", which will be used for the purpose of fuzzy matching. 
         let abs_path = path.into();
+        eprintln!("get root metadata");
+
         let metadata = fs
             .metadata(&abs_path)
             .await
             .context("failed to stat worktree path")?;
 
+        eprintln!("got root metadata");
+
         cx.build_model(move |cx: &mut ModelContext<Worktree>| {
             let root_name = abs_path
                 .file_name()
diff --git a/crates/rpc2/src/peer.rs b/crates/rpc2/src/peer.rs
index 367eba2b4e198a04e630c4990d71e694bd5b099e..104ab1b4211271dbc982184a01461a29d8874a75 100644
--- a/crates/rpc2/src/peer.rs
+++ b/crates/rpc2/src/peer.rs
@@ -559,7 +559,6 @@ mod tests {
     use async_tungstenite::tungstenite::Message as WebSocketMessage;
     use gpui2::TestAppContext;
 
-    #[ctor::ctor]
     fn init_logger() {
         if std::env::var("RUST_LOG").is_ok() {
             env_logger::init();
@@ -568,6 +567,8 @@ mod tests {
 
     #[gpui2::test(iterations = 50)]
     async fn test_request_response(cx: &mut TestAppContext) {
+        init_logger();
+
         let executor = cx.executor();
 
         // create 2 clients connected to 1 server
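Note on the executor.rs hunk above: `block_test` no longer polls the future inline; it pairs the future with a schedule function that dispatches the runnable to the main thread (via `async_task::spawn_unchecked`, since the future is neither `Send` nor `'static`), schedules it once, and then blocks on the returned task. A minimal sketch of the same spawn-and-block pattern using the safe `async_task::spawn` API; `spawn_with_dispatcher` is a hypothetical helper and the `dispatch` closure stands in for something like `dispatcher.dispatch_on_main_thread(runnable)` in the diff, not gpui's real type:

```rust
use std::future::Future;

use async_task::Runnable;

/// Spawn `future` so that each wake-up hands a `Runnable` to `dispatch`, and
/// return a task handle that the caller can block on or await.
fn spawn_with_dispatcher<F>(
    future: F,
    dispatch: impl Fn(Runnable) + Send + Sync + 'static,
) -> async_task::Task<F::Output>
where
    F: Future + Send + 'static,
    F::Output: Send + 'static,
{
    // `spawn` pairs the future with a schedule function; scheduling the first
    // runnable kicks off execution on whatever thread `dispatch` targets.
    let (runnable, task) = async_task::spawn(future, dispatch);
    runnable.schedule();
    task
}
```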
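The `block_internal` loop that the `dbg!` calls were threaded through follows the standard poll-then-park pattern: poll the pinned future with a waker that sets a flag and unparks the blocked thread, and park only when the future returns `Pending` without having been woken in the meantime. A self-contained sketch of that pattern with the `parking` and `waker-fn` crates (the function name and the omission of gpui's deadlock backtrace check are simplifications, not the executor's actual API):

```rust
use std::future::Future;
use std::sync::atomic::{AtomicBool, Ordering::SeqCst};
use std::sync::Arc;
use std::task::{Context, Poll};

use futures::pin_mut;
use waker_fn::waker_fn;

/// Block the current thread until `future` completes.
fn block_on<F: Future>(future: F) -> F::Output {
    pin_mut!(future);

    let (parker, unparker) = parking::pair();
    let awoken = Arc::new(AtomicBool::new(false));
    let waker = waker_fn({
        let awoken = awoken.clone();
        move || {
            // Record the wake-up and unpark the blocked thread.
            awoken.store(true, SeqCst);
            unparker.unpark();
        }
    });
    let mut cx = Context::from_waker(&waker);

    loop {
        match future.as_mut().poll(&mut cx) {
            Poll::Ready(output) => return output,
            Poll::Pending => {
                // Only park if the waker hasn't already fired; an `unpark`
                // before `park` leaves a token, so a late wake-up is not lost.
                if !awoken.swap(false, SeqCst) {
                    parker.park();
                }
            }
        }
    }
}
```

The `awoken` flag is what makes the loop safe against wake-ups that land between the `Pending` return and the park: in that case the swap observes `true` and the loop re-polls immediately instead of sleeping.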
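On the rpc2 change: removing `#[ctor::ctor]` means `init_logger` no longer runs automatically at process start, so `test_request_response` now calls it explicitly. Since `env_logger::init()` panics if the global logger is set twice in one process, repeated calls (for example across test iterations) would need a guard; a sketch of the usual `Once`-guarded shape, which is an illustration and not what the hunk itself does:

```rust
use std::sync::Once;

static LOGGER_INIT: Once = Once::new();

/// Initialize env_logger at most once, and only when RUST_LOG is set.
fn init_logger() {
    if std::env::var("RUST_LOG").is_ok() {
        LOGGER_INIT.call_once(env_logger::init);
    }
}
```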