Cargo.lock 🔗
@@ -8771,6 +8771,7 @@ dependencies = [
"settings",
"smol",
"toml 0.8.10",
+ "util",
"worktree",
]
Created by Max Brunsfeld and Conrad
Release Notes:
- N/A
---------
Co-authored-by: Conrad <conrad@zed.dev>
Cargo.lock | 1
crates/assistant/src/assistant_panel.rs | 2
crates/assistant/src/slash_command/diagnostics_command.rs | 2
crates/assistant/src/slash_command/docs_command.rs | 2
crates/assistant/src/slash_command/file_command.rs | 2
crates/assistant/src/slash_command/project_command.rs | 2
crates/assistant_tooling/src/project_context.rs | 2
crates/call/src/room.rs | 2
crates/collab/src/tests/channel_guest_tests.rs | 2
crates/collab/src/tests/editor_tests.rs | 8
crates/collab/src/tests/integration_tests.rs | 29
crates/collab/src/tests/random_project_collaboration_tests.rs | 10
crates/editor/src/editor_tests.rs | 6
crates/editor/src/inlay_hint_cache.rs | 4
crates/file_finder/src/file_finder_tests.rs | 2
crates/file_finder/src/new_path_prompt.rs | 6
crates/git/src/repository.rs | 7
crates/go_to_line/src/go_to_line.rs | 2
crates/language_tools/src/lsp_log.rs | 2
crates/language_tools/src/lsp_log_tests.rs | 2
crates/outline/src/outline.rs | 2
crates/project/src/buffer_store.rs | 369 ++
crates/project/src/connection_manager.rs | 2
crates/project/src/debounced_delay.rs | 19
crates/project/src/project.rs | 640 +---
crates/project/src/project_tests.rs | 36
crates/project/src/worktree_store.rs | 311 ++
crates/project_panel/src/project_panel.rs | 8
crates/remote/src/ssh_session.rs | 9
crates/remote_server/Cargo.toml | 1
crates/remote_server/src/headless_project.rs | 107
crates/remote_server/src/main.rs | 1
crates/remote_server/src/remote_editing_tests.rs | 45
crates/search/src/project_search.rs | 6
crates/tab_switcher/src/tab_switcher_tests.rs | 2
crates/tasks_ui/src/lib.rs | 2
crates/workspace/src/pane.rs | 2
crates/workspace/src/workspace.rs | 14
crates/worktree/src/worktree.rs | 30
39 files changed, 999 insertions(+), 702 deletions(-)
@@ -8771,6 +8771,7 @@ dependencies = [
"settings",
"smol",
"toml 0.8.10",
+ "util",
"worktree",
]
@@ -2867,7 +2867,7 @@ fn make_lsp_adapter_delegate(
project.update(cx, |project, cx| {
// TODO: Find the right worktree.
let worktree = project
- .worktrees()
+ .worktrees(cx)
.next()
.ok_or_else(|| anyhow!("no worktrees when constructing ProjectLspAdapterDelegate"))?;
Ok(ProjectLspAdapterDelegate::new(project, &worktree, cx) as Arc<dyn LspAdapterDelegate>)
@@ -284,7 +284,7 @@ fn collect_diagnostics(
PathBuf::try_from(path)
.ok()
.and_then(|path| {
- project.read(cx).worktrees().find_map(|worktree| {
+ project.read(cx).worktrees(cx).find_map(|worktree| {
let worktree = worktree.read(cx);
let worktree_root_path = Path::new(worktree.root_name());
let relative_path = path.strip_prefix(worktree_root_path).ok()?;
@@ -24,7 +24,7 @@ impl DocsSlashCommand {
pub const NAME: &'static str = "docs";
fn path_to_cargo_toml(project: Model<Project>, cx: &mut AppContext) -> Option<Arc<Path>> {
- let worktree = project.read(cx).worktrees().next()?;
+ let worktree = project.read(cx).worktrees(cx).next()?;
let worktree = worktree.read(cx);
let entry = worktree.entry_for_path("Cargo.toml")?;
let path = ProjectPath {
@@ -188,7 +188,7 @@ fn collect_files(
let project_handle = project.downgrade();
let snapshots = project
.read(cx)
- .worktrees()
+ .worktrees(cx)
.map(|worktree| worktree.read(cx).snapshot())
.collect::<Vec<_>>();
cx.spawn(|mut cx| async move {
@@ -75,7 +75,7 @@ impl ProjectSlashCommand {
}
fn path_to_cargo_toml(project: Model<Project>, cx: &mut AppContext) -> Option<Arc<Path>> {
- let worktree = project.read(cx).worktrees().next()?;
+ let worktree = project.read(cx).worktrees(cx).next()?;
let worktree = worktree.read(cx);
let entry = worktree.entry_for_path("Cargo.toml")?;
let path = ProjectPath {
@@ -222,7 +222,7 @@ mod tests {
let worktree_ids = project.read_with(cx, |project, cx| {
project
- .worktrees()
+ .worktrees(cx)
.map(|worktree| worktree.read(cx).id())
.collect::<Vec<_>>()
});
@@ -526,7 +526,7 @@ impl Room {
rejoined_projects.push(proto::RejoinProject {
id: project_id,
worktrees: project
- .worktrees()
+ .worktrees(cx)
.map(|worktree| {
let worktree = worktree.read(cx);
proto::RejoinWorktree {
@@ -52,7 +52,7 @@ async fn test_channel_guests(
assert!(project_b.read_with(cx_b, |project, _| project.is_read_only()));
assert!(project_b
.update(cx_b, |project, cx| {
- let worktree_id = project.worktrees().next().unwrap().read(cx).id();
+ let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
project.create_entry((worktree_id, "b.txt"), false, cx)
})
.await
@@ -76,7 +76,7 @@ async fn test_host_disconnect(
let active_call_a = cx_a.read(ActiveCall::global);
let (project_a, worktree_id) = client_a.build_local_project("/a", cx_a).await;
- let worktree_a = project_a.read_with(cx_a, |project, _| project.worktrees().next().unwrap());
+ let worktree_a = project_a.read_with(cx_a, |project, cx| project.worktrees(cx).next().unwrap());
let project_id = active_call_a
.update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
.await
@@ -1144,7 +1144,7 @@ async fn test_share_project(
});
project_b.read_with(cx_b, |project, cx| {
- let worktree = project.worktrees().next().unwrap().read(cx);
+ let worktree = project.worktrees(cx).next().unwrap().read(cx);
assert_eq!(
worktree.paths().map(AsRef::as_ref).collect::<Vec<_>>(),
[
@@ -1158,7 +1158,7 @@ async fn test_share_project(
project_b
.update(cx_b, |project, cx| {
- let worktree = project.worktrees().next().unwrap();
+ let worktree = project.worktrees(cx).next().unwrap();
let entry = worktree.read(cx).entry_for_path("ignored-dir").unwrap();
project.expand_entry(worktree_id, entry.id, cx).unwrap()
})
@@ -1166,7 +1166,7 @@ async fn test_share_project(
.unwrap();
project_b.read_with(cx_b, |project, cx| {
- let worktree = project.worktrees().next().unwrap().read(cx);
+ let worktree = project.worktrees(cx).next().unwrap().read(cx);
assert_eq!(
worktree.paths().map(AsRef::as_ref).collect::<Vec<_>>(),
[
@@ -1377,7 +1377,7 @@ async fn test_unshare_project(
.await
.unwrap();
- let worktree_a = project_a.read_with(cx_a, |project, _| project.worktrees().next().unwrap());
+ let worktree_a = project_a.read_with(cx_a, |project, cx| project.worktrees(cx).next().unwrap());
let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
executor.run_until_parked();
@@ -1505,7 +1505,8 @@ async fn test_project_reconnect(
let (project_a1, _) = client_a.build_local_project("/root-1/dir1", cx_a).await;
let (project_a2, _) = client_a.build_local_project("/root-2", cx_a).await;
let (project_a3, _) = client_a.build_local_project("/root-3", cx_a).await;
- let worktree_a1 = project_a1.read_with(cx_a, |project, _| project.worktrees().next().unwrap());
+ let worktree_a1 =
+ project_a1.read_with(cx_a, |project, cx| project.worktrees(cx).next().unwrap());
let project1_id = active_call_a
.update(cx_a, |call, cx| call.share_project(project_a1.clone(), cx))
.await
@@ -2308,7 +2309,7 @@ async fn test_propagate_saves_and_fs_changes(
.await;
let (project_a, worktree_id) = client_a.build_local_project("/a", cx_a).await;
- let worktree_a = project_a.read_with(cx_a, |p, _| p.worktrees().next().unwrap());
+ let worktree_a = project_a.read_with(cx_a, |p, cx| p.worktrees(cx).next().unwrap());
let project_id = active_call_a
.update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
.await
@@ -2318,9 +2319,9 @@ async fn test_propagate_saves_and_fs_changes(
let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
let project_c = client_c.build_dev_server_project(project_id, cx_c).await;
- let worktree_b = project_b.read_with(cx_b, |p, _| p.worktrees().next().unwrap());
+ let worktree_b = project_b.read_with(cx_b, |p, cx| p.worktrees(cx).next().unwrap());
- let worktree_c = project_c.read_with(cx_c, |p, _| p.worktrees().next().unwrap());
+ let worktree_c = project_c.read_with(cx_c, |p, cx| p.worktrees(cx).next().unwrap());
// Open and edit a buffer as both guests B and C.
let buffer_b = project_b
@@ -3022,8 +3023,8 @@ async fn test_fs_operations(
.unwrap();
let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
- let worktree_a = project_a.read_with(cx_a, |project, _| project.worktrees().next().unwrap());
- let worktree_b = project_b.read_with(cx_b, |project, _| project.worktrees().next().unwrap());
+ let worktree_a = project_a.read_with(cx_a, |project, cx| project.worktrees(cx).next().unwrap());
+ let worktree_b = project_b.read_with(cx_b, |project, cx| project.worktrees(cx).next().unwrap());
let entry = project_b
.update(cx_b, |project, cx| {
@@ -3323,7 +3324,7 @@ async fn test_local_settings(
// As client B, join that project and observe the local settings.
let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
- let worktree_b = project_b.read_with(cx_b, |project, _| project.worktrees().next().unwrap());
+ let worktree_b = project_b.read_with(cx_b, |project, cx| project.worktrees(cx).next().unwrap());
executor.run_until_parked();
cx_b.read(|cx| {
let store = cx.global::<SettingsStore>();
@@ -3735,7 +3736,7 @@ async fn test_leaving_project(
// Client B opens a buffer.
let buffer_b1 = project_b1
.update(cx_b, |project, cx| {
- let worktree_id = project.worktrees().next().unwrap().read(cx).id();
+ let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
project.open_buffer((worktree_id, "a.txt"), cx)
})
.await
@@ -3773,7 +3774,7 @@ async fn test_leaving_project(
let buffer_b2 = project_b2
.update(cx_b, |project, cx| {
- let worktree_id = project.worktrees().next().unwrap().read(cx).id();
+ let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
project.open_buffer((worktree_id, "a.txt"), cx)
})
.await
@@ -4627,7 +4628,7 @@ async fn test_definition(
.unwrap();
cx_b.read(|cx| {
assert_eq!(definitions_1.len(), 1);
- assert_eq!(project_b.read(cx).worktrees().count(), 2);
+ assert_eq!(project_b.read(cx).worktrees(cx).count(), 2);
let target_buffer = definitions_1[0].target.buffer.read(cx);
assert_eq!(
target_buffer.text(),
@@ -4656,7 +4657,7 @@ async fn test_definition(
.unwrap();
cx_b.read(|cx| {
assert_eq!(definitions_2.len(), 1);
- assert_eq!(project_b.read(cx).worktrees().count(), 2);
+ assert_eq!(project_b.read(cx).worktrees(cx).count(), 2);
let target_buffer = definitions_2[0].target.buffer.read(cx);
assert_eq!(
target_buffer.text(),
@@ -4814,7 +4815,7 @@ async fn test_references(
assert!(status.pending_work.is_empty());
assert_eq!(references.len(), 3);
- assert_eq!(project.worktrees().count(), 2);
+ assert_eq!(project.worktrees(cx).count(), 2);
let two_buffer = references[0].buffer.read(cx);
let three_buffer = references[2].buffer.read(cx);
@@ -6199,7 +6200,7 @@ async fn test_preview_tabs(cx: &mut TestAppContext) {
let project = workspace.update(cx, |workspace, _| workspace.project().clone());
let worktree_id = project.update(cx, |project, cx| {
- project.worktrees().next().unwrap().read(cx).id()
+ project.worktrees(cx).next().unwrap().read(cx).id()
});
let path_1 = ProjectPath {
@@ -301,7 +301,7 @@ impl RandomizedTest for ProjectCollaborationTest {
let is_local = project.read_with(cx, |project, _| project.is_local());
let worktree = project.read_with(cx, |project, cx| {
project
- .worktrees()
+ .worktrees(cx)
.filter(|worktree| {
let worktree = worktree.read(cx);
worktree.is_visible()
@@ -423,7 +423,7 @@ impl RandomizedTest for ProjectCollaborationTest {
81.. => {
let worktree = project.read_with(cx, |project, cx| {
project
- .worktrees()
+ .worktrees(cx)
.filter(|worktree| {
let worktree = worktree.read(cx);
worktree.is_visible()
@@ -1172,7 +1172,7 @@ impl RandomizedTest for ProjectCollaborationTest {
let host_worktree_snapshots =
host_project.read_with(host_cx, |host_project, cx| {
host_project
- .worktrees()
+ .worktrees(cx)
.map(|worktree| {
let worktree = worktree.read(cx);
(worktree.id(), worktree.snapshot())
@@ -1180,7 +1180,7 @@ impl RandomizedTest for ProjectCollaborationTest {
.collect::<BTreeMap<_, _>>()
});
let guest_worktree_snapshots = guest_project
- .worktrees()
+ .worktrees(cx)
.map(|worktree| {
let worktree = worktree.read(cx);
(worktree.id(), worktree.snapshot())
@@ -1538,7 +1538,7 @@ fn project_path_for_full_path(
let root_name = components.next().unwrap().as_os_str().to_str().unwrap();
let path = components.as_path().into();
let worktree_id = project.read_with(cx, |project, cx| {
- project.worktrees().find_map(|worktree| {
+ project.worktrees(cx).find_map(|worktree| {
let worktree = worktree.read(cx);
if worktree.root_name() == root_name {
Some(worktree.id())
@@ -6253,8 +6253,8 @@ async fn test_multibuffer_format_during_save(cx: &mut gpui::TestAppContext) {
},
);
- let worktree = project.update(cx, |project, _| {
- let mut worktrees = project.worktrees().collect::<Vec<_>>();
+ let worktree = project.update(cx, |project, cx| {
+ let mut worktrees = project.worktrees(cx).collect::<Vec<_>>();
assert_eq!(worktrees.len(), 1);
worktrees.pop().unwrap()
});
@@ -9319,7 +9319,7 @@ async fn test_on_type_formatting_not_triggered(cx: &mut gpui::TestAppContext) {
let worktree_id = workspace
.update(cx, |workspace, cx| {
workspace.project().update(cx, |project, cx| {
- project.worktrees().next().unwrap().read(cx).id()
+ project.worktrees(cx).next().unwrap().read(cx).id()
})
})
.unwrap();
@@ -2581,7 +2581,7 @@ pub mod tests {
);
let worktree_id = project.update(cx, |project, cx| {
- project.worktrees().next().unwrap().read(cx).id()
+ project.worktrees(cx).next().unwrap().read(cx).id()
});
let buffer_1 = project
@@ -2931,7 +2931,7 @@ pub mod tests {
);
let worktree_id = project.update(cx, |project, cx| {
- project.worktrees().next().unwrap().read(cx).id()
+ project.worktrees(cx).next().unwrap().read(cx).id()
});
let buffer_1 = project
@@ -1496,7 +1496,7 @@ async fn test_search_results_refreshed_on_adding_and_removing_worktrees(
let project = Project::test(app_state.fs.clone(), ["/test/project_1".as_ref()], cx).await;
let (workspace, cx) = cx.add_window_view(|cx| Workspace::test_new(project.clone(), cx));
let worktree_1_id = project.update(cx, |project, cx| {
- let worktree = project.worktrees().last().expect("worktree not found");
+ let worktree = project.worktrees(cx).last().expect("worktree not found");
worktree.read(cx).id()
});
@@ -32,7 +32,7 @@ impl Match {
path_match.path.join(suffix),
)
} else {
- (project.worktrees().next(), PathBuf::from(suffix))
+ (project.worktrees(cx).next(), PathBuf::from(suffix))
};
worktree.and_then(|worktree| worktree.read(cx).entry_for_path(path))
@@ -72,7 +72,7 @@ impl Match {
let worktree_id = if let Some(path_match) = &self.path_match {
WorktreeId::from_usize(path_match.worktree_id)
} else {
- project.worktrees().next()?.read(cx).id()
+ project.worktrees(cx).next()?.read(cx).id()
};
let path = PathBuf::from(self.relative_path());
@@ -84,7 +84,7 @@ impl Match {
}
fn existing_prefix(&self, project: &Project, cx: &WindowContext) -> Option<PathBuf> {
- let worktree = project.worktrees().next()?.read(cx);
+ let worktree = project.worktrees(cx).next()?.read(cx);
let mut prefix = PathBuf::new();
let parts = self.suffix.as_ref()?.split('/');
for part in parts {
@@ -75,6 +75,9 @@ impl RealGitRepository {
}
}
+// https://git-scm.com/book/en/v2/Git-Internals-Git-Objects
+const GIT_MODE_SYMLINK: u32 = 0o120000;
+
impl GitRepository for RealGitRepository {
fn reload_index(&self) {
if let Ok(mut index) = self.repository.lock().index() {
@@ -91,8 +94,8 @@ impl GitRepository for RealGitRepository {
check_path_to_repo_path_errors(relative_file_path)?;
let oid = match index.get_path(relative_file_path, STAGE_NORMAL) {
- Some(entry) => entry.id,
- None => return Ok(None),
+ Some(entry) if entry.mode != GIT_MODE_SYMLINK => entry.id,
+ _ => return Ok(None),
};
let content = repo.find_blob(oid)?.content().to_owned();
@@ -258,7 +258,7 @@ mod tests {
let (workspace, cx) = cx.add_window_view(|cx| Workspace::test_new(project.clone(), cx));
let worktree_id = workspace.update(cx, |workspace, cx| {
workspace.project().update(cx, |project, cx| {
- project.worktrees().next().unwrap().read(cx).id()
+ project.worktrees(cx).next().unwrap().read(cx).id()
})
});
let _buffer = project
@@ -551,7 +551,7 @@ impl LspLogView {
self.project
.read(cx)
.supplementary_language_servers()
- .filter_map(|(&server_id, (name, _))| {
+ .filter_map(|(&server_id, name)| {
let state = log_store.language_servers.get(&server_id)?;
Some(LogMenuItem {
server_id,
@@ -85,7 +85,7 @@ async fn test_lsp_logs(cx: &mut TestAppContext) {
server_name: LanguageServerName("the-rust-language-server".into()),
worktree_root_name: project
.read(cx)
- .worktrees()
+ .worktrees(cx)
.next()
.unwrap()
.read(cx)
@@ -321,7 +321,7 @@ mod tests {
let (workspace, cx) = cx.add_window_view(|cx| Workspace::test_new(project.clone(), cx));
let worktree_id = workspace.update(cx, |workspace, cx| {
workspace.project().update(cx, |project, cx| {
- project.worktrees().next().unwrap().read(cx).id()
+ project.worktrees(cx).next().unwrap().read(cx).id()
})
});
let _buffer = project
@@ -1,13 +1,16 @@
-use crate::ProjectPath;
-use anyhow::{anyhow, Context as _, Result};
+use crate::{
+ worktree_store::{WorktreeStore, WorktreeStoreEvent},
+ ProjectPath,
+};
+use anyhow::{anyhow, Result};
use collections::{hash_map, HashMap};
-use futures::{channel::oneshot, StreamExt as _};
+use futures::{channel::oneshot, stream::FuturesUnordered, StreamExt as _};
use gpui::{
AppContext, AsyncAppContext, Context as _, EventEmitter, Model, ModelContext, Task, WeakModel,
};
use language::{
- proto::{deserialize_version, serialize_version, split_operations},
- Buffer, Capability, Language, Operation,
+ proto::{deserialize_line_ending, deserialize_version, serialize_version, split_operations},
+ Buffer, Capability, Event as BufferEvent, Language, Operation,
};
use rpc::{
proto::{self, AnyProtoClient, PeerId},
@@ -16,11 +19,15 @@ use rpc::{
use std::{io, path::Path, sync::Arc};
use text::BufferId;
use util::{debug_panic, maybe, ResultExt as _};
-use worktree::{File, ProjectEntryId, RemoteWorktree, Worktree};
+use worktree::{
+ File, PathChange, ProjectEntryId, RemoteWorktree, UpdatedGitRepositoriesSet, Worktree,
+};
/// A set of open buffers.
pub struct BufferStore {
retain_buffers: bool,
+ #[allow(unused)]
+ worktree_store: Model<WorktreeStore>,
opened_buffers: HashMap<BufferId, OpenBuffer>,
local_buffer_ids_by_path: HashMap<ProjectPath, BufferId>,
local_buffer_ids_by_entry_id: HashMap<ProjectEntryId, BufferId>,
@@ -51,6 +58,12 @@ pub enum BufferStoreEvent {
has_changed_file: bool,
saved_version: clock::Global,
},
+ LocalBufferUpdated {
+ buffer: Model<Buffer>,
+ },
+ DiffBaseUpdated {
+ buffer: Model<Buffer>,
+ },
}
impl EventEmitter<BufferStoreEvent> for BufferStore {}
@@ -62,9 +75,22 @@ impl BufferStore {
/// and won't be released unless they are explicitly removed, or `retain_buffers`
/// is set to `false` via `set_retain_buffers`. Otherwise, buffers are stored as
/// weak handles.
- pub fn new(retain_buffers: bool) -> Self {
+ pub fn new(
+ worktree_store: Model<WorktreeStore>,
+ retain_buffers: bool,
+ cx: &mut ModelContext<Self>,
+ ) -> Self {
+ cx.subscribe(&worktree_store, |this, _, event, cx| match event {
+ WorktreeStoreEvent::WorktreeAdded(worktree) => {
+ this.subscribe_to_worktree(worktree, cx);
+ }
+ _ => {}
+ })
+ .detach();
+
Self {
retain_buffers,
+ worktree_store,
opened_buffers: Default::default(),
remote_buffer_listeners: Default::default(),
loading_remote_buffers_by_id: Default::default(),
@@ -77,7 +103,6 @@ impl BufferStore {
pub fn open_buffer(
&mut self,
project_path: ProjectPath,
- worktree: Model<Worktree>,
cx: &mut ModelContext<Self>,
) -> Task<Result<Model<Buffer>>> {
let existing_buffer = self.get_by_path(&project_path, cx);
@@ -85,6 +110,14 @@ impl BufferStore {
return Task::ready(Ok(existing_buffer));
}
+ let Some(worktree) = self
+ .worktree_store
+ .read(cx)
+ .worktree_for_id(project_path.worktree_id, cx)
+ else {
+ return Task::ready(Err(anyhow!("no such worktree")));
+ };
+
let loading_watch = match self.loading_buffers_by_path.entry(project_path.clone()) {
// If the given path is already being loaded, then wait for that existing
// task to complete and return the same buffer.
@@ -127,6 +160,131 @@ impl BufferStore {
})
}
+ fn subscribe_to_worktree(&mut self, worktree: &Model<Worktree>, cx: &mut ModelContext<Self>) {
+ cx.subscribe(worktree, |this, worktree, event, cx| {
+ if worktree.read(cx).is_local() {
+ match event {
+ worktree::Event::UpdatedEntries(changes) => {
+ this.local_worktree_entries_changed(&worktree, changes, cx);
+ }
+ worktree::Event::UpdatedGitRepositories(updated_repos) => {
+ this.local_worktree_git_repos_changed(worktree.clone(), updated_repos, cx)
+ }
+ _ => {}
+ }
+ }
+ })
+ .detach();
+ }
+
+ fn local_worktree_entries_changed(
+ &mut self,
+ worktree_handle: &Model<Worktree>,
+ changes: &[(Arc<Path>, ProjectEntryId, PathChange)],
+ cx: &mut ModelContext<Self>,
+ ) {
+ let snapshot = worktree_handle.read(cx).snapshot();
+ for (path, entry_id, _) in changes {
+ self.local_worktree_entry_changed(*entry_id, path, worktree_handle, &snapshot, cx);
+ }
+ }
+
+ fn local_worktree_git_repos_changed(
+ &mut self,
+ worktree_handle: Model<Worktree>,
+ changed_repos: &UpdatedGitRepositoriesSet,
+ cx: &mut ModelContext<Self>,
+ ) {
+ debug_assert!(worktree_handle.read(cx).is_local());
+
+ // Identify the loading buffers whose containing repository that has changed.
+ let future_buffers = self
+ .loading_buffers()
+ .filter_map(|(project_path, receiver)| {
+ if project_path.worktree_id != worktree_handle.read(cx).id() {
+ return None;
+ }
+ let path = &project_path.path;
+ changed_repos
+ .iter()
+ .find(|(work_dir, _)| path.starts_with(work_dir))?;
+ let path = path.clone();
+ Some(async move {
+ Self::wait_for_loading_buffer(receiver)
+ .await
+ .ok()
+ .map(|buffer| (buffer, path))
+ })
+ })
+ .collect::<FuturesUnordered<_>>();
+
+ // Identify the current buffers whose containing repository has changed.
+ let current_buffers = self
+ .buffers()
+ .filter_map(|buffer| {
+ let file = File::from_dyn(buffer.read(cx).file())?;
+ if file.worktree != worktree_handle {
+ return None;
+ }
+ changed_repos
+ .iter()
+ .find(|(work_dir, _)| file.path.starts_with(work_dir))?;
+ Some((buffer, file.path.clone()))
+ })
+ .collect::<Vec<_>>();
+
+ if future_buffers.len() + current_buffers.len() == 0 {
+ return;
+ }
+
+ cx.spawn(move |this, mut cx| async move {
+ // Wait for all of the buffers to load.
+ let future_buffers = future_buffers.collect::<Vec<_>>().await;
+
+ // Reload the diff base for every buffer whose containing git repository has changed.
+ let snapshot =
+ worktree_handle.update(&mut cx, |tree, _| tree.as_local().unwrap().snapshot())?;
+ let diff_bases_by_buffer = cx
+ .background_executor()
+ .spawn(async move {
+ let mut diff_base_tasks = future_buffers
+ .into_iter()
+ .flatten()
+ .chain(current_buffers)
+ .filter_map(|(buffer, path)| {
+ let (repo_entry, local_repo_entry) = snapshot.repo_for_path(&path)?;
+ let relative_path = repo_entry.relativize(&snapshot, &path).ok()?;
+ Some(async move {
+ let base_text =
+ local_repo_entry.repo().load_index_text(&relative_path);
+ Some((buffer, base_text))
+ })
+ })
+ .collect::<FuturesUnordered<_>>();
+
+ let mut diff_bases = Vec::with_capacity(diff_base_tasks.len());
+ while let Some(diff_base) = diff_base_tasks.next().await {
+ if let Some(diff_base) = diff_base {
+ diff_bases.push(diff_base);
+ }
+ }
+ diff_bases
+ })
+ .await;
+
+ this.update(&mut cx, |_, cx| {
+ // Assign the new diff bases on all of the buffers.
+ for (buffer, diff_base) in diff_bases_by_buffer {
+ buffer.update(cx, |buffer, cx| {
+ buffer.set_diff_base(diff_base.clone(), cx);
+ });
+ cx.emit(BufferStoreEvent::DiffBaseUpdated { buffer })
+ }
+ })
+ })
+ .detach_and_log_err(cx);
+ }
+
fn open_local_buffer_internal(
&mut self,
path: Arc<Path>,
@@ -265,9 +423,16 @@ impl BufferStore {
&mut self,
buffer: Model<Buffer>,
path: ProjectPath,
- worktree: Model<Worktree>,
cx: &mut ModelContext<Self>,
) -> Task<Result<()>> {
+ let Some(worktree) = self
+ .worktree_store
+ .read(cx)
+ .worktree_for_id(path.worktree_id, cx)
+ else {
+ return Task::ready(Err(anyhow!("no such worktree")));
+ };
+
let old_file = File::from_dyn(buffer.read(cx).file())
.cloned()
.map(Arc::new);
@@ -411,6 +576,7 @@ impl BufferStore {
}
}
+ cx.subscribe(&buffer, Self::on_buffer_event).detach();
cx.emit(BufferStoreEvent::BufferAdded(buffer));
Ok(())
}
@@ -461,31 +627,6 @@ impl BufferStore {
.or_else(|| self.loading_remote_buffers_by_id.get(&buffer_id).cloned())
}
- fn get_or_remove_by_path(
- &mut self,
- entry_id: ProjectEntryId,
- project_path: &ProjectPath,
- ) -> Option<(BufferId, Model<Buffer>)> {
- let buffer_id = match self.local_buffer_ids_by_entry_id.get(&entry_id) {
- Some(&buffer_id) => buffer_id,
- None => match self.local_buffer_ids_by_path.get(project_path) {
- Some(&buffer_id) => buffer_id,
- None => {
- return None;
- }
- },
- };
- let buffer = if let Some(buffer) = self.get(buffer_id) {
- buffer
- } else {
- self.opened_buffers.remove(&buffer_id);
- self.local_buffer_ids_by_path.remove(project_path);
- self.local_buffer_ids_by_entry_id.remove(&entry_id);
- return None;
- };
- Some((buffer_id, buffer))
- }
-
pub fn wait_for_remote_buffer(
&mut self,
id: BufferId,
@@ -561,25 +702,48 @@ impl BufferStore {
.retain(|_, buffer| !matches!(buffer, OpenBuffer::Operations(_)));
}
- pub fn file_changed(
+ fn on_buffer_event(
+ &mut self,
+ buffer: Model<Buffer>,
+ event: &BufferEvent,
+ cx: &mut ModelContext<Self>,
+ ) {
+ match event {
+ BufferEvent::FileHandleChanged => {
+ self.buffer_changed_file(buffer, cx);
+ }
+ _ => {}
+ }
+ }
+
+ fn local_worktree_entry_changed(
&mut self,
- path: Arc<Path>,
entry_id: ProjectEntryId,
- worktree_handle: &Model<worktree::Worktree>,
+ path: &Arc<Path>,
+ worktree: &Model<worktree::Worktree>,
snapshot: &worktree::Snapshot,
cx: &mut ModelContext<Self>,
- ) -> Option<(Model<Buffer>, Arc<File>, Arc<File>)> {
- let (buffer_id, buffer) = self.get_or_remove_by_path(
- entry_id,
- &ProjectPath {
- worktree_id: snapshot.id(),
- path,
- },
- )?;
+ ) -> Option<()> {
+ let project_path = ProjectPath {
+ worktree_id: snapshot.id(),
+ path: path.clone(),
+ };
+ let buffer_id = match self.local_buffer_ids_by_entry_id.get(&entry_id) {
+ Some(&buffer_id) => buffer_id,
+ None => self.local_buffer_ids_by_path.get(&project_path).copied()?,
+ };
+ let buffer = if let Some(buffer) = self.get(buffer_id) {
+ buffer
+ } else {
+ self.opened_buffers.remove(&buffer_id);
+ self.local_buffer_ids_by_path.remove(&project_path);
+ self.local_buffer_ids_by_entry_id.remove(&entry_id);
+ return None;
+ };
- let result = buffer.update(cx, |buffer, cx| {
+ let (old_file, new_file) = buffer.update(cx, |buffer, cx| {
let old_file = File::from_dyn(buffer.file())?;
- if old_file.worktree != *worktree_handle {
+ if old_file.worktree != *worktree {
return None;
}
@@ -592,7 +756,7 @@ impl BufferStore {
entry_id: Some(entry.id),
mtime: entry.mtime,
path: entry.path.clone(),
- worktree: worktree_handle.clone(),
+ worktree: worktree.clone(),
is_deleted: false,
is_private: entry.is_private,
}
@@ -602,7 +766,7 @@ impl BufferStore {
entry_id: Some(entry.id),
mtime: entry.mtime,
path: entry.path.clone(),
- worktree: worktree_handle.clone(),
+ worktree: worktree.clone(),
is_deleted: false,
is_private: entry.is_private,
}
@@ -612,7 +776,7 @@ impl BufferStore {
entry_id: old_file.entry_id,
path: old_file.path.clone(),
mtime: old_file.mtime,
- worktree: worktree_handle.clone(),
+ worktree: worktree.clone(),
is_deleted: true,
is_private: old_file.is_private,
}
@@ -625,47 +789,42 @@ impl BufferStore {
let old_file = Arc::new(old_file.clone());
let new_file = Arc::new(new_file);
buffer.file_updated(new_file.clone(), cx);
- Some((cx.handle(), old_file, new_file))
- });
+ Some((old_file, new_file))
+ })?;
- if let Some((buffer, old_file, new_file)) = &result {
- if new_file.path != old_file.path {
- self.local_buffer_ids_by_path.remove(&ProjectPath {
- path: old_file.path.clone(),
- worktree_id: old_file.worktree_id(cx),
- });
- self.local_buffer_ids_by_path.insert(
- ProjectPath {
- worktree_id: new_file.worktree_id(cx),
- path: new_file.path.clone(),
- },
- buffer_id,
- );
- cx.emit(BufferStoreEvent::BufferChangedFilePath {
- buffer: buffer.clone(),
- old_file: Some(old_file.clone()),
- });
- }
+ if new_file.path != old_file.path {
+ self.local_buffer_ids_by_path.remove(&ProjectPath {
+ path: old_file.path.clone(),
+ worktree_id: old_file.worktree_id(cx),
+ });
+ self.local_buffer_ids_by_path.insert(
+ ProjectPath {
+ worktree_id: new_file.worktree_id(cx),
+ path: new_file.path.clone(),
+ },
+ buffer_id,
+ );
+ cx.emit(BufferStoreEvent::BufferChangedFilePath {
+ buffer: buffer.clone(),
+ old_file: Some(old_file.clone()),
+ });
+ }
- if new_file.entry_id != old_file.entry_id {
- if let Some(entry_id) = old_file.entry_id {
- self.local_buffer_ids_by_entry_id.remove(&entry_id);
- }
- if let Some(entry_id) = new_file.entry_id {
- self.local_buffer_ids_by_entry_id
- .insert(entry_id, buffer_id);
- }
+ if new_file.entry_id != old_file.entry_id {
+ if let Some(entry_id) = old_file.entry_id {
+ self.local_buffer_ids_by_entry_id.remove(&entry_id);
+ }
+ if let Some(entry_id) = new_file.entry_id {
+ self.local_buffer_ids_by_entry_id
+ .insert(entry_id, buffer_id);
}
}
- result
+ cx.emit(BufferStoreEvent::LocalBufferUpdated { buffer });
+ None
}
- pub fn buffer_changed_file(
- &mut self,
- buffer: Model<Buffer>,
- cx: &mut AppContext,
- ) -> Option<()> {
+ fn buffer_changed_file(&mut self, buffer: Model<Buffer>, cx: &mut AppContext) -> Option<()> {
let file = File::from_dyn(buffer.read(cx).file())?;
let remote_id = buffer.read(cx).remote_id();
@@ -862,7 +1021,6 @@ impl BufferStore {
pub async fn handle_save_buffer(
this: Model<Self>,
project_id: u64,
- worktree: Option<Model<Worktree>>,
envelope: TypedEnvelope<proto::SaveBuffer>,
mut cx: AsyncAppContext,
) -> Result<proto::BufferSaved> {
@@ -876,10 +1034,9 @@ impl BufferStore {
let buffer_id = buffer.update(&mut cx, |buffer, _| buffer.remote_id())?;
if let Some(new_path) = envelope.payload.new_path {
- let worktree = worktree.context("no such worktree")?;
let new_path = ProjectPath::from_proto(new_path);
this.update(&mut cx, |this, cx| {
- this.save_buffer_as(buffer.clone(), new_path, worktree, cx)
+ this.save_buffer_as(buffer.clone(), new_path, cx)
})?
.await?;
} else {
@@ -895,6 +1052,44 @@ impl BufferStore {
})
}
+ pub async fn handle_buffer_saved(
+ this: Model<Self>,
+ envelope: TypedEnvelope<proto::BufferSaved>,
+ mut cx: AsyncAppContext,
+ ) -> Result<()> {
+ let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
+ let version = deserialize_version(&envelope.payload.version);
+ let mtime = envelope.payload.mtime.map(|time| time.into());
+ this.update(&mut cx, |this, cx| {
+ if let Some(buffer) = this.get_possibly_incomplete(buffer_id) {
+ buffer.update(cx, |buffer, cx| {
+ buffer.did_save(version, mtime, cx);
+ });
+ }
+ })
+ }
+
+ pub async fn handle_buffer_reloaded(
+ this: Model<Self>,
+ envelope: TypedEnvelope<proto::BufferReloaded>,
+ mut cx: AsyncAppContext,
+ ) -> Result<()> {
+ let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
+ let version = deserialize_version(&envelope.payload.version);
+ let mtime = envelope.payload.mtime.map(|time| time.into());
+ let line_ending = deserialize_line_ending(
+ proto::LineEnding::from_i32(envelope.payload.line_ending)
+ .ok_or_else(|| anyhow!("missing line ending"))?,
+ );
+ this.update(&mut cx, |this, cx| {
+ if let Some(buffer) = this.get_possibly_incomplete(buffer_id) {
+ buffer.update(cx, |buffer, cx| {
+ buffer.did_reload(version, line_ending, mtime, cx);
+ });
+ }
+ })
+ }
+
pub async fn wait_for_loading_buffer(
mut receiver: postage::watch::Receiver<Option<Result<Model<Buffer>, Arc<anyhow::Error>>>>,
) -> Result<Model<Buffer>, Arc<anyhow::Error>> {
@@ -85,7 +85,7 @@ impl Manager {
Some(proto::RejoinProject {
id: project_id,
worktrees: project
- .worktrees()
+ .worktrees(cx)
.map(|worktree| {
let worktree = worktree.read(cx);
proto::RejoinWorktree {
@@ -1,26 +1,25 @@
-use std::time::Duration;
-
use futures::{channel::oneshot, FutureExt};
use gpui::{ModelContext, Task};
+use std::{marker::PhantomData, time::Duration};
-use crate::Project;
-
-pub struct DebouncedDelay {
+pub struct DebouncedDelay<E: 'static> {
task: Option<Task<()>>,
cancel_channel: Option<oneshot::Sender<()>>,
+ _phantom_data: PhantomData<E>,
}
-impl DebouncedDelay {
- pub fn new() -> DebouncedDelay {
- DebouncedDelay {
+impl<E: 'static> DebouncedDelay<E> {
+ pub fn new() -> Self {
+ Self {
task: None,
cancel_channel: None,
+ _phantom_data: PhantomData,
}
}
- pub fn fire_new<F>(&mut self, delay: Duration, cx: &mut ModelContext<Project>, func: F)
+ pub fn fire_new<F>(&mut self, delay: Duration, cx: &mut ModelContext<E>, func: F)
where
- F: 'static + Send + FnOnce(&mut Project, &mut ModelContext<Project>) -> Task<()>,
+ F: 'static + Send + FnOnce(&mut E, &mut ModelContext<E>) -> Task<()>,
{
if let Some(channel) = self.cancel_channel.take() {
_ = channel.send(());
@@ -8,6 +8,7 @@ pub mod project_settings;
pub mod search;
mod task_inventory;
pub mod terminals;
+pub mod worktree_store;
#[cfg(test)]
mod project_tests;
@@ -47,8 +48,8 @@ use language::{
},
markdown, point_to_lsp, prepare_completion_documentation,
proto::{
- deserialize_anchor, deserialize_line_ending, deserialize_version, serialize_anchor,
- serialize_line_ending, serialize_version, split_operations,
+ deserialize_anchor, deserialize_version, serialize_anchor, serialize_line_ending,
+ serialize_version, split_operations,
},
range_from_lsp, Bias, Buffer, BufferSnapshot, CachedLspAdapter, Capability, CodeLabel,
ContextProvider, Diagnostic, DiagnosticEntry, DiagnosticSet, Diff, Documentation,
@@ -119,6 +120,7 @@ use util::{
NumericPrefixWithSuffix, ResultExt, TryFutureExt as _,
};
use worktree::{CreatedEntry, Snapshot, Traversal};
+use worktree_store::{WorktreeStore, WorktreeStoreEvent};
use yarn::YarnPathStore;
pub use fs::*;
@@ -166,8 +168,6 @@ pub enum OpenedBufferEvent {
///
/// Can be either local (for the project opened on the same host) or remote (for collab projects, browsed by multiple remote users).
pub struct Project {
- worktrees: Vec<WorktreeHandle>,
- worktrees_reordered: bool,
active_entry: Option<ProjectEntryId>,
buffer_ordered_messages_tx: mpsc::UnboundedSender<BufferOrderedMessage>,
languages: Arc<LanguageRegistry>,
@@ -203,6 +203,7 @@ pub struct Project {
client_state: ProjectClientState,
collaborators: HashMap<proto::PeerId, Collaborator>,
client_subscriptions: Vec<client::Subscription>,
+ worktree_store: Model<WorktreeStore>,
buffer_store: Model<BufferStore>,
_subscriptions: Vec<gpui::Subscription>,
shared_buffers: HashMap<proto::PeerId, HashSet<BufferId>>,
@@ -212,7 +213,7 @@ pub struct Project {
buffer_snapshots: HashMap<BufferId, HashMap<LanguageServerId, Vec<LspBufferSnapshot>>>, // buffer_id -> server_id -> vec of snapshots
buffers_being_formatted: HashSet<BufferId>,
buffers_needing_diff: HashSet<WeakModel<Buffer>>,
- git_diff_debouncer: DebouncedDelay,
+ git_diff_debouncer: DebouncedDelay<Self>,
nonce: u128,
_maintain_buffer_languages: Task<()>,
_maintain_workspace_config: Task<Result<()>>,
@@ -263,12 +264,6 @@ enum LocalProjectUpdate {
},
}
-#[derive(Clone)]
-enum WorktreeHandle {
- Strong(Model<Worktree>),
- Weak(WeakModel<Worktree>),
-}
-
#[derive(Debug)]
enum ProjectClientState {
Local,
@@ -765,17 +760,21 @@ impl Project {
let snippets =
SnippetProvider::new(fs.clone(), BTreeSet::from_iter([global_snippets_dir]), cx);
- let buffer_store = cx.new_model(|_| BufferStore::new(false));
+ let worktree_store = cx.new_model(|_| WorktreeStore::new(false));
+ cx.subscribe(&worktree_store, Self::on_worktree_store_event)
+ .detach();
+
+ let buffer_store =
+ cx.new_model(|cx| BufferStore::new(worktree_store.clone(), false, cx));
cx.subscribe(&buffer_store, Self::on_buffer_store_event)
.detach();
let yarn = YarnPathStore::new(fs.clone(), cx);
Self {
- worktrees: Vec::new(),
- worktrees_reordered: false,
buffer_ordered_messages_tx: tx,
collaborators: Default::default(),
+ worktree_store,
buffer_store,
shared_buffers: Default::default(),
loading_worktrees: Default::default(),
@@ -843,6 +842,8 @@ impl Project {
this.update(cx, |this, cx| {
ssh_session.add_message_handler(cx.weak_model(), Self::handle_update_worktree);
ssh_session.add_message_handler(cx.weak_model(), Self::handle_create_buffer_for_peer);
+ ssh_session.add_message_handler(cx.weak_model(), Self::handle_update_buffer_file);
+ ssh_session.add_message_handler(cx.weak_model(), Self::handle_update_diff_base);
this.ssh_session = Some(ssh_session);
});
this
@@ -926,15 +927,17 @@ impl Project {
cx.spawn(move |this, cx| Self::send_buffer_ordered_messages(this, rx, cx))
.detach();
- let buffer_store = cx.new_model(|_| BufferStore::new(true));
+ let worktree_store = cx.new_model(|_| WorktreeStore::new(true));
+
+ let buffer_store =
+ cx.new_model(|cx| BufferStore::new(worktree_store.clone(), true, cx));
cx.subscribe(&buffer_store, Self::on_buffer_store_event)
.detach();
let mut this = Self {
- worktrees: Vec::new(),
- worktrees_reordered: false,
buffer_ordered_messages_tx: tx,
buffer_store,
+ worktree_store,
shared_buffers: Default::default(),
loading_worktrees: Default::default(),
active_entry: None,
@@ -1407,15 +1410,18 @@ impl Project {
self.collaborators.values().find(|c| c.replica_id == 0)
}
- pub fn set_worktrees_reordered(&mut self, worktrees_reordered: bool) {
- self.worktrees_reordered = worktrees_reordered;
+ pub fn set_worktrees_reordered(&mut self, worktrees_reordered: bool, cx: &mut AppContext) {
+ self.worktree_store.update(cx, |store, _| {
+ store.set_worktrees_reordered(worktrees_reordered);
+ });
}
/// Collect all worktrees, including ones that don't appear in the project panel
- pub fn worktrees(&self) -> impl '_ + DoubleEndedIterator<Item = Model<Worktree>> {
- self.worktrees
- .iter()
- .filter_map(move |worktree| worktree.upgrade())
+ pub fn worktrees<'a>(
+ &self,
+ cx: &'a AppContext,
+ ) -> impl 'a + DoubleEndedIterator<Item = Model<Worktree>> {
+ self.worktree_store.read(cx).worktrees()
}
/// Collect all user-visible worktrees, the ones that appear in the project panel.
@@ -1423,8 +1429,7 @@ impl Project {
&'a self,
cx: &'a AppContext,
) -> impl 'a + DoubleEndedIterator<Item = Model<Worktree>> {
- self.worktrees()
- .filter(|worktree| worktree.read(cx).is_visible())
+ self.worktree_store.read(cx).visible_worktrees(cx)
}
pub fn worktree_root_names<'a>(&'a self, cx: &'a AppContext) -> impl Iterator<Item = &'a str> {
@@ -1433,8 +1438,7 @@ impl Project {
}
pub fn worktree_for_id(&self, id: WorktreeId, cx: &AppContext) -> Option<Model<Worktree>> {
- self.worktrees()
- .find(|worktree| worktree.read(cx).id() == id)
+ self.worktree_store.read(cx).worktree_for_id(id, cx)
}
pub fn worktree_for_entry(
@@ -1442,8 +1446,9 @@ impl Project {
entry_id: ProjectEntryId,
cx: &AppContext,
) -> Option<Model<Worktree>> {
- self.worktrees()
- .find(|worktree| worktree.read(cx).contains_entry(entry_id))
+ self.worktree_store
+ .read(cx)
+ .worktree_for_entry(entry_id, cx)
}
pub fn worktree_id_for_entry(
@@ -1476,7 +1481,7 @@ impl Project {
}
pub fn visibility_for_path(&self, path: &Path, cx: &AppContext) -> Option<bool> {
- self.worktrees()
+ self.worktrees(cx)
.filter_map(|worktree| {
let worktree = worktree.read(cx);
worktree
@@ -1539,7 +1544,6 @@ impl Project {
cx: &mut ModelContext<Self>,
) -> Option<Task<Result<()>>> {
let worktree = self.worktree_for_entry(entry_id, cx)?;
- cx.emit(Event::DeletedEntry(entry_id));
worktree.update(cx, |worktree, cx| {
worktree.delete_entry(entry_id, trash, cx)
})
@@ -1577,17 +1581,9 @@ impl Project {
self.buffer_store.update(cx, |buffer_store, cx| {
buffer_store.set_retain_buffers(true, cx)
});
-
- for worktree_handle in self.worktrees.iter_mut() {
- match worktree_handle {
- WorktreeHandle::Strong(_) => {}
- WorktreeHandle::Weak(worktree) => {
- if let Some(worktree) = worktree.upgrade() {
- *worktree_handle = WorktreeHandle::Strong(worktree);
- }
- }
- }
- }
+ self.worktree_store.update(cx, |store, cx| {
+ store.set_shared(true, cx);
+ });
for (server_id, status) in &self.language_server_statuses {
self.client
@@ -1602,7 +1598,7 @@ impl Project {
}
let store = cx.global::<SettingsStore>();
- for worktree in self.worktrees() {
+ for worktree in self.worktrees(cx) {
let worktree_id = worktree.read(cx).id().to_proto();
for (path, content) in store.local_settings(worktree.entity_id().as_u64() as usize) {
self.client
@@ -1625,8 +1621,8 @@ impl Project {
while let Some(update) = updates_rx.next().await {
match update {
LocalProjectUpdate::WorktreesChanged => {
- let worktrees = this.update(&mut cx, |this, _cx| {
- this.worktrees().collect::<Vec<_>>()
+ let worktrees = this.update(&mut cx, |this, cx| {
+ this.worktrees(cx).collect::<Vec<_>>()
})?;
let update_project = this
@@ -1732,11 +1728,13 @@ impl Project {
cx: &mut ModelContext<Self>,
) -> Result<()> {
cx.update_global::<SettingsStore, _>(|store, cx| {
- for worktree in &self.worktrees {
- store
- .clear_local_settings(worktree.handle_id(), cx)
- .log_err();
- }
+ self.worktree_store.update(cx, |worktree_store, cx| {
+ for worktree in worktree_store.worktrees() {
+ store
+ .clear_local_settings(worktree.entity_id().as_u64() as usize, cx)
+ .log_err();
+ }
+ });
});
self.join_project_response_message_id = message_id;
@@ -1788,29 +1786,17 @@ impl Project {
self.collaborators.clear();
self.shared_buffers.clear();
self.client_subscriptions.clear();
-
- for worktree_handle in self.worktrees.iter_mut() {
- if let WorktreeHandle::Strong(worktree) = worktree_handle {
- let is_visible = worktree.update(cx, |worktree, _| {
- worktree.stop_observing_updates();
- worktree.is_visible()
- });
- if !is_visible {
- *worktree_handle = WorktreeHandle::Weak(worktree.downgrade());
- }
- }
- }
-
+ self.worktree_store.update(cx, |store, cx| {
+ store.set_shared(false, cx);
+ });
self.buffer_store.update(cx, |buffer_store, cx| {
buffer_store.set_retain_buffers(false, cx)
});
-
self.client
.send(proto::UnshareProject {
project_id: remote_id,
})
.ok();
-
Ok(())
} else {
Err(anyhow!("attempted to unshare an unshared project"))
@@ -1852,19 +1838,10 @@ impl Project {
} = &mut self.client_state
{
*sharing_has_stopped = true;
-
self.collaborators.clear();
-
- for worktree in &self.worktrees {
- if let Some(worktree) = worktree.upgrade() {
- worktree.update(cx, |worktree, _| {
- if let Some(worktree) = worktree.as_remote_mut() {
- worktree.disconnected_from_host();
- }
- });
- }
- }
-
+ self.worktree_store.update(cx, |store, cx| {
+ store.disconnected_from_host(cx);
+ });
self.buffer_store.update(cx, |buffer_store, cx| {
buffer_store.disconnected_from_host(cx)
});
@@ -1951,30 +1928,6 @@ impl Project {
})
}
- pub fn open_buffer_for_full_path(
- &mut self,
- path: &Path,
- cx: &mut ModelContext<Self>,
- ) -> Task<Result<Model<Buffer>>> {
- if let Some(worktree_name) = path.components().next() {
- let worktree = self.worktrees().find(|worktree| {
- OsStr::new(worktree.read(cx).root_name()) == worktree_name.as_os_str()
- });
- if let Some(worktree) = worktree {
- let worktree = worktree.read(cx);
- let worktree_root_path = Path::new(worktree.root_name());
- if let Ok(path) = path.strip_prefix(worktree_root_path) {
- let project_path = ProjectPath {
- worktree_id: worktree.id(),
- path: path.into(),
- };
- return self.open_buffer(project_path, cx);
- }
- }
- }
- Task::ready(Err(anyhow!("buffer not found for {:?}", path)))
- }
-
pub fn open_local_buffer(
&mut self,
abs_path: impl AsRef<Path>,
@@ -1992,23 +1945,15 @@ impl Project {
path: impl Into<ProjectPath>,
cx: &mut ModelContext<Self>,
) -> Task<Result<Model<Buffer>>> {
- let project_path = path.into();
- let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
- worktree
- } else {
- return Task::ready(Err(anyhow!("no such worktree")));
- };
-
if self.is_remote() && self.is_disconnected() {
return Task::ready(Err(anyhow!(ErrorCode::Disconnected)));
}
self.buffer_store.update(cx, |buffer_store, cx| {
- buffer_store.open_buffer(project_path, worktree, cx)
+ buffer_store.open_buffer(path.into(), cx)
})
}
- /// LanguageServerName is owned, because it is inserted into a map
pub fn open_local_buffer_via_lsp(
&mut self,
mut abs_path: lsp::Url,
@@ -2138,11 +2083,8 @@ impl Project {
path: ProjectPath,
cx: &mut ModelContext<Self>,
) -> Task<Result<()>> {
- let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) else {
- return Task::ready(Err(anyhow!("worktree does not exist")));
- };
self.buffer_store.update(cx, |buffer_store, cx| {
- buffer_store.save_buffer_as(buffer.clone(), path, worktree, cx)
+ buffer_store.save_buffer_as(buffer.clone(), path, cx)
})
}
@@ -2442,6 +2384,36 @@ impl Project {
self.detect_language_for_buffer(&buffer, cx);
self.register_buffer_with_language_servers(&buffer, cx);
}
+ BufferStoreEvent::LocalBufferUpdated { buffer } => {
+ let buffer = buffer.read(cx);
+ let buffer_id = buffer.remote_id();
+ let Some(new_file) = buffer.file() else {
+ return;
+ };
+ if let Some(project_id) = self.remote_id() {
+ self.client
+ .send(proto::UpdateBufferFile {
+ project_id,
+ buffer_id: buffer_id.into(),
+ file: Some(new_file.to_proto(cx)),
+ })
+ .log_err();
+ }
+ }
+ BufferStoreEvent::DiffBaseUpdated { buffer } => {
+ let buffer = buffer.read(cx);
+ let buffer_id = buffer.remote_id();
+ let diff_base = buffer.diff_base();
+ if let Some(project_id) = self.remote_id() {
+ self.client
+ .send(proto::UpdateDiffBase {
+ project_id,
+ buffer_id: buffer_id.to_proto(),
+ diff_base: diff_base.map(|b| b.to_string()),
+ })
+ .log_err();
+ }
+ }
BufferStoreEvent::BufferSaved {
buffer: buffer_handle,
has_changed_file,
@@ -2475,6 +2447,19 @@ impl Project {
}
}
+ fn on_worktree_store_event(
+ &mut self,
+ _: Model<WorktreeStore>,
+ event: &WorktreeStoreEvent,
+ cx: &mut ModelContext<Self>,
+ ) {
+ match event {
+ WorktreeStoreEvent::WorktreeAdded(_) => cx.emit(Event::WorktreeAdded),
+ WorktreeStoreEvent::WorktreeRemoved(_, id) => cx.emit(Event::WorktreeRemoved(*id)),
+ WorktreeStoreEvent::WorktreeOrderChanged => cx.emit(Event::WorktreeOrderChanged),
+ }
+ }
+
fn on_buffer_event(
&mut self,
buffer: Model<Buffer>,
@@ -2652,11 +2637,6 @@ impl Project {
}
}
- BufferEvent::FileHandleChanged => {
- self.buffer_store.update(cx, |buffer_store, cx| {
- buffer_store.buffer_changed_file(buffer, cx)
- })?;
- }
_ => {}
}
@@ -3172,12 +3152,12 @@ impl Project {
_ => None,
};
- for worktree in &self.worktrees {
- if let Some(worktree) = worktree.upgrade() {
+ self.worktree_store.update(cx, |store, cx| {
+ for worktree in store.worktrees() {
let key = (worktree.read(cx).id(), adapter.name.clone());
self.language_server_ids.remove(&key);
}
- }
+ });
Some(cx.spawn(move |this, mut cx| async move {
if let Some(task) = existing_server.and_then(|server| server.shutdown()) {
@@ -3199,16 +3179,8 @@ impl Project {
task.await;
this.update(&mut cx, |this, cx| {
- let worktrees = this.worktrees.clone();
- for worktree in worktrees {
- if let Some(worktree) = worktree.upgrade() {
- this.start_language_server(
- &worktree,
- adapter.clone(),
- language.clone(),
- cx,
- );
- }
+ for worktree in this.worktree_store.read(cx).worktrees().collect::<Vec<_>>() {
+ this.start_language_server(&worktree, adapter.clone(), language.clone(), cx);
}
})
.ok();
@@ -4424,47 +4396,45 @@ impl Project {
let mut builders = HashMap::default();
for watcher in watchers.values().flatten() {
- for worktree in &self.worktrees {
- if let Some(worktree) = worktree.upgrade() {
- let glob_is_inside_worktree = worktree.update(cx, |tree, _| {
- if let Some(abs_path) = tree.abs_path().to_str() {
- let relative_glob_pattern = match &watcher.glob_pattern {
- lsp::GlobPattern::String(s) => Some(
- s.strip_prefix(abs_path)
- .unwrap_or(s)
- .strip_prefix(std::path::MAIN_SEPARATOR)
- .unwrap_or(s),
- ),
- lsp::GlobPattern::Relative(rp) => {
- let base_uri = match &rp.base_uri {
- lsp::OneOf::Left(workspace_folder) => &workspace_folder.uri,
- lsp::OneOf::Right(base_uri) => base_uri,
- };
- base_uri.to_file_path().ok().and_then(|file_path| {
- (file_path.to_str() == Some(abs_path))
- .then_some(rp.pattern.as_str())
- })
- }
- };
- if let Some(relative_glob_pattern) = relative_glob_pattern {
- let literal_prefix = glob_literal_prefix(relative_glob_pattern);
- tree.as_local_mut()
- .unwrap()
- .add_path_prefix_to_scan(Path::new(literal_prefix).into());
- if let Some(glob) = Glob::new(relative_glob_pattern).log_err() {
- builders
- .entry(tree.id())
- .or_insert_with(|| GlobSetBuilder::new())
- .add(glob);
- }
- return true;
+ for worktree in self.worktree_store.read(cx).worktrees().collect::<Vec<_>>() {
+ let glob_is_inside_worktree = worktree.update(cx, |tree, _| {
+ if let Some(abs_path) = tree.abs_path().to_str() {
+ let relative_glob_pattern = match &watcher.glob_pattern {
+ lsp::GlobPattern::String(s) => Some(
+ s.strip_prefix(abs_path)
+ .unwrap_or(s)
+ .strip_prefix(std::path::MAIN_SEPARATOR)
+ .unwrap_or(s),
+ ),
+ lsp::GlobPattern::Relative(rp) => {
+ let base_uri = match &rp.base_uri {
+ lsp::OneOf::Left(workspace_folder) => &workspace_folder.uri,
+ lsp::OneOf::Right(base_uri) => base_uri,
+ };
+ base_uri.to_file_path().ok().and_then(|file_path| {
+ (file_path.to_str() == Some(abs_path))
+ .then_some(rp.pattern.as_str())
+ })
}
+ };
+ if let Some(relative_glob_pattern) = relative_glob_pattern {
+ let literal_prefix = glob_literal_prefix(relative_glob_pattern);
+ tree.as_local_mut()
+ .unwrap()
+ .add_path_prefix_to_scan(Path::new(literal_prefix).into());
+ if let Some(glob) = Glob::new(relative_glob_pattern).log_err() {
+ builders
+ .entry(tree.id())
+ .or_insert_with(|| GlobSetBuilder::new())
+ .add(glob);
+ }
+ return true;
}
- false
- });
- if glob_is_inside_worktree {
- break;
}
+ false
+ });
+ if glob_is_inside_worktree {
+ break;
}
}
}
@@ -7712,44 +7682,9 @@ impl Project {
destination: WorktreeId,
cx: &mut ModelContext<'_, Self>,
) -> Result<()> {
- if source == destination {
- return Ok(());
- }
-
- let mut source_index = None;
- let mut destination_index = None;
- for (i, worktree) in self.worktrees.iter().enumerate() {
- if let Some(worktree) = worktree.upgrade() {
- let worktree_id = worktree.read(cx).id();
- if worktree_id == source {
- source_index = Some(i);
- if destination_index.is_some() {
- break;
- }
- } else if worktree_id == destination {
- destination_index = Some(i);
- if source_index.is_some() {
- break;
- }
- }
- }
- }
-
- let source_index =
- source_index.with_context(|| format!("Missing worktree for id {source}"))?;
- let destination_index =
- destination_index.with_context(|| format!("Missing worktree for id {destination}"))?;
-
- if source_index == destination_index {
- return Ok(());
- }
-
- let worktree_to_move = self.worktrees.remove(source_index);
- self.worktrees.insert(destination_index, worktree_to_move);
- self.worktrees_reordered = true;
- cx.emit(Event::WorktreeOrderChanged);
- cx.notify();
- Ok(())
+ self.worktree_store.update(cx, |worktree_store, cx| {
+ worktree_store.move_worktree(source, destination, cx)
+ })
}
pub fn find_or_create_worktree(
@@ -7773,8 +7708,8 @@ impl Project {
abs_path: &Path,
cx: &AppContext,
) -> Option<(Model<Worktree>, PathBuf)> {
- for tree in &self.worktrees {
- if let Some(tree) = tree.upgrade() {
+ self.worktree_store.read_with(cx, |worktree_store, cx| {
+ for tree in worktree_store.worktrees() {
if let Some(relative_path) = tree
.read(cx)
.as_local()
@@ -7783,8 +7718,8 @@ impl Project {
return Some((tree.clone(), relative_path.into()));
}
}
- }
- None
+ None
+ })
}
pub fn is_shared(&self) -> bool {
@@ -8042,18 +7977,8 @@ impl Project {
inventory.remove_worktree_sources(id_to_remove);
});
- self.worktrees.retain(|worktree| {
- if let Some(worktree) = worktree.upgrade() {
- let id = worktree.read(cx).id();
- if id == id_to_remove {
- cx.emit(Event::WorktreeRemoved(id));
- false
- } else {
- true
- }
- } else {
- false
- }
+ self.worktree_store.update(cx, |worktree_store, cx| {
+ worktree_store.remove_worktree(id_to_remove, cx);
});
self.metadata_changed(cx);
@@ -8066,7 +7991,6 @@ impl Project {
match event {
worktree::Event::UpdatedEntries(changes) => {
if is_local {
- this.update_local_worktree_buffers(&worktree, changes, cx);
this.update_local_worktree_language_servers(&worktree, changes, cx);
this.update_local_worktree_settings(&worktree, changes, cx);
this.update_prettier_settings(&worktree, changes, cx);
@@ -8082,85 +8006,18 @@ impl Project {
.telemetry()
.report_discovered_project_events(worktree_id, changes);
}
- worktree::Event::UpdatedGitRepositories(updated_repos) => {
- if is_local {
- this.update_local_worktree_buffers_git_repos(
- worktree.clone(),
- updated_repos,
- cx,
- )
- }
+ worktree::Event::UpdatedGitRepositories(_) => {
cx.emit(Event::WorktreeUpdatedGitRepositories);
}
+ worktree::Event::DeletedEntry(id) => cx.emit(Event::DeletedEntry(*id)),
}
})
.detach();
- let push_strong_handle = {
- let worktree = worktree.read(cx);
- self.is_shared() || worktree.is_visible() || worktree.is_remote()
- };
- let handle = if push_strong_handle {
- WorktreeHandle::Strong(worktree.clone())
- } else {
- WorktreeHandle::Weak(worktree.downgrade())
- };
- if self.worktrees_reordered {
- self.worktrees.push(handle);
- } else {
- let i = match self
- .worktrees
- .binary_search_by_key(&Some(worktree.read(cx).abs_path()), |other| {
- other.upgrade().map(|worktree| worktree.read(cx).abs_path())
- }) {
- Ok(i) | Err(i) => i,
- };
- self.worktrees.insert(i, handle);
- }
-
- let handle_id = worktree.entity_id();
- cx.observe_release(worktree, move |this, worktree, cx| {
- let _ = this.remove_worktree(worktree.id(), cx);
- cx.update_global::<SettingsStore, _>(|store, cx| {
- store
- .clear_local_settings(handle_id.as_u64() as usize, cx)
- .log_err()
- });
- })
- .detach();
-
- cx.emit(Event::WorktreeAdded);
- self.metadata_changed(cx);
- }
-
- fn update_local_worktree_buffers(
- &mut self,
- worktree_handle: &Model<Worktree>,
- changes: &[(Arc<Path>, ProjectEntryId, PathChange)],
- cx: &mut ModelContext<Self>,
- ) {
- let snapshot = worktree_handle.read(cx).snapshot();
- self.buffer_store.clone().update(cx, |buffer_store, cx| {
- for (path, entry_id, _) in changes {
- if let Some((buffer, _, new_file)) = buffer_store.file_changed(
- path.clone(),
- *entry_id,
- worktree_handle,
- &snapshot,
- cx,
- ) {
- if let Some(project_id) = self.remote_id() {
- self.client
- .send(proto::UpdateBufferFile {
- project_id,
- buffer_id: buffer.read(cx).remote_id().into(),
- file: Some(new_file.to_proto(cx)),
- })
- .log_err();
- }
- }
- }
+ self.worktree_store.update(cx, |worktree_store, cx| {
+ worktree_store.add(worktree, cx);
});
+ self.metadata_changed(cx);
}
fn update_local_worktree_language_servers(
@@ -8225,138 +8082,6 @@ impl Project {
}
}
- fn update_local_worktree_buffers_git_repos(
- &mut self,
- worktree_handle: Model<Worktree>,
- changed_repos: &UpdatedGitRepositoriesSet,
- cx: &mut ModelContext<Self>,
- ) {
- debug_assert!(worktree_handle.read(cx).is_local());
-
- // Identify the loading buffers whose containing repository that has changed.
- let future_buffers = self
- .buffer_store
- .read(cx)
- .loading_buffers()
- .filter_map(|(project_path, receiver)| {
- if project_path.worktree_id != worktree_handle.read(cx).id() {
- return None;
- }
- let path = &project_path.path;
- changed_repos
- .iter()
- .find(|(work_dir, _)| path.starts_with(work_dir))?;
- let path = path.clone();
- let abs_path = worktree_handle.read(cx).absolutize(&path).ok()?;
- Some(async move {
- BufferStore::wait_for_loading_buffer(receiver)
- .await
- .ok()
- .map(|buffer| (buffer, path, abs_path))
- })
- })
- .collect::<FuturesUnordered<_>>();
-
- // Identify the current buffers whose containing repository has changed.
- let current_buffers = self
- .buffer_store
- .read(cx)
- .buffers()
- .filter_map(|buffer| {
- let file = File::from_dyn(buffer.read(cx).file())?;
- if file.worktree != worktree_handle {
- return None;
- }
- let path = file.path();
- changed_repos
- .iter()
- .find(|(work_dir, _)| path.starts_with(work_dir))?;
- Some((buffer, path.clone(), file.abs_path(cx)))
- })
- .collect::<Vec<_>>();
-
- if future_buffers.len() + current_buffers.len() == 0 {
- return;
- }
-
- let remote_id = self.remote_id();
- let client = self.client.clone();
- let fs = self.fs.clone();
- cx.spawn(move |_, mut cx| async move {
- // Wait for all of the buffers to load.
- let future_buffers = future_buffers.collect::<Vec<_>>().await;
-
- // Reload the diff base for every buffer whose containing git repository has changed.
- let snapshot =
- worktree_handle.update(&mut cx, |tree, _| tree.as_local().unwrap().snapshot())?;
- let diff_bases_by_buffer = cx
- .background_executor()
- .spawn(async move {
- let mut diff_base_tasks = future_buffers
- .into_iter()
- .flatten()
- .chain(current_buffers)
- .filter_map(|(buffer, path, abs_path)| {
- let (repo_entry, local_repo_entry) = snapshot.repo_for_path(&path)?;
- Some((buffer, path, abs_path, repo_entry, local_repo_entry))
- })
- .map(|(buffer, path, abs_path, repo, local_repo_entry)| {
- let fs = fs.clone();
- let snapshot = snapshot.clone();
- async move {
- let abs_path_metadata = fs
- .metadata(&abs_path)
- .await
- .with_context(|| {
- format!("loading file and FS metadata for {path:?}")
- })
- .log_err()
- .flatten()?;
- let base_text = if abs_path_metadata.is_dir
- || abs_path_metadata.is_symlink
- {
- None
- } else {
- let relative_path = repo.relativize(&snapshot, &path).ok()?;
- local_repo_entry.repo().load_index_text(&relative_path)
- };
- Some((buffer, base_text))
- }
- })
- .collect::<FuturesUnordered<_>>();
-
- let mut diff_bases = Vec::with_capacity(diff_base_tasks.len());
- while let Some(diff_base) = diff_base_tasks.next().await {
- if let Some(diff_base) = diff_base {
- diff_bases.push(diff_base);
- }
- }
- diff_bases
- })
- .await;
-
- // Assign the new diff bases on all of the buffers.
- for (buffer, diff_base) in diff_bases_by_buffer {
- let buffer_id = buffer.update(&mut cx, |buffer, cx| {
- buffer.set_diff_base(diff_base.clone(), cx);
- buffer.remote_id().into()
- })?;
- if let Some(project_id) = remote_id {
- client
- .send(proto::UpdateDiffBase {
- project_id,
- buffer_id,
- diff_base,
- })
- .log_err();
- }
- }
-
- anyhow::Ok(())
- })
- .detach();
- }
-
fn update_local_worktree_settings(
&mut self,
worktree: &Model<Worktree>,
@@ -8655,13 +8380,14 @@ impl Project {
full_path: &Path,
cx: &AppContext,
) -> Option<ProjectPath> {
- self.worktrees.iter().find_map(|worktree| {
- let worktree = worktree.upgrade()?;
- let worktree_root_name = worktree.read(cx).root_name();
- let relative_path = full_path.strip_prefix(worktree_root_name).ok()?;
- Some(ProjectPath {
- worktree_id: worktree.read(cx).id(),
- path: relative_path.into(),
+ self.worktree_store.read_with(cx, |worktree_store, cx| {
+ worktree_store.worktrees().find_map(|worktree| {
+ let worktree_root_name = worktree.read(cx).root_name();
+ let relative_path = full_path.strip_prefix(worktree_root_name).ok()?;
+ Some(ProjectPath {
+ worktree_id: worktree.read(cx).id(),
+ path: relative_path.into(),
+ })
})
})
}
@@ -9106,12 +8832,8 @@ impl Project {
envelope: TypedEnvelope<proto::CreateProjectEntry>,
mut cx: AsyncAppContext,
) -> Result<proto::ProjectEntryResponse> {
- let worktree = this.update(&mut cx, |this, cx| {
- let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
- this.worktree_for_id(worktree_id, cx)
- .ok_or_else(|| anyhow!("worktree not found"))
- })??;
- Worktree::handle_create_entry(worktree, envelope.payload, cx).await
+ let worktree_store = this.update(&mut cx, |this, _| this.worktree_store.clone())?;
+ WorktreeStore::handle_create_project_entry(worktree_store, envelope, cx).await
}
async fn handle_rename_project_entry(
@@ -9119,12 +8841,8 @@ impl Project {
envelope: TypedEnvelope<proto::RenameProjectEntry>,
mut cx: AsyncAppContext,
) -> Result<proto::ProjectEntryResponse> {
- let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
- let worktree = this.update(&mut cx, |this, cx| {
- this.worktree_for_entry(entry_id, cx)
- .ok_or_else(|| anyhow!("worktree not found"))
- })??;
- Worktree::handle_rename_entry(worktree, envelope.payload, cx).await
+ let worktree_store = this.update(&mut cx, |this, _| this.worktree_store.clone())?;
+ WorktreeStore::handle_rename_project_entry(worktree_store, envelope, cx).await
}
async fn handle_copy_project_entry(
@@ -9132,12 +8850,8 @@ impl Project {
envelope: TypedEnvelope<proto::CopyProjectEntry>,
mut cx: AsyncAppContext,
) -> Result<proto::ProjectEntryResponse> {
- let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
- let worktree = this.update(&mut cx, |this, cx| {
- this.worktree_for_entry(entry_id, cx)
- .ok_or_else(|| anyhow!("worktree not found"))
- })??;
- Worktree::handle_copy_entry(worktree, envelope.payload, cx).await
+ let worktree_store = this.update(&mut cx, |this, _| this.worktree_store.clone())?;
+ WorktreeStore::handle_copy_project_entry(worktree_store, envelope, cx).await
}
async fn handle_delete_project_entry(
@@ -9145,13 +8859,8 @@ impl Project {
envelope: TypedEnvelope<proto::DeleteProjectEntry>,
mut cx: AsyncAppContext,
) -> Result<proto::ProjectEntryResponse> {
- let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
- let worktree = this.update(&mut cx, |this, cx| {
- this.worktree_for_entry(entry_id, cx)
- .ok_or_else(|| anyhow!("worktree not found"))
- })??;
- this.update(&mut cx, |_, cx| cx.emit(Event::DeletedEntry(entry_id)))?;
- Worktree::handle_delete_entry(worktree, envelope.payload, cx).await
+ let worktree_store = this.update(&mut cx, |this, _| this.worktree_store.clone())?;
+ WorktreeStore::handle_delete_project_entry(worktree_store, envelope, cx).await
}
async fn handle_expand_project_entry(
@@ -9159,11 +8868,8 @@ impl Project {
envelope: TypedEnvelope<proto::ExpandProjectEntry>,
mut cx: AsyncAppContext,
) -> Result<proto::ExpandProjectEntryResponse> {
- let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
- let worktree = this
- .update(&mut cx, |this, cx| this.worktree_for_entry(entry_id, cx))?
- .ok_or_else(|| anyhow!("invalid request"))?;
- Worktree::handle_expand_entry(worktree, envelope.payload, cx).await
+ let worktree_store = this.update(&mut cx, |this, _| this.worktree_store.clone())?;
+ WorktreeStore::handle_expand_project_entry(worktree_store, envelope, cx).await
}
async fn handle_update_diagnostic_summary(
@@ -9327,7 +9033,7 @@ impl Project {
this.buffer_store.update(cx, |buffer_store, cx| {
buffer_store.handle_create_buffer_for_peer(
envelope,
- this.worktrees(),
+ this.worktrees(cx).collect::<Vec<_>>().into_iter(),
this.replica_id(),
this.capability(),
cx,
@@ -80,7 +80,7 @@ async fn test_symlinks(cx: &mut gpui::TestAppContext) {
let project = Project::test(Arc::new(RealFs::default()), [root_link_path.as_ref()], cx).await;
project.update(cx, |project, cx| {
- let tree = project.worktrees().next().unwrap().read(cx);
+ let tree = project.worktrees(cx).next().unwrap().read(cx);
assert_eq!(tree.file_count(), 5);
assert_eq!(
tree.inode_for_path("fennel/grape"),
@@ -124,13 +124,13 @@ async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext)
.await;
let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
- let worktree = project.update(cx, |project, _| project.worktrees().next().unwrap());
+ let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
let task_context = TaskContext::default();
cx.executor().run_until_parked();
let worktree_id = cx.update(|cx| {
project.update(cx, |project, cx| {
- project.worktrees().next().unwrap().read(cx).id()
+ project.worktrees(cx).next().unwrap().read(cx).id()
})
});
let global_task_source_kind = TaskSourceKind::Worktree {
@@ -734,7 +734,7 @@ async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppCon
// Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
project.update(cx, |project, cx| {
- let worktree = project.worktrees().next().unwrap();
+ let worktree = project.worktrees(cx).next().unwrap();
assert_eq!(
worktree
.read(cx)
@@ -808,7 +808,7 @@ async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppCon
// Now the language server has asked us to watch an ignored directory path,
// so we recursively load it.
project.update(cx, |project, cx| {
- let worktree = project.worktrees().next().unwrap();
+ let worktree = project.worktrees(cx).next().unwrap();
assert_eq!(
worktree
.read(cx)
@@ -1132,7 +1132,7 @@ async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
},
);
- let worktree_id = project.update(cx, |p, cx| p.worktrees().next().unwrap().read(cx).id());
+ let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());
// Cause worktree to start the fake language server
let _buffer = project
@@ -2477,7 +2477,7 @@ async fn test_definition(cx: &mut gpui::TestAppContext) {
) -> Vec<(&'a Path, bool)> {
project
.read(cx)
- .worktrees()
+ .worktrees(cx)
.map(|worktree| {
let worktree = worktree.read(cx);
(
@@ -2821,7 +2821,7 @@ async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext)
.await;
let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
- let worktree = project.read_with(cx, |project, _| project.worktrees().next().unwrap());
+ let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
let buffer = project
.update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
.await
@@ -2876,7 +2876,7 @@ async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
.await;
let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
- let worktree = project.read_with(cx, |project, _| project.worktrees().next().unwrap());
+ let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
let buffer = project
.update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
.await
@@ -2978,7 +2978,7 @@ async fn test_save_as(cx: &mut gpui::TestAppContext) {
});
project
.update(cx, |project, cx| {
- let worktree_id = project.worktrees().next().unwrap().read(cx).id();
+ let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
let path = ProjectPath {
worktree_id,
path: Arc::from(Path::new("file1.rs")),
@@ -3038,7 +3038,7 @@ async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
};
let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
project.update(cx, |project, cx| {
- let tree = project.worktrees().next().unwrap();
+ let tree = project.worktrees(cx).next().unwrap();
tree.read(cx)
.entry_for_path(path)
.unwrap_or_else(|| panic!("no entry for path {}", path))
@@ -3056,7 +3056,7 @@ async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
let file4_id = id_for_path("b/c/file4", cx);
// Create a remote copy of this worktree.
- let tree = project.update(cx, |project, _| project.worktrees().next().unwrap());
+ let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
let metadata = tree.update(cx, |tree, _| tree.metadata_proto());
let updates = Arc::new(Mutex::new(Vec::new()));
@@ -3173,12 +3173,12 @@ async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
.await;
let project = Project::test(fs, [Path::new("/dir")], cx).await;
- let tree = project.update(cx, |project, _| project.worktrees().next().unwrap());
+ let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
let tree_id = tree.update(cx, |tree, _| tree.id());
let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
project.update(cx, |project, cx| {
- let tree = project.worktrees().next().unwrap();
+ let tree = project.worktrees(cx).next().unwrap();
tree.read(cx)
.entry_for_path(path)
.unwrap_or_else(|| panic!("no entry for path {}", path))
@@ -4549,7 +4549,7 @@ async fn test_create_entry(cx: &mut gpui::TestAppContext) {
let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
project
.update(cx, |project, cx| {
- let id = project.worktrees().next().unwrap().read(cx).id();
+ let id = project.worktrees(cx).next().unwrap().read(cx).id();
project.create_entry((id, "b.."), true, cx)
})
.unwrap()
@@ -4560,7 +4560,7 @@ async fn test_create_entry(cx: &mut gpui::TestAppContext) {
// Can't create paths outside the project
let result = project
.update(cx, |project, cx| {
- let id = project.worktrees().next().unwrap().read(cx).id();
+ let id = project.worktrees(cx).next().unwrap().read(cx).id();
project.create_entry((id, "../../boop"), true, cx)
})
.await;
@@ -4569,7 +4569,7 @@ async fn test_create_entry(cx: &mut gpui::TestAppContext) {
// Can't create paths with '..'
let result = project
.update(cx, |project, cx| {
- let id = project.worktrees().next().unwrap().read(cx).id();
+ let id = project.worktrees(cx).next().unwrap().read(cx).id();
project.create_entry((id, "four/../beep"), true, cx)
})
.await;
@@ -4592,7 +4592,7 @@ async fn test_create_entry(cx: &mut gpui::TestAppContext) {
// And we cannot open buffers with '..'
let result = project
.update(cx, |project, cx| {
- let id = project.worktrees().next().unwrap().read(cx).id();
+ let id = project.worktrees(cx).next().unwrap().read(cx).id();
project.open_buffer((id, "../c.rs"), cx)
})
.await;
@@ -0,0 +1,311 @@
+use anyhow::{anyhow, Context as _, Result};
+use collections::HashMap;
+use gpui::{AppContext, AsyncAppContext, EntityId, EventEmitter, Model, ModelContext, WeakModel};
+use rpc::{
+ proto::{self, AnyProtoClient},
+ TypedEnvelope,
+};
+use text::ReplicaId;
+use worktree::{ProjectEntryId, Worktree, WorktreeId};
+
+pub struct WorktreeStore {
+ is_shared: bool,
+ worktrees: Vec<WorktreeHandle>,
+ worktrees_reordered: bool,
+}
+
+pub enum WorktreeStoreEvent {
+ WorktreeAdded(Model<Worktree>),
+ WorktreeRemoved(EntityId, WorktreeId),
+ WorktreeOrderChanged,
+}
+
+impl EventEmitter<WorktreeStoreEvent> for WorktreeStore {}
+
+impl WorktreeStore {
+ pub fn new(retain_worktrees: bool) -> Self {
+ Self {
+ is_shared: retain_worktrees,
+ worktrees: Vec::new(),
+ worktrees_reordered: false,
+ }
+ }
+
+ /// Iterates through all worktrees, including ones that don't appear in the project panel
+ pub fn worktrees(&self) -> impl '_ + DoubleEndedIterator<Item = Model<Worktree>> {
+ self.worktrees
+ .iter()
+ .filter_map(move |worktree| worktree.upgrade())
+ }
+
+ /// Iterates through all user-visible worktrees, the ones that appear in the project panel.
+ pub fn visible_worktrees<'a>(
+ &'a self,
+ cx: &'a AppContext,
+ ) -> impl 'a + DoubleEndedIterator<Item = Model<Worktree>> {
+ self.worktrees()
+ .filter(|worktree| worktree.read(cx).is_visible())
+ }
+
+ pub fn worktree_for_id(&self, id: WorktreeId, cx: &AppContext) -> Option<Model<Worktree>> {
+ self.worktrees()
+ .find(|worktree| worktree.read(cx).id() == id)
+ }
+
+ pub fn worktree_for_entry(
+ &self,
+ entry_id: ProjectEntryId,
+ cx: &AppContext,
+ ) -> Option<Model<Worktree>> {
+ self.worktrees()
+ .find(|worktree| worktree.read(cx).contains_entry(entry_id))
+ }
+
+ pub fn add(&mut self, worktree: &Model<Worktree>, cx: &mut ModelContext<Self>) {
+ let push_strong_handle = self.is_shared || worktree.read(cx).is_visible();
+ let handle = if push_strong_handle {
+ WorktreeHandle::Strong(worktree.clone())
+ } else {
+ WorktreeHandle::Weak(worktree.downgrade())
+ };
+ if self.worktrees_reordered {
+ self.worktrees.push(handle);
+ } else {
+ let i = match self
+ .worktrees
+ .binary_search_by_key(&Some(worktree.read(cx).abs_path()), |other| {
+ other.upgrade().map(|worktree| worktree.read(cx).abs_path())
+ }) {
+ Ok(i) | Err(i) => i,
+ };
+ self.worktrees.insert(i, handle);
+ }
+
+ cx.emit(WorktreeStoreEvent::WorktreeAdded(worktree.clone()));
+
+ let handle_id = worktree.entity_id();
+ cx.observe_release(worktree, move |_, worktree, cx| {
+ cx.emit(WorktreeStoreEvent::WorktreeRemoved(
+ handle_id,
+ worktree.id(),
+ ));
+ })
+ .detach();
+ }
+
+ pub fn remove_worktree(&mut self, id_to_remove: WorktreeId, cx: &mut ModelContext<Self>) {
+ self.worktrees.retain(|worktree| {
+ if let Some(worktree) = worktree.upgrade() {
+ worktree.read(cx).id() != id_to_remove
+ } else {
+ false
+ }
+ });
+ }
+
+ pub fn set_worktrees_reordered(&mut self, worktrees_reordered: bool) {
+ self.worktrees_reordered = worktrees_reordered;
+ }
+
+ pub fn set_worktrees_from_proto(
+ &mut self,
+ worktrees: Vec<proto::WorktreeMetadata>,
+ replica_id: ReplicaId,
+ remote_id: u64,
+ client: AnyProtoClient,
+ cx: &mut ModelContext<Self>,
+ ) -> Result<()> {
+ let mut old_worktrees_by_id = self
+ .worktrees
+ .drain(..)
+ .filter_map(|worktree| {
+ let worktree = worktree.upgrade()?;
+ Some((worktree.read(cx).id(), worktree))
+ })
+ .collect::<HashMap<_, _>>();
+
+ for worktree in worktrees {
+ if let Some(old_worktree) =
+ old_worktrees_by_id.remove(&WorktreeId::from_proto(worktree.id))
+ {
+ self.worktrees.push(WorktreeHandle::Strong(old_worktree));
+ } else {
+ self.add(
+ &Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx),
+ cx,
+ );
+ }
+ }
+
+ Ok(())
+ }
+
+ pub fn move_worktree(
+ &mut self,
+ source: WorktreeId,
+ destination: WorktreeId,
+ cx: &mut ModelContext<Self>,
+ ) -> Result<()> {
+ if source == destination {
+ return Ok(());
+ }
+
+ let mut source_index = None;
+ let mut destination_index = None;
+ for (i, worktree) in self.worktrees.iter().enumerate() {
+ if let Some(worktree) = worktree.upgrade() {
+ let worktree_id = worktree.read(cx).id();
+ if worktree_id == source {
+ source_index = Some(i);
+ if destination_index.is_some() {
+ break;
+ }
+ } else if worktree_id == destination {
+ destination_index = Some(i);
+ if source_index.is_some() {
+ break;
+ }
+ }
+ }
+ }
+
+ let source_index =
+ source_index.with_context(|| format!("Missing worktree for id {source}"))?;
+ let destination_index =
+ destination_index.with_context(|| format!("Missing worktree for id {destination}"))?;
+
+ if source_index == destination_index {
+ return Ok(());
+ }
+
+ let worktree_to_move = self.worktrees.remove(source_index);
+ self.worktrees.insert(destination_index, worktree_to_move);
+ self.worktrees_reordered = true;
+ cx.emit(WorktreeStoreEvent::WorktreeOrderChanged);
+ cx.notify();
+ Ok(())
+ }
+
+ pub fn disconnected_from_host(&mut self, cx: &mut AppContext) {
+ for worktree in &self.worktrees {
+ if let Some(worktree) = worktree.upgrade() {
+ worktree.update(cx, |worktree, _| {
+ if let Some(worktree) = worktree.as_remote_mut() {
+ worktree.disconnected_from_host();
+ }
+ });
+ }
+ }
+ }
+
+ pub fn set_shared(&mut self, is_shared: bool, cx: &mut ModelContext<Self>) {
+ self.is_shared = is_shared;
+
+ // When shared, retain all worktrees
+ if is_shared {
+ for worktree_handle in self.worktrees.iter_mut() {
+ match worktree_handle {
+ WorktreeHandle::Strong(_) => {}
+ WorktreeHandle::Weak(worktree) => {
+ if let Some(worktree) = worktree.upgrade() {
+ *worktree_handle = WorktreeHandle::Strong(worktree);
+ }
+ }
+ }
+ }
+ }
+ // When not shared, only retain the visible worktrees
+ else {
+ for worktree_handle in self.worktrees.iter_mut() {
+ if let WorktreeHandle::Strong(worktree) = worktree_handle {
+ let is_visible = worktree.update(cx, |worktree, _| {
+ worktree.stop_observing_updates();
+ worktree.is_visible()
+ });
+ if !is_visible {
+ *worktree_handle = WorktreeHandle::Weak(worktree.downgrade());
+ }
+ }
+ }
+ }
+ }
+
+ pub async fn handle_create_project_entry(
+ this: Model<Self>,
+ envelope: TypedEnvelope<proto::CreateProjectEntry>,
+ mut cx: AsyncAppContext,
+ ) -> Result<proto::ProjectEntryResponse> {
+ let worktree = this.update(&mut cx, |this, cx| {
+ let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
+ this.worktree_for_id(worktree_id, cx)
+ .ok_or_else(|| anyhow!("worktree not found"))
+ })??;
+ Worktree::handle_create_entry(worktree, envelope.payload, cx).await
+ }
+
+ pub async fn handle_rename_project_entry(
+ this: Model<Self>,
+ envelope: TypedEnvelope<proto::RenameProjectEntry>,
+ mut cx: AsyncAppContext,
+ ) -> Result<proto::ProjectEntryResponse> {
+ let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
+ let worktree = this.update(&mut cx, |this, cx| {
+ this.worktree_for_entry(entry_id, cx)
+ .ok_or_else(|| anyhow!("worktree not found"))
+ })??;
+ Worktree::handle_rename_entry(worktree, envelope.payload, cx).await
+ }
+
+ pub async fn handle_copy_project_entry(
+ this: Model<Self>,
+ envelope: TypedEnvelope<proto::CopyProjectEntry>,
+ mut cx: AsyncAppContext,
+ ) -> Result<proto::ProjectEntryResponse> {
+ let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
+ let worktree = this.update(&mut cx, |this, cx| {
+ this.worktree_for_entry(entry_id, cx)
+ .ok_or_else(|| anyhow!("worktree not found"))
+ })??;
+ Worktree::handle_copy_entry(worktree, envelope.payload, cx).await
+ }
+
+ pub async fn handle_delete_project_entry(
+ this: Model<Self>,
+ envelope: TypedEnvelope<proto::DeleteProjectEntry>,
+ mut cx: AsyncAppContext,
+ ) -> Result<proto::ProjectEntryResponse> {
+ let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
+ let worktree = this.update(&mut cx, |this, cx| {
+ this.worktree_for_entry(entry_id, cx)
+ .ok_or_else(|| anyhow!("worktree not found"))
+ })??;
+ Worktree::handle_delete_entry(worktree, envelope.payload, cx).await
+ }
+
+    pub async fn handle_expand_project_entry(
+        this: Model<Self>,
+        envelope: TypedEnvelope<proto::ExpandProjectEntry>,
+        mut cx: AsyncAppContext,
+    ) -> Result<proto::ExpandProjectEntryResponse> {
+        let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
+        let worktree = this
+            .update(&mut cx, |this, cx| this.worktree_for_entry(entry_id, cx))?
+            .ok_or_else(|| anyhow!("worktree not found"))?;
+        Worktree::handle_expand_entry(worktree, envelope.payload, cx).await
+    }
+}
+
+#[derive(Clone)]
+enum WorktreeHandle {
+ Strong(Model<Worktree>),
+ Weak(WeakModel<Worktree>),
+}
+
+impl WorktreeHandle {
+ fn upgrade(&self) -> Option<Model<Worktree>> {
+ match self {
+ WorktreeHandle::Strong(handle) => Some(handle.clone()),
+ WorktreeHandle::Weak(handle) => handle.upgrade(),
+ }
+ }
+}
@@ -4173,7 +4173,7 @@ mod tests {
let project = Project::test(fs.clone(), ["/project_root".as_ref()], cx).await;
let worktree_id =
- cx.update(|cx| project.read(cx).worktrees().next().unwrap().read(cx).id());
+ cx.update(|cx| project.read(cx).worktrees(cx).next().unwrap().read(cx).id());
let workspace = cx.add_window(|cx| Workspace::test_new(project.clone(), cx));
let cx = &mut VisualTestContext::from_window(*workspace, cx);
let panel = workspace
@@ -4969,7 +4969,7 @@ mod tests {
) {
let path = path.as_ref();
panel.update(cx, |panel, cx| {
- for worktree in panel.project.read(cx).worktrees().collect::<Vec<_>>() {
+ for worktree in panel.project.read(cx).worktrees(cx).collect::<Vec<_>>() {
let worktree = worktree.read(cx);
if let Ok(relative_path) = path.strip_prefix(worktree.root_name()) {
let entry_id = worktree.entry_for_path(relative_path).unwrap().id;
@@ -4984,7 +4984,7 @@ mod tests {
fn select_path(panel: &View<ProjectPanel>, path: impl AsRef<Path>, cx: &mut VisualTestContext) {
let path = path.as_ref();
panel.update(cx, |panel, cx| {
- for worktree in panel.project.read(cx).worktrees().collect::<Vec<_>>() {
+ for worktree in panel.project.read(cx).worktrees(cx).collect::<Vec<_>>() {
let worktree = worktree.read(cx);
if let Ok(relative_path) = path.strip_prefix(worktree.root_name()) {
let entry_id = worktree.entry_for_path(relative_path).unwrap().id;
@@ -5006,7 +5006,7 @@ mod tests {
) -> Option<ProjectEntryId> {
let path = path.as_ref();
panel.update(cx, |panel, cx| {
- for worktree in panel.project.read(cx).worktrees().collect::<Vec<_>>() {
+ for worktree in panel.project.read(cx).worktrees(cx).collect::<Vec<_>>() {
let worktree = worktree.read(cx);
if let Ok(relative_path) = path.strip_prefix(worktree.root_name()) {
return worktree.entry_for_path(relative_path).map(|entry| entry.id);
@@ -534,9 +534,12 @@ impl SshClientState {
}
let mut server_binary_exists = false;
- if let Ok(installed_version) = run_cmd(self.ssh_command(&dst_path).arg("version")).await {
- if installed_version.trim() == version.to_string() {
- server_binary_exists = true;
+ if cfg!(not(debug_assertions)) {
+ if let Ok(installed_version) = run_cmd(self.ssh_command(&dst_path).arg("version")).await
+ {
+ if installed_version.trim() == version.to_string() {
+ server_binary_exists = true;
+ }
}
}
@@ -32,6 +32,7 @@ remote.workspace = true
rpc.workspace = true
settings.workspace = true
smol.workspace = true
+util.workspace = true
worktree.workspace = true
[dev-dependencies]
@@ -1,7 +1,11 @@
-use anyhow::{Context as _, Result};
+use anyhow::Result;
use fs::Fs;
use gpui::{AppContext, AsyncAppContext, Context, Model, ModelContext};
-use project::{buffer_store::BufferStore, ProjectPath, WorktreeId, WorktreeSettings};
+use project::{
+ buffer_store::{BufferStore, BufferStoreEvent},
+ worktree_store::WorktreeStore,
+ ProjectPath, WorktreeId, WorktreeSettings,
+};
use remote::SshSession;
use rpc::{
proto::{self, AnyProtoClient, PeerId},
@@ -12,6 +16,7 @@ use std::{
path::{Path, PathBuf},
sync::{atomic::AtomicUsize, Arc},
};
+use util::ResultExt as _;
use worktree::Worktree;
const PEER_ID: PeerId = PeerId { owner_id: 0, id: 0 };
@@ -20,7 +25,7 @@ const PROJECT_ID: u64 = 0;
pub struct HeadlessProject {
pub fs: Arc<dyn Fs>,
pub session: AnyProtoClient,
- pub worktrees: Vec<Model<Worktree>>,
+ pub worktree_store: Model<WorktreeStore>,
pub buffer_store: Model<BufferStore>,
pub next_entry_id: Arc<AtomicUsize>,
}
@@ -34,27 +39,45 @@ impl HeadlessProject {
pub fn new(session: Arc<SshSession>, fs: Arc<dyn Fs>, cx: &mut ModelContext<Self>) -> Self {
let this = cx.weak_model();
+ let worktree_store = cx.new_model(|_| WorktreeStore::new(true));
+ let buffer_store = cx.new_model(|cx| BufferStore::new(worktree_store.clone(), true, cx));
+ cx.subscribe(&buffer_store, Self::on_buffer_store_event)
+ .detach();
+
session.add_request_handler(this.clone(), Self::handle_add_worktree);
session.add_request_handler(this.clone(), Self::handle_open_buffer_by_path);
session.add_request_handler(this.clone(), Self::handle_update_buffer);
session.add_request_handler(this.clone(), Self::handle_save_buffer);
+ session.add_request_handler(
+ worktree_store.downgrade(),
+ WorktreeStore::handle_create_project_entry,
+ );
+ session.add_request_handler(
+ worktree_store.downgrade(),
+ WorktreeStore::handle_rename_project_entry,
+ );
+ session.add_request_handler(
+ worktree_store.downgrade(),
+ WorktreeStore::handle_copy_project_entry,
+ );
+ session.add_request_handler(
+ worktree_store.downgrade(),
+ WorktreeStore::handle_delete_project_entry,
+ );
+ session.add_request_handler(
+ worktree_store.downgrade(),
+ WorktreeStore::handle_expand_project_entry,
+ );
HeadlessProject {
session: session.into(),
fs,
- worktrees: Vec::new(),
- buffer_store: cx.new_model(|_| BufferStore::new(true)),
+ worktree_store,
+ buffer_store,
next_entry_id: Default::default(),
}
}
- fn worktree_for_id(&self, id: WorktreeId, cx: &AppContext) -> Option<Model<Worktree>> {
- self.worktrees
- .iter()
- .find(|worktree| worktree.read(cx).id() == id)
- .cloned()
- }
-
pub async fn handle_add_worktree(
this: Model<Self>,
message: TypedEnvelope<proto::AddWorktree>,
@@ -74,7 +97,9 @@ impl HeadlessProject {
this.update(&mut cx, |this, cx| {
let session = this.session.clone();
- this.worktrees.push(worktree.clone());
+ this.worktree_store.update(cx, |worktree_store, cx| {
+ worktree_store.add(&worktree, cx);
+ });
worktree.update(cx, |worktree, cx| {
worktree.observe_updates(0, cx, move |update| {
session.send(update).ok();
@@ -104,19 +129,8 @@ impl HeadlessProject {
envelope: TypedEnvelope<proto::SaveBuffer>,
mut cx: AsyncAppContext,
) -> Result<proto::BufferSaved> {
- let (buffer_store, worktree) = this.update(&mut cx, |this, cx| {
- let buffer_store = this.buffer_store.clone();
- let worktree = if let Some(path) = &envelope.payload.new_path {
- Some(
- this.worktree_for_id(WorktreeId::from_proto(path.worktree_id), cx)
- .context("worktree does not exist")?,
- )
- } else {
- None
- };
- anyhow::Ok((buffer_store, worktree))
- })??;
- BufferStore::handle_save_buffer(buffer_store, PROJECT_ID, worktree, envelope, cx).await
+ let buffer_store = this.update(&mut cx, |this, _| this.buffer_store.clone())?;
+ BufferStore::handle_save_buffer(buffer_store, PROJECT_ID, envelope, cx).await
}
pub async fn handle_open_buffer_by_path(
@@ -126,9 +140,6 @@ impl HeadlessProject {
) -> Result<proto::OpenBufferResponse> {
let worktree_id = WorktreeId::from_proto(message.payload.worktree_id);
let (buffer_store, buffer, session) = this.update(&mut cx, |this, cx| {
- let worktree = this
- .worktree_for_id(worktree_id, cx)
- .context("no such worktree")?;
let buffer_store = this.buffer_store.clone();
let buffer = this.buffer_store.update(cx, |buffer_store, cx| {
buffer_store.open_buffer(
@@ -136,7 +147,6 @@ impl HeadlessProject {
worktree_id,
path: PathBuf::from(message.payload.path).into(),
},
- worktree,
cx,
)
});
@@ -163,4 +173,41 @@ impl HeadlessProject {
buffer_id: buffer_id.to_proto(),
})
}
+
+ pub fn on_buffer_store_event(
+ &mut self,
+ _: Model<BufferStore>,
+ event: &BufferStoreEvent,
+ cx: &mut ModelContext<Self>,
+ ) {
+ match event {
+ BufferStoreEvent::LocalBufferUpdated { buffer } => {
+ let buffer = buffer.read(cx);
+ let buffer_id = buffer.remote_id();
+ let Some(new_file) = buffer.file() else {
+ return;
+ };
+ self.session
+ .send(proto::UpdateBufferFile {
+ project_id: 0,
+ buffer_id: buffer_id.into(),
+ file: Some(new_file.to_proto(cx)),
+ })
+ .log_err();
+ }
+ BufferStoreEvent::DiffBaseUpdated { buffer } => {
+ let buffer = buffer.read(cx);
+ let buffer_id = buffer.remote_id();
+ let diff_base = buffer.diff_base();
+ self.session
+ .send(proto::UpdateDiffBase {
+ project_id: 0,
+ buffer_id: buffer_id.to_proto(),
+ diff_base: diff_base.map(|b| b.to_string()),
+ })
+ .log_err();
+ }
+ _ => {}
+ }
+ }
}
@@ -11,7 +11,6 @@ use std::{env, io, mem, process, sync::Arc};
fn main() {
env::set_var("RUST_BACKTRACE", "1");
- env::set_var("RUST_LOG", "remote=trace");
let subcommand = std::env::args().nth(1);
match subcommand.as_deref() {
@@ -12,15 +12,23 @@ use serde_json::json;
use settings::SettingsStore;
use std::{path::Path, sync::Arc};
+fn init_logger() {
+ if std::env::var("RUST_LOG").is_ok() {
+ env_logger::try_init().ok();
+ }
+}
+
#[gpui::test]
async fn test_remote_editing(cx: &mut TestAppContext, server_cx: &mut TestAppContext) {
let (client_ssh, server_ssh) = SshSession::fake(cx, server_cx);
+ init_logger();
let fs = FakeFs::new(server_cx.executor());
fs.insert_tree(
"/code",
json!({
"project1": {
+ ".git": {},
"README.md": "# project 1",
"src": {
"lib.rs": "fn one() -> usize { 1 }"
@@ -32,6 +40,10 @@ async fn test_remote_editing(cx: &mut TestAppContext, server_cx: &mut TestAppCon
}),
)
.await;
+ fs.set_index_for_repo(
+ Path::new("/code/project1/.git"),
+ &[(Path::new("src/lib.rs"), "fn one() -> usize { 0 }".into())],
+ );
server_cx.update(HeadlessProject::init);
let _headless_project =
@@ -52,6 +64,7 @@ async fn test_remote_editing(cx: &mut TestAppContext, server_cx: &mut TestAppCon
assert_eq!(
worktree.paths().map(Arc::as_ref).collect::<Vec<_>>(),
vec![
+ Path::new(".git"),
Path::new("README.md"),
Path::new("src"),
Path::new("src/lib.rs"),
@@ -69,6 +82,10 @@ async fn test_remote_editing(cx: &mut TestAppContext, server_cx: &mut TestAppCon
.unwrap();
buffer.update(cx, |buffer, cx| {
assert_eq!(buffer.text(), "fn one() -> usize { 1 }");
+ assert_eq!(
+ buffer.diff_base().unwrap().to_string(),
+ "fn one() -> usize { 0 }"
+ );
let ix = buffer.text().find('1').unwrap();
buffer.edit([(ix..ix + 1, "100")], None, cx);
});
@@ -76,7 +93,7 @@ async fn test_remote_editing(cx: &mut TestAppContext, server_cx: &mut TestAppCon
// The user saves the buffer. The new contents are written to the
// remote filesystem.
project
- .update(cx, |project, cx| project.save_buffer(buffer, cx))
+ .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
.await
.unwrap();
assert_eq!(
@@ -98,6 +115,7 @@ async fn test_remote_editing(cx: &mut TestAppContext, server_cx: &mut TestAppCon
assert_eq!(
worktree.paths().map(Arc::as_ref).collect::<Vec<_>>(),
vec![
+ Path::new(".git"),
Path::new("README.md"),
Path::new("src"),
Path::new("src/lib.rs"),
@@ -105,6 +123,31 @@ async fn test_remote_editing(cx: &mut TestAppContext, server_cx: &mut TestAppCon
]
);
});
+
+ // A file that is currently open in a buffer is renamed.
+ fs.rename(
+ "/code/project1/src/lib.rs".as_ref(),
+ "/code/project1/src/lib2.rs".as_ref(),
+ Default::default(),
+ )
+ .await
+ .unwrap();
+ cx.executor().run_until_parked();
+ buffer.update(cx, |buffer, _| {
+ assert_eq!(&**buffer.file().unwrap().path(), Path::new("src/lib2.rs"));
+ });
+
+ fs.set_index_for_repo(
+ Path::new("/code/project1/.git"),
+ &[(Path::new("src/lib2.rs"), "fn one() -> usize { 100 }".into())],
+ );
+ cx.executor().run_until_parked();
+ buffer.update(cx, |buffer, _| {
+ assert_eq!(
+ buffer.diff_base().unwrap().to_string(),
+ "fn one() -> usize { 100 }"
+ );
+ });
}
fn build_project(ssh: Arc<SshSession>, cx: &mut TestAppContext) -> Model<Project> {
@@ -2400,7 +2400,7 @@ pub mod tests {
.await;
let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
let worktree_id = project.read_with(cx, |project, cx| {
- project.worktrees().next().unwrap().read(cx).id()
+ project.worktrees(cx).next().unwrap().read(cx).id()
});
let window = cx.add_window(|cx| Workspace::test_new(project, cx));
let workspace = window.root(cx).unwrap();
@@ -2836,7 +2836,7 @@ pub mod tests {
.await;
let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
let worktree_id = project.update(cx, |this, cx| {
- this.worktrees().next().unwrap().read(cx).id()
+ this.worktrees(cx).next().unwrap().read(cx).id()
});
let window = cx.add_window(|cx| Workspace::test_new(project, cx));
@@ -3053,7 +3053,7 @@ pub mod tests {
.await;
let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
let worktree_id = project.update(cx, |this, cx| {
- this.worktrees().next().unwrap().read(cx).id()
+ this.worktrees(cx).next().unwrap().read(cx).id()
});
let window = cx.add_window(|cx| Workspace::test_new(project, cx));
let panes: Vec<_> = window
@@ -273,7 +273,7 @@ async fn open_buffer(
) -> Box<dyn ItemHandle> {
let project = workspace.update(cx, |workspace, _| workspace.project().clone());
let worktree_id = project.update(cx, |project, cx| {
- let worktree = project.worktrees().last().expect("worktree not found");
+ let worktree = project.worktrees(cx).last().expect("worktree not found");
worktree.read(cx).id()
});
let project_path = ProjectPath {
@@ -256,7 +256,7 @@ mod tests {
);
let worktree_id = project.update(cx, |project, cx| {
- project.worktrees().next().unwrap().read(cx).id()
+ project.worktrees(cx).next().unwrap().read(cx).id()
});
let (workspace, cx) = cx.add_window_view(|cx| Workspace::test_new(project.clone(), cx));
@@ -2229,7 +2229,7 @@ impl Render for Pane {
pane.child(self.render_tab_bar(cx))
})
.child({
- let has_worktrees = self.project.read(cx).worktrees().next().is_some();
+ let has_worktrees = self.project.read(cx).worktrees(cx).next().is_some();
// main content
div()
.flex_1()
@@ -1077,8 +1077,8 @@ impl Workspace {
.collect::<Vec<_>>();
if paths_order.iter().enumerate().any(|(i, &j)| i != j) {
project_handle
- .update(&mut cx, |project, _| {
- project.set_worktrees_reordered(true);
+ .update(&mut cx, |project, cx| {
+ project.set_worktrees_reordered(true, cx);
})
.log_err();
}
@@ -1567,7 +1567,7 @@ impl Workspace {
}
pub fn worktrees<'a>(&self, cx: &'a AppContext) -> impl 'a + Iterator<Item = Model<Worktree>> {
- self.project.read(cx).worktrees()
+ self.project.read(cx).worktrees(cx)
}
pub fn visible_worktrees<'a>(
@@ -1861,7 +1861,7 @@ impl Workspace {
) -> Task<Result<()>> {
let window = cx.window_handle().downcast::<Self>();
let is_remote = self.project.read(cx).is_remote();
- let has_worktree = self.project.read(cx).worktrees().next().is_some();
+ let has_worktree = self.project.read(cx).worktrees(cx).next().is_some();
let has_dirty_items = self.items(cx).any(|item| item.is_dirty(cx));
let window_to_replace = if replace_current_window {
@@ -5685,7 +5685,7 @@ mod tests {
let (workspace, cx) = cx.add_window_view(|cx| Workspace::test_new(project.clone(), cx));
let pane = workspace.update(cx, |workspace, _| workspace.active_pane().clone());
let worktree_id = project.update(cx, |project, cx| {
- project.worktrees().next().unwrap().read(cx).id()
+ project.worktrees(cx).next().unwrap().read(cx).id()
});
let item1 = cx.new_view(|cx| {
@@ -6809,7 +6809,7 @@ mod tests {
let (workspace, cx) = cx.add_window_view(|cx| Workspace::test_new(project.clone(), cx));
let worktree_id = project.update(cx, |project, cx| {
- project.worktrees().next().unwrap().read(cx).id()
+ project.worktrees(cx).next().unwrap().read(cx).id()
});
let handle = workspace
@@ -6872,7 +6872,7 @@ mod tests {
let (workspace, cx) = cx.add_window_view(|cx| Workspace::test_new(project.clone(), cx));
let worktree_id = project.update(cx, |project, cx| {
- project.worktrees().next().unwrap().read(cx).id()
+ project.worktrees(cx).next().unwrap().read(cx).id()
});
let handle = workspace
@@ -354,6 +354,7 @@ struct UpdateObservationState {
pub enum Event {
UpdatedEntries(UpdatedEntriesSet),
UpdatedGitRepositories(UpdatedGitRepositoriesSet),
+ DeletedEntry(ProjectEntryId),
}
static EMPTY_PATH: &str = "";
@@ -738,10 +739,12 @@ impl Worktree {
trash: bool,
cx: &mut ModelContext<Worktree>,
) -> Option<Task<Result<()>>> {
- match self {
+ let task = match self {
Worktree::Local(this) => this.delete_entry(entry_id, trash, cx),
Worktree::Remote(this) => this.delete_entry(entry_id, trash, cx),
- }
+ }?;
+ cx.emit(Event::DeletedEntry(entry_id));
+ Some(task)
}
pub fn rename_entry(
@@ -1208,25 +1211,10 @@ impl LocalWorktree {
if let Some(repo_path) = repo.relativize(&snapshot, &path).log_err() {
if let Some(git_repo) = snapshot.git_repositories.get(&*repo.work_directory) {
let git_repo = git_repo.repo_ptr.clone();
- index_task = Some(cx.background_executor().spawn({
- let fs = fs.clone();
- let abs_path = abs_path.clone();
- async move {
- let metadata = fs
- .metadata(&abs_path)
- .await
- .with_context(|| {
- format!("loading file and FS metadata for {abs_path:?}")
- })
- .log_err()
- .flatten()?;
- if metadata.is_dir || metadata.is_symlink {
- None
- } else {
- git_repo.load_index_text(&repo_path)
- }
- }
- }));
+ index_task = Some(
+ cx.background_executor()
+ .spawn(async move { git_repo.load_index_text(&repo_path) }),
+ );
}
}
}