From 2b18975cdc077e77880867f84d764328351d6335 Mon Sep 17 00:00:00 2001 From: Mikayla Maki Date: Mon, 15 May 2023 09:41:56 -0700 Subject: [PATCH 1/8] Change folder styling from a reduce over all child files to a simple 'always modified' Remove git status from tab titles --- crates/editor/src/items.rs | 22 ++-------------------- crates/project/src/worktree.rs | 26 ++++++++------------------ 2 files changed, 10 insertions(+), 38 deletions(-) diff --git a/crates/editor/src/items.rs b/crates/editor/src/items.rs index 80c1009aa4ce12e1a1dece3aacbf52289bb304b0..d2b9c20803e9ee211953302a36613e579b3b9ba2 100644 --- a/crates/editor/src/items.rs +++ b/crates/editor/src/items.rs @@ -14,7 +14,7 @@ use language::{ proto::serialize_anchor as serialize_text_anchor, Bias, Buffer, OffsetRangeExt, Point, SelectionGoal, }; -use project::{repository::GitFileStatus, FormatTrigger, Item as _, Project, ProjectPath}; +use project::{FormatTrigger, Item as _, Project, ProjectPath}; use rpc::proto::{self, update_view}; use settings::Settings; use smallvec::SmallVec; @@ -27,7 +27,6 @@ use std::{ path::{Path, PathBuf}, }; use text::Selection; -use theme::ui::FileName; use util::{ResultExt, TryFutureExt}; use workspace::item::{BreadcrumbText, FollowableItemHandle}; use workspace::{ @@ -566,25 +565,8 @@ impl Item for Editor { style: &theme::Tab, cx: &AppContext, ) -> AnyElement { - fn git_file_status(this: &Editor, cx: &AppContext) -> Option { - let project_entry_id = this - .buffer() - .read(cx) - .as_singleton()? - .read(cx) - .entry_id(cx)?; - let project = this.project.as_ref()?.read(cx); - let path = project.path_for_entry(project_entry_id, cx)?.path; - let worktree = project.worktree_for_entry(project_entry_id, cx)?.read(cx); - worktree.repo_for(&path)?.status_for_path(&worktree, &path) - } - Flex::row() - .with_child(ComponentHost::new(FileName::new( - self.title(cx).to_string(), - git_file_status(self, cx), - FileName::style(style.label.clone(), &cx.global::().theme), - ))) + .with_child(Label::new(self.title(cx).to_string(), style.label.clone()).into_any()) .with_children(detail.and_then(|detail| { let path = path_for_buffer(&self.buffer, detail, false, cx)?; let description = path.to_string_lossy(); diff --git a/crates/project/src/worktree.rs b/crates/project/src/worktree.rs index bfd4eaa43f6a36acca5fb4efc15ecf1fbd20e7ed..cb00fc5c41234d2f77524769c6e82097996a27af 100644 --- a/crates/project/src/worktree.rs +++ b/crates/project/src/worktree.rs @@ -55,7 +55,7 @@ use std::{ time::{Duration, SystemTime}, }; use sum_tree::{Bias, Edit, SeekTarget, SumTree, TreeMap, TreeSet}; -use util::{paths::HOME, ResultExt, TakeUntilExt, TryFutureExt}; +use util::{paths::HOME, ResultExt, TryFutureExt}; #[derive(Copy, Clone, PartialEq, Eq, Debug, Hash, PartialOrd, Ord)] pub struct WorktreeId(usize); @@ -187,20 +187,12 @@ impl RepositoryEntry { self.worktree_statuses .iter_from(&repo_path) .take_while(|(key, _)| key.starts_with(&repo_path)) - .map(|(_, status)| status) - // Short circut once we've found the highest level - .take_until(|status| status == &&GitFileStatus::Conflict) - .reduce( - |status_first, status_second| match (status_first, status_second) { - (GitFileStatus::Conflict, _) | (_, GitFileStatus::Conflict) => { - &GitFileStatus::Conflict - } - (GitFileStatus::Modified, _) | (_, GitFileStatus::Modified) => { - &GitFileStatus::Modified - } - _ => &GitFileStatus::Added, - }, - ) + .map(|(path, status)| if path == &repo_path { + status + } else { + &GitFileStatus::Modified + }) + .next() .copied() }) } @@ -4170,15 +4162,13 @@ 
mod tests { tree.flush_fs_events(cx).await; - dbg!(git_status(&repo)); + git_status(&repo); // Check that non-repo behavior is tracked tree.read_with(cx, |tree, _cx| { let snapshot = tree.snapshot(); let (_, repo) = snapshot.repository_entries.iter().next().unwrap(); - dbg!(&repo.worktree_statuses); - assert_eq!(repo.worktree_statuses.iter().count(), 0); assert_eq!(repo.worktree_statuses.get(&Path::new(A_TXT).into()), None); assert_eq!(repo.worktree_statuses.get(&Path::new(B_TXT).into()), None); From 6c26f3d0e4d0135e49f7073f8a0412f175df5abf Mon Sep 17 00:00:00 2001 From: Mikayla Maki Date: Mon, 15 May 2023 09:48:27 -0700 Subject: [PATCH 2/8] Fixed formatting --- crates/project/src/worktree.rs | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/crates/project/src/worktree.rs b/crates/project/src/worktree.rs index cb00fc5c41234d2f77524769c6e82097996a27af..5216db76f6ccf52d4238e34114124c1b10656fc5 100644 --- a/crates/project/src/worktree.rs +++ b/crates/project/src/worktree.rs @@ -187,10 +187,12 @@ impl RepositoryEntry { self.worktree_statuses .iter_from(&repo_path) .take_while(|(key, _)| key.starts_with(&repo_path)) - .map(|(path, status)| if path == &repo_path { - status - } else { - &GitFileStatus::Modified + .map(|(path, status)| { + if path == &repo_path { + status + } else { + &GitFileStatus::Modified + } }) .next() .copied() @@ -4162,8 +4164,6 @@ mod tests { tree.flush_fs_events(cx).await; - git_status(&repo); - // Check that non-repo behavior is tracked tree.read_with(cx, |tree, _cx| { let snapshot = tree.snapshot(); From 1e4ab6cd75f83dfe7d920da4b23d7bd663a842ea Mon Sep 17 00:00:00 2001 From: Mikayla Maki Date: Mon, 15 May 2023 12:00:12 -0700 Subject: [PATCH 3/8] Add index tracking to status --- crates/fs/src/repository.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/crates/fs/src/repository.rs b/crates/fs/src/repository.rs index 51b69b8bc7ad07f290c3e2496e7b39ad7aeb5ec3..4163dbab900191c509cfee753286edfeca597aa5 100644 --- a/crates/fs/src/repository.rs +++ b/crates/fs/src/repository.rs @@ -100,9 +100,9 @@ impl GitRepository for LibGitRepository { fn read_status(status: git2::Status) -> Option { if status.contains(git2::Status::CONFLICTED) { Some(GitFileStatus::Conflict) - } else if status.intersects(git2::Status::WT_MODIFIED | git2::Status::WT_RENAMED) { + } else if status.intersects(git2::Status::WT_MODIFIED | git2::Status::WT_RENAMED | git2::Status::INDEX_MODIFIED | git2::Status::INDEX_RENAMED) { Some(GitFileStatus::Modified) - } else if status.intersects(git2::Status::WT_NEW) { + } else if status.intersects(git2::Status::WT_NEW | git2::Status::INDEX_NEW) { Some(GitFileStatus::Added) } else { None From 307dd2b83e240ab23594735a6638fe3bd1bdd5fc Mon Sep 17 00:00:00 2001 From: Mikayla Maki Date: Mon, 15 May 2023 13:40:55 -0700 Subject: [PATCH 4/8] Update proto names to reflect new status info --- crates/collab/src/db.rs | 54 +++++++++------------ crates/fs/src/repository.rs | 19 +++++--- crates/project/src/worktree.rs | 86 +++++++++++++++++----------------- crates/rpc/proto/zed.proto | 4 +- crates/rpc/src/proto.rs | 17 ++++--- 5 files changed, 88 insertions(+), 92 deletions(-) diff --git a/crates/collab/src/db.rs b/crates/collab/src/db.rs index 1047b207b9e0248d40fe241648e9f6812b8e72dd..453aa82b536b21b25a1b8fe8c6d69f319ac5484b 100644 --- a/crates/collab/src/db.rs +++ b/crates/collab/src/db.rs @@ -1569,8 +1569,8 @@ impl Database { worktree.updated_repositories.push(proto::RepositoryEntry { work_directory_id: db_repository.work_directory_id 
as u64, branch: db_repository.branch, - removed_worktree_repo_paths: Default::default(), - updated_worktree_statuses: Default::default(), + removed_repo_paths: Default::default(), + updated_statuses: Default::default(), }); } } @@ -1607,15 +1607,13 @@ impl Database { let db_status_entry = db_status_entry?; if db_status_entry.is_deleted { repository - .removed_worktree_repo_paths + .removed_repo_paths .push(db_status_entry.repo_path); } else { - repository - .updated_worktree_statuses - .push(proto::StatusEntry { - repo_path: db_status_entry.repo_path, - status: db_status_entry.status as i32, - }); + repository.updated_statuses.push(proto::StatusEntry { + repo_path: db_status_entry.repo_path, + status: db_status_entry.status as i32, + }); } } } @@ -2444,12 +2442,10 @@ impl Database { .await?; for repository in update.updated_repositories.iter() { - if !repository.updated_worktree_statuses.is_empty() { + if !repository.updated_statuses.is_empty() { worktree_repository_statuses::Entity::insert_many( - repository - .updated_worktree_statuses - .iter() - .map(|status_entry| worktree_repository_statuses::ActiveModel { + repository.updated_statuses.iter().map(|status_entry| { + worktree_repository_statuses::ActiveModel { project_id: ActiveValue::set(project_id), worktree_id: ActiveValue::set(worktree_id), work_directory_id: ActiveValue::set( @@ -2459,7 +2455,8 @@ impl Database { status: ActiveValue::set(status_entry.status as i64), scan_id: ActiveValue::set(update.scan_id as i64), is_deleted: ActiveValue::set(false), - }), + } + }), ) .on_conflict( OnConflict::columns([ @@ -2479,7 +2476,7 @@ impl Database { .await?; } - if !repository.removed_worktree_repo_paths.is_empty() { + if !repository.removed_repo_paths.is_empty() { worktree_repository_statuses::Entity::update_many() .filter( worktree_repository_statuses::Column::ProjectId @@ -2492,14 +2489,9 @@ impl Database { worktree_repository_statuses::Column::WorkDirectoryId .eq(repository.work_directory_id as i64), ) - .and( - worktree_repository_statuses::Column::RepoPath.is_in( - repository - .removed_worktree_repo_paths - .iter() - .map(String::as_str), - ), - ), + .and(worktree_repository_statuses::Column::RepoPath.is_in( + repository.removed_repo_paths.iter().map(String::as_str), + )), ) .set(worktree_repository_statuses::ActiveModel { is_deleted: ActiveValue::Set(true), @@ -2765,8 +2757,8 @@ impl Database { proto::RepositoryEntry { work_directory_id: db_repository_entry.work_directory_id as u64, branch: db_repository_entry.branch, - removed_worktree_repo_paths: Default::default(), - updated_worktree_statuses: Default::default(), + removed_repo_paths: Default::default(), + updated_statuses: Default::default(), }, ); } @@ -2791,12 +2783,10 @@ impl Database { .repository_entries .get_mut(&(db_status_entry.work_directory_id as u64)) { - repository_entry - .updated_worktree_statuses - .push(proto::StatusEntry { - repo_path: db_status_entry.repo_path, - status: db_status_entry.status as i32, - }); + repository_entry.updated_statuses.push(proto::StatusEntry { + repo_path: db_status_entry.repo_path, + status: db_status_entry.status as i32, + }); } } } diff --git a/crates/fs/src/repository.rs b/crates/fs/src/repository.rs index 4163dbab900191c509cfee753286edfeca597aa5..2c309351fc004f71e451fb866d64293168627c4f 100644 --- a/crates/fs/src/repository.rs +++ b/crates/fs/src/repository.rs @@ -22,9 +22,9 @@ pub trait GitRepository: Send { fn branch_name(&self) -> Option; - fn worktree_statuses(&self) -> Option>; + fn statuses(&self) -> Option>; - fn 
worktree_status(&self, path: &RepoPath) -> Option; + fn status(&self, path: &RepoPath) -> Option; } impl std::fmt::Debug for dyn GitRepository { @@ -71,7 +71,7 @@ impl GitRepository for LibGitRepository { Some(branch.to_string()) } - fn worktree_statuses(&self) -> Option> { + fn statuses(&self) -> Option> { let statuses = self.statuses(None).log_err()?; let mut map = TreeMap::default(); @@ -91,7 +91,7 @@ impl GitRepository for LibGitRepository { Some(map) } - fn worktree_status(&self, path: &RepoPath) -> Option { + fn status(&self, path: &RepoPath) -> Option { let status = self.status_file(path).log_err()?; read_status(status) } @@ -100,7 +100,12 @@ impl GitRepository for LibGitRepository { fn read_status(status: git2::Status) -> Option { if status.contains(git2::Status::CONFLICTED) { Some(GitFileStatus::Conflict) - } else if status.intersects(git2::Status::WT_MODIFIED | git2::Status::WT_RENAMED | git2::Status::INDEX_MODIFIED | git2::Status::INDEX_RENAMED) { + } else if status.intersects( + git2::Status::WT_MODIFIED + | git2::Status::WT_RENAMED + | git2::Status::INDEX_MODIFIED + | git2::Status::INDEX_RENAMED, + ) { Some(GitFileStatus::Modified) } else if status.intersects(git2::Status::WT_NEW | git2::Status::INDEX_NEW) { Some(GitFileStatus::Added) @@ -141,7 +146,7 @@ impl GitRepository for FakeGitRepository { state.branch_name.clone() } - fn worktree_statuses(&self) -> Option> { + fn statuses(&self) -> Option> { let state = self.state.lock(); let mut map = TreeMap::default(); for (repo_path, status) in state.worktree_statuses.iter() { @@ -150,7 +155,7 @@ impl GitRepository for FakeGitRepository { Some(map) } - fn worktree_status(&self, path: &RepoPath) -> Option { + fn status(&self, path: &RepoPath) -> Option { let state = self.state.lock(); state.worktree_statuses.get(path).cloned() } diff --git a/crates/project/src/worktree.rs b/crates/project/src/worktree.rs index 5216db76f6ccf52d4238e34114124c1b10656fc5..9c214b7ecf5042a42ba18d33e3c76a213a0624f0 100644 --- a/crates/project/src/worktree.rs +++ b/crates/project/src/worktree.rs @@ -143,7 +143,7 @@ impl Snapshot { pub struct RepositoryEntry { pub(crate) work_directory: WorkDirectoryEntry, pub(crate) branch: Option>, - pub(crate) worktree_statuses: TreeMap, + pub(crate) statuses: TreeMap, } fn read_git_status(git_status: i32) -> Option { @@ -176,7 +176,7 @@ impl RepositoryEntry { pub fn status_for_file(&self, snapshot: &Snapshot, path: &Path) -> Option { self.work_directory .relativize(snapshot, path) - .and_then(|repo_path| self.worktree_statuses.get(&repo_path)) + .and_then(|repo_path| self.statuses.get(&repo_path)) .cloned() } @@ -184,7 +184,7 @@ impl RepositoryEntry { self.work_directory .relativize(snapshot, path) .and_then(|repo_path| { - self.worktree_statuses + self.statuses .iter_from(&repo_path) .take_while(|(key, _)| key.starts_with(&repo_path)) .map(|(path, status)| { @@ -203,8 +203,8 @@ impl RepositoryEntry { let mut updated_statuses: Vec = Vec::new(); let mut removed_statuses: Vec = Vec::new(); - let mut self_statuses = self.worktree_statuses.iter().peekable(); - let mut other_statuses = other.worktree_statuses.iter().peekable(); + let mut self_statuses = self.statuses.iter().peekable(); + let mut other_statuses = other.statuses.iter().peekable(); loop { match (self_statuses.peek(), other_statuses.peek()) { (Some((self_repo_path, self_status)), Some((other_repo_path, other_status))) => { @@ -243,8 +243,8 @@ impl RepositoryEntry { proto::RepositoryEntry { work_directory_id: self.work_directory_id().to_proto(), branch: 
self.branch.as_ref().map(|str| str.to_string()), - removed_worktree_repo_paths: removed_statuses, - updated_worktree_statuses: updated_statuses, + removed_repo_paths: removed_statuses, + updated_statuses: updated_statuses, } } } @@ -269,12 +269,12 @@ impl From<&RepositoryEntry> for proto::RepositoryEntry { proto::RepositoryEntry { work_directory_id: value.work_directory.to_proto(), branch: value.branch.as_ref().map(|str| str.to_string()), - updated_worktree_statuses: value - .worktree_statuses + updated_statuses: value + .statuses .iter() .map(|(repo_path, status)| make_status_entry(repo_path, status)) .collect(), - removed_worktree_repo_paths: Default::default(), + removed_repo_paths: Default::default(), } } } @@ -1540,7 +1540,7 @@ impl Snapshot { if let Some(entry) = self.entry_for_id(*work_directory_entry) { let mut statuses = TreeMap::default(); - for status_entry in repository.updated_worktree_statuses { + for status_entry in repository.updated_statuses { let Some(git_file_status) = read_git_status(status_entry.status) else { continue; }; @@ -1553,11 +1553,11 @@ impl Snapshot { if self.repository_entries.get(&work_directory).is_some() { self.repository_entries.update(&work_directory, |repo| { repo.branch = repository.branch.map(Into::into); - repo.worktree_statuses.insert_tree(statuses); + repo.statuses.insert_tree(statuses); - for repo_path in repository.removed_worktree_repo_paths { + for repo_path in repository.removed_repo_paths { let repo_path = RepoPath::new(repo_path.into()); - repo.worktree_statuses.remove(&repo_path); + repo.statuses.remove(&repo_path); } }); } else { @@ -1566,7 +1566,7 @@ impl Snapshot { RepositoryEntry { work_directory: work_directory_entry, branch: repository.branch.map(Into::into), - worktree_statuses: statuses, + statuses, }, ) } @@ -1982,7 +1982,7 @@ impl LocalSnapshot { RepositoryEntry { work_directory: work_dir_id.into(), branch: repo_lock.branch_name().map(Into::into), - worktree_statuses: repo_lock.worktree_statuses().unwrap_or_default(), + statuses: repo_lock.statuses().unwrap_or_default(), }, ); drop(repo_lock); @@ -2681,6 +2681,8 @@ impl BackgroundScanner { self.update_ignore_statuses().await; + // + let mut snapshot = self.snapshot.lock(); let mut git_repositories = mem::take(&mut snapshot.git_repositories); @@ -2993,7 +2995,7 @@ impl BackgroundScanner { fs_entry.is_ignored = ignore_stack.is_all(); snapshot.insert_entry(fs_entry, self.fs.as_ref()); - self.reload_repo_for_path(&path, &mut snapshot, self.fs.as_ref()); + self.reload_repo_for_file_path(&path, &mut snapshot, self.fs.as_ref()); if let Some(scan_queue_tx) = &scan_queue_tx { let mut ancestor_inodes = snapshot.ancestor_inodes_for_path(&path); @@ -3042,7 +3044,7 @@ impl BackgroundScanner { snapshot.repository_entries.update(&work_dir, |entry| { entry - .worktree_statuses + .statuses .remove_range(&repo_path, &RepoPathDescendants(&repo_path)) }); } @@ -3050,7 +3052,7 @@ impl BackgroundScanner { Some(()) } - fn reload_repo_for_path( + fn reload_repo_for_file_path( &self, path: &Path, snapshot: &mut LocalSnapshot, @@ -3084,7 +3086,7 @@ impl BackgroundScanner { let repo = repo_ptr.lock(); repo.reload_index(); let branch = repo.branch_name(); - let statuses = repo.worktree_statuses().unwrap_or_default(); + let statuses = repo.statuses().unwrap_or_default(); snapshot.git_repositories.update(&entry_id, |entry| { entry.scan_id = scan_id; @@ -3093,7 +3095,7 @@ impl BackgroundScanner { snapshot.repository_entries.update(&work_dir, |entry| { entry.branch = branch.map(Into::into); - 
entry.worktree_statuses = statuses; + entry.statuses = statuses; }); } else { if snapshot @@ -3118,7 +3120,7 @@ impl BackgroundScanner { } let git_ptr = local_repo.repo_ptr.lock(); - git_ptr.worktree_status(&repo_path) + git_ptr.status(&repo_path) }; let work_dir = repo.work_directory(snapshot)?; @@ -3130,9 +3132,9 @@ impl BackgroundScanner { snapshot.repository_entries.update(&work_dir, |entry| { if let Some(status) = status { - entry.worktree_statuses.insert(repo_path, status); + entry.statuses.insert(repo_path, status); } else { - entry.worktree_statuses.remove(&repo_path); + entry.statuses.remove(&repo_path); } }); } @@ -4089,17 +4091,17 @@ mod tests { let (dir, repo) = snapshot.repository_entries.iter().next().unwrap(); assert_eq!(dir.0.as_ref(), Path::new("project")); - assert_eq!(repo.worktree_statuses.iter().count(), 3); + assert_eq!(repo.statuses.iter().count(), 3); assert_eq!( - repo.worktree_statuses.get(&Path::new(A_TXT).into()), + repo.statuses.get(&Path::new(A_TXT).into()), Some(&GitFileStatus::Modified) ); assert_eq!( - repo.worktree_statuses.get(&Path::new(B_TXT).into()), + repo.statuses.get(&Path::new(B_TXT).into()), Some(&GitFileStatus::Added) ); assert_eq!( - repo.worktree_statuses.get(&Path::new(F_TXT).into()), + repo.statuses.get(&Path::new(F_TXT).into()), Some(&GitFileStatus::Added) ); }); @@ -4114,11 +4116,11 @@ mod tests { let snapshot = tree.snapshot(); let (_, repo) = snapshot.repository_entries.iter().next().unwrap(); - assert_eq!(repo.worktree_statuses.iter().count(), 1); - assert_eq!(repo.worktree_statuses.get(&Path::new(A_TXT).into()), None); - assert_eq!(repo.worktree_statuses.get(&Path::new(B_TXT).into()), None); + assert_eq!(repo.statuses.iter().count(), 1); + assert_eq!(repo.statuses.get(&Path::new(A_TXT).into()), None); + assert_eq!(repo.statuses.get(&Path::new(B_TXT).into()), None); assert_eq!( - repo.worktree_statuses.get(&Path::new(F_TXT).into()), + repo.statuses.get(&Path::new(F_TXT).into()), Some(&GitFileStatus::Added) ); }); @@ -4135,18 +4137,18 @@ mod tests { let snapshot = tree.snapshot(); let (_, repo) = snapshot.repository_entries.iter().next().unwrap(); - assert_eq!(repo.worktree_statuses.iter().count(), 3); - assert_eq!(repo.worktree_statuses.get(&Path::new(A_TXT).into()), None); + assert_eq!(repo.statuses.iter().count(), 3); + assert_eq!(repo.statuses.get(&Path::new(A_TXT).into()), None); assert_eq!( - repo.worktree_statuses.get(&Path::new(B_TXT).into()), + repo.statuses.get(&Path::new(B_TXT).into()), Some(&GitFileStatus::Added) ); assert_eq!( - repo.worktree_statuses.get(&Path::new(E_TXT).into()), + repo.statuses.get(&Path::new(E_TXT).into()), Some(&GitFileStatus::Modified) ); assert_eq!( - repo.worktree_statuses.get(&Path::new(F_TXT).into()), + repo.statuses.get(&Path::new(F_TXT).into()), Some(&GitFileStatus::Added) ); }); @@ -4169,11 +4171,11 @@ mod tests { let snapshot = tree.snapshot(); let (_, repo) = snapshot.repository_entries.iter().next().unwrap(); - assert_eq!(repo.worktree_statuses.iter().count(), 0); - assert_eq!(repo.worktree_statuses.get(&Path::new(A_TXT).into()), None); - assert_eq!(repo.worktree_statuses.get(&Path::new(B_TXT).into()), None); - assert_eq!(repo.worktree_statuses.get(&Path::new(E_TXT).into()), None); - assert_eq!(repo.worktree_statuses.get(&Path::new(F_TXT).into()), None); + assert_eq!(repo.statuses.iter().count(), 0); + assert_eq!(repo.statuses.get(&Path::new(A_TXT).into()), None); + assert_eq!(repo.statuses.get(&Path::new(B_TXT).into()), None); + assert_eq!(repo.statuses.get(&Path::new(E_TXT).into()), None); + 
assert_eq!(repo.statuses.get(&Path::new(F_TXT).into()), None); }); } diff --git a/crates/rpc/proto/zed.proto b/crates/rpc/proto/zed.proto index 8e45435b89d7be6de90ccf9369ffa87380d7b5ed..eca5fda3064364893fee91d8451ab401813e7b65 100644 --- a/crates/rpc/proto/zed.proto +++ b/crates/rpc/proto/zed.proto @@ -986,8 +986,8 @@ message Entry { message RepositoryEntry { uint64 work_directory_id = 1; optional string branch = 2; - repeated string removed_worktree_repo_paths = 3; - repeated StatusEntry updated_worktree_statuses = 4; + repeated string removed_repo_paths = 3; + repeated StatusEntry updated_statuses = 4; } message StatusEntry { diff --git a/crates/rpc/src/proto.rs b/crates/rpc/src/proto.rs index d74ed5e46cef0af3832e7dbe334d2d366ac54764..efaaaea52e45a932152ce5b2000c7760e425321c 100644 --- a/crates/rpc/src/proto.rs +++ b/crates/rpc/src/proto.rs @@ -509,8 +509,8 @@ pub fn split_worktree_update( updated_repositories.push(RepositoryEntry { work_directory_id: repo.work_directory_id, branch: repo.branch.clone(), - removed_worktree_repo_paths: Default::default(), - updated_worktree_statuses: Default::default(), + removed_repo_paths: Default::default(), + updated_statuses: Default::default(), }); break; } @@ -535,26 +535,25 @@ pub fn split_worktree_update( { let updated_statuses_chunk_size = cmp::min( message.updated_repositories[repository_index] - .updated_worktree_statuses + .updated_statuses .len(), max_chunk_size - total_statuses, ); let updated_statuses: Vec<_> = message.updated_repositories[repository_index] - .updated_worktree_statuses + .updated_statuses .drain(..updated_statuses_chunk_size) .collect(); total_statuses += updated_statuses.len(); let done_this_repo = message.updated_repositories[repository_index] - .updated_worktree_statuses + .updated_statuses .is_empty(); let removed_repo_paths = if done_this_repo { mem::take( - &mut message.updated_repositories[repository_index] - .removed_worktree_repo_paths, + &mut message.updated_repositories[repository_index].removed_repo_paths, ) } else { Default::default() @@ -566,8 +565,8 @@ pub fn split_worktree_update( branch: message.updated_repositories[repository_index] .branch .clone(), - updated_worktree_statuses: updated_statuses, - removed_worktree_repo_paths: removed_repo_paths, + updated_statuses, + removed_repo_paths, }); if done_this_repo { From 68078853b7f44325743710cc86f8540bcda01cd3 Mon Sep 17 00:00:00 2001 From: Mikayla Maki Date: Mon, 15 May 2023 15:50:24 -0700 Subject: [PATCH 5/8] Made status tracking resilient to folder renames co-authored-by: max --- crates/project/src/worktree.rs | 269 ++++++++++++++++++++++++++++----- 1 file changed, 228 insertions(+), 41 deletions(-) diff --git a/crates/project/src/worktree.rs b/crates/project/src/worktree.rs index 9c214b7ecf5042a42ba18d33e3c76a213a0624f0..cea308d7c1f62152ef23ed6f89476fe7769144e8 100644 --- a/crates/project/src/worktree.rs +++ b/crates/project/src/worktree.rs @@ -1663,6 +1663,30 @@ impl Snapshot { } } + fn descendent_entries<'a>( + &'a self, + include_dirs: bool, + include_ignored: bool, + parent_path: &'a Path, + ) -> DescendentEntriesIter<'a> { + let mut cursor = self.entries_by_path.cursor(); + cursor.seek(&TraversalTarget::Path(parent_path), Bias::Left, &()); + let mut traversal = Traversal { + cursor, + include_dirs, + include_ignored, + }; + + if traversal.end_offset() == traversal.start_offset() { + traversal.advance(); + } + + DescendentEntriesIter { + traversal, + parent_path, + } + } + pub fn root_entry(&self) -> Option<&Entry> { self.entry_for_path("") } @@ 
-2664,14 +2688,13 @@ impl BackgroundScanner { async fn process_events(&mut self, paths: Vec) { let (scan_job_tx, scan_job_rx) = channel::unbounded(); - if let Some(mut paths) = self + let paths = self .reload_entries_for_paths(paths, Some(scan_job_tx.clone())) - .await - { - paths.sort_unstable(); + .await; + if let Some(paths) = &paths { util::extend_sorted( &mut self.prev_state.lock().event_paths, - paths, + paths.iter().cloned(), usize::MAX, Ord::cmp, ); @@ -2681,10 +2704,14 @@ impl BackgroundScanner { self.update_ignore_statuses().await; - // - let mut snapshot = self.snapshot.lock(); + if let Some(paths) = paths { + for path in paths { + self.reload_repo_for_file_path(&path, &mut *snapshot, self.fs.as_ref()); + } + } + let mut git_repositories = mem::take(&mut snapshot.git_repositories); git_repositories.retain(|work_directory_id, _| { snapshot @@ -2995,8 +3022,6 @@ impl BackgroundScanner { fs_entry.is_ignored = ignore_stack.is_all(); snapshot.insert_entry(fs_entry, self.fs.as_ref()); - self.reload_repo_for_file_path(&path, &mut snapshot, self.fs.as_ref()); - if let Some(scan_queue_tx) = &scan_queue_tx { let mut ancestor_inodes = snapshot.ancestor_inodes_for_path(&path); if metadata.is_dir && !ancestor_inodes.contains(&metadata.inode) { @@ -3109,34 +3134,36 @@ impl BackgroundScanner { let repo = snapshot.repo_for(&path)?; - let repo_path = repo.work_directory.relativize(&snapshot, &path)?; - - let status = { - let local_repo = snapshot.get_local_repo(&repo)?; - - // Short circuit if we've already scanned everything - if local_repo.full_scan_id == scan_id { - return None; - } - - let git_ptr = local_repo.repo_ptr.lock(); - git_ptr.status(&repo_path) - }; - let work_dir = repo.work_directory(snapshot)?; - let work_dir_id = repo.work_directory; + let work_dir_id = repo.work_directory.clone(); snapshot .git_repositories .update(&work_dir_id, |entry| entry.scan_id = scan_id); - snapshot.repository_entries.update(&work_dir, |entry| { + let local_repo = snapshot.get_local_repo(&repo)?.to_owned(); + + // Short circuit if we've already scanned everything + if local_repo.full_scan_id == scan_id { + return None; + } + + let mut repository = snapshot.repository_entries.remove(&work_dir)?; + + for entry in snapshot.descendent_entries(false, false, path) { + let Some(repo_path) = repo.work_directory.relativize(snapshot, &entry.path) else { + continue; + }; + + let status = local_repo.repo_ptr.lock().status(&repo_path); if let Some(status) = status { - entry.statuses.insert(repo_path, status); + repository.statuses.insert(repo_path.clone(), status); } else { - entry.statuses.remove(&repo_path); + repository.statuses.remove(&repo_path); } - }); + } + + snapshot.repository_entries.insert(work_dir, repository) } Some(()) @@ -3471,17 +3498,13 @@ pub struct Traversal<'a> { impl<'a> Traversal<'a> { pub fn advance(&mut self) -> bool { - self.advance_to_offset(self.offset() + 1) - } - - pub fn advance_to_offset(&mut self, offset: usize) -> bool { self.cursor.seek_forward( &TraversalTarget::Count { - count: offset, + count: self.end_offset() + 1, include_dirs: self.include_dirs, include_ignored: self.include_ignored, }, - Bias::Right, + Bias::Left, &(), ) } @@ -3508,11 +3531,17 @@ impl<'a> Traversal<'a> { self.cursor.item() } - pub fn offset(&self) -> usize { + pub fn start_offset(&self) -> usize { self.cursor .start() .count(self.include_dirs, self.include_ignored) } + + pub fn end_offset(&self) -> usize { + self.cursor + .end(&()) + .count(self.include_dirs, self.include_ignored) + } } impl<'a> Iterator 
for Traversal<'a> { @@ -3581,6 +3610,25 @@ impl<'a> Iterator for ChildEntriesIter<'a> { } } +struct DescendentEntriesIter<'a> { + parent_path: &'a Path, + traversal: Traversal<'a>, +} + +impl<'a> Iterator for DescendentEntriesIter<'a> { + type Item = &'a Entry; + + fn next(&mut self) -> Option { + if let Some(item) = self.traversal.entry() { + if item.path.starts_with(&self.parent_path) { + self.traversal.advance(); + return Some(item); + } + } + None + } +} + impl<'a> From<&'a Entry> for proto::Entry { fn from(entry: &'a Entry) -> Self { Self { @@ -3695,6 +3743,105 @@ mod tests { }) } + #[gpui::test] + async fn test_descendent_entries(cx: &mut TestAppContext) { + let fs = FakeFs::new(cx.background()); + fs.insert_tree( + "/root", + json!({ + "a": "", + "b": { + "c": { + "d": "" + }, + "e": {} + }, + "f": "", + "g": { + "h": {} + }, + "i": { + "j": { + "k": "" + }, + "l": { + + } + }, + ".gitignore": "i/j\n", + }), + ) + .await; + + let http_client = FakeHttpClient::with_404_response(); + let client = cx.read(|cx| Client::new(http_client, cx)); + + let tree = Worktree::local( + client, + Path::new("/root"), + true, + fs, + Default::default(), + &mut cx.to_async(), + ) + .await + .unwrap(); + cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete()) + .await; + + tree.read_with(cx, |tree, _| { + assert_eq!( + tree.descendent_entries(false, false, Path::new("b")) + .map(|entry| entry.path.as_ref()) + .collect::>(), + vec![Path::new("b/c/d"),] + ); + assert_eq!( + tree.descendent_entries(true, false, Path::new("b")) + .map(|entry| entry.path.as_ref()) + .collect::>(), + vec![ + Path::new("b"), + Path::new("b/c"), + Path::new("b/c/d"), + Path::new("b/e"), + ] + ); + + assert_eq!( + tree.descendent_entries(false, false, Path::new("g")) + .map(|entry| entry.path.as_ref()) + .collect::>(), + Vec::::new() + ); + assert_eq!( + tree.descendent_entries(true, false, Path::new("g")) + .map(|entry| entry.path.as_ref()) + .collect::>(), + vec![Path::new("g"), Path::new("g/h"),] + ); + + assert_eq!( + tree.descendent_entries(false, false, Path::new("i")) + .map(|entry| entry.path.as_ref()) + .collect::>(), + Vec::::new() + ); + assert_eq!( + tree.descendent_entries(false, true, Path::new("i")) + .map(|entry| entry.path.as_ref()) + .collect::>(), + vec![Path::new("i/j/k")] + ); + assert_eq!( + tree.descendent_entries(true, false, Path::new("i")) + .map(|entry| entry.path.as_ref()) + .collect::>(), + vec![Path::new("i"), Path::new("i/l"),] + ); + }) + } + #[gpui::test(iterations = 10)] async fn test_circular_symlinks(executor: Arc, cx: &mut TestAppContext) { let fs = FakeFs::new(cx.background()); @@ -4117,8 +4264,6 @@ mod tests { let (_, repo) = snapshot.repository_entries.iter().next().unwrap(); assert_eq!(repo.statuses.iter().count(), 1); - assert_eq!(repo.statuses.get(&Path::new(A_TXT).into()), None); - assert_eq!(repo.statuses.get(&Path::new(B_TXT).into()), None); assert_eq!( repo.statuses.get(&Path::new(F_TXT).into()), Some(&GitFileStatus::Added) @@ -4172,10 +4317,52 @@ mod tests { let (_, repo) = snapshot.repository_entries.iter().next().unwrap(); assert_eq!(repo.statuses.iter().count(), 0); - assert_eq!(repo.statuses.get(&Path::new(A_TXT).into()), None); - assert_eq!(repo.statuses.get(&Path::new(B_TXT).into()), None); - assert_eq!(repo.statuses.get(&Path::new(E_TXT).into()), None); - assert_eq!(repo.statuses.get(&Path::new(F_TXT).into()), None); + }); + + let mut renamed_dir_name = "first_directory/second_directory"; + const RENAMED_FILE: &'static str = "rf.txt"; + + 
std::fs::create_dir_all(work_dir.join(renamed_dir_name)).unwrap(); + std::fs::write( + work_dir.join(renamed_dir_name).join(RENAMED_FILE), + "new-contents", + ) + .unwrap(); + + tree.flush_fs_events(cx).await; + + tree.read_with(cx, |tree, _cx| { + let snapshot = tree.snapshot(); + let (_, repo) = snapshot.repository_entries.iter().next().unwrap(); + + assert_eq!(repo.statuses.iter().count(), 1); + assert_eq!( + repo.statuses + .get(&Path::new(renamed_dir_name).join(RENAMED_FILE).into()), + Some(&GitFileStatus::Added) + ); + }); + + renamed_dir_name = "new_first_directory/second_directory"; + + std::fs::rename( + work_dir.join("first_directory"), + work_dir.join("new_first_directory"), + ) + .unwrap(); + + tree.flush_fs_events(cx).await; + + tree.read_with(cx, |tree, _cx| { + let snapshot = tree.snapshot(); + let (_, repo) = snapshot.repository_entries.iter().next().unwrap(); + + assert_eq!(repo.statuses.iter().count(), 1); + assert_eq!( + repo.statuses + .get(&Path::new(renamed_dir_name).join(RENAMED_FILE).into()), + Some(&GitFileStatus::Added) + ); }); } From f59256f761bbb9915c565d6de92338aa116c940e Mon Sep 17 00:00:00 2001 From: Mikayla Maki Date: Mon, 15 May 2023 16:15:41 -0700 Subject: [PATCH 6/8] Update git repositories to be streamed with their entries co-authored-by: max --- crates/rpc/src/proto.rs | 93 +++++++++-------------------------------- 1 file changed, 20 insertions(+), 73 deletions(-) diff --git a/crates/rpc/src/proto.rs b/crates/rpc/src/proto.rs index efaaaea52e45a932152ce5b2000c7760e425321c..cef4e6867ca12db6a35f4c86d14dd814a249686b 100644 --- a/crates/rpc/src/proto.rs +++ b/crates/rpc/src/proto.rs @@ -1,6 +1,7 @@ use super::{entity_messages, messages, request_messages, ConnectionId, TypedEnvelope}; use anyhow::{anyhow, Result}; use async_tungstenite::tungstenite::Message as WebSocketMessage; +use collections::HashMap; use futures::{SinkExt as _, StreamExt as _}; use prost::Message as _; use serde::Serialize; @@ -485,11 +486,15 @@ pub fn split_worktree_update( max_chunk_size: usize, ) -> impl Iterator { let mut done_files = false; - let mut done_statuses = false; - let mut repository_index = 0; - let mut root_repo_found = false; + + let mut repository_map = message + .updated_repositories + .into_iter() + .map(|repo| (repo.work_directory_id, repo)) + .collect::>(); + iter::from_fn(move || { - if done_files && done_statuses { + if done_files { return None; } @@ -499,25 +504,6 @@ pub fn split_worktree_update( .drain(..updated_entries_chunk_size) .collect(); - let mut updated_repositories: Vec<_> = Default::default(); - - if !root_repo_found { - for entry in updated_entries.iter() { - if let Some(repo) = message.updated_repositories.get(0) { - if repo.work_directory_id == entry.id { - root_repo_found = true; - updated_repositories.push(RepositoryEntry { - work_directory_id: repo.work_directory_id, - branch: repo.branch.clone(), - removed_repo_paths: Default::default(), - updated_statuses: Default::default(), - }); - break; - } - } - } - } - let removed_entries_chunk_size = cmp::min(message.removed_entries.len(), max_chunk_size); let removed_entries = message .removed_entries @@ -526,64 +512,25 @@ pub fn split_worktree_update( done_files = message.updated_entries.is_empty() && message.removed_entries.is_empty(); - // Wait to send repositories until after we've guaranteed that their associated entries - // will be read - if done_files { - let mut total_statuses = 0; - while total_statuses < max_chunk_size - && repository_index < message.updated_repositories.len() - { - let 
updated_statuses_chunk_size = cmp::min( - message.updated_repositories[repository_index] - .updated_statuses - .len(), - max_chunk_size - total_statuses, - ); - - let updated_statuses: Vec<_> = message.updated_repositories[repository_index] - .updated_statuses - .drain(..updated_statuses_chunk_size) - .collect(); - - total_statuses += updated_statuses.len(); - - let done_this_repo = message.updated_repositories[repository_index] - .updated_statuses - .is_empty(); - - let removed_repo_paths = if done_this_repo { - mem::take( - &mut message.updated_repositories[repository_index].removed_repo_paths, - ) - } else { - Default::default() - }; - - updated_repositories.push(RepositoryEntry { - work_directory_id: message.updated_repositories[repository_index] - .work_directory_id, - branch: message.updated_repositories[repository_index] - .branch - .clone(), - updated_statuses, - removed_repo_paths, - }); - - if done_this_repo { - repository_index += 1; + let mut updated_repositories = Vec::new(); + + if !repository_map.is_empty() { + for entry in &updated_entries { + if let Some(repo) = repository_map.remove(&entry.id) { + updated_repositories.push(repo) } } - } else { - Default::default() - }; + } - let removed_repositories = if done_files && done_statuses { + let removed_repositories = if done_files { mem::take(&mut message.removed_repositories) } else { Default::default() }; - done_statuses = repository_index >= message.updated_repositories.len(); + if done_files { + updated_repositories.extend(mem::take(&mut repository_map).into_values()); + } Some(UpdateWorktree { project_id: message.project_id, From 4d40aa5d6fe5505cdf9cb3108e8eeecd20105f10 Mon Sep 17 00:00:00 2001 From: Mikayla Maki Date: Mon, 15 May 2023 16:17:18 -0700 Subject: [PATCH 7/8] Restore trickle up git status to folder co-authored-by: max --- crates/project/src/worktree.rs | 24 +++++++++++++++--------- 1 file changed, 15 insertions(+), 9 deletions(-) diff --git a/crates/project/src/worktree.rs b/crates/project/src/worktree.rs index cea308d7c1f62152ef23ed6f89476fe7769144e8..92c3c20c75d8d57935eccedd905b3aef3eb37c41 100644 --- a/crates/project/src/worktree.rs +++ b/crates/project/src/worktree.rs @@ -55,7 +55,7 @@ use std::{ time::{Duration, SystemTime}, }; use sum_tree::{Bias, Edit, SeekTarget, SumTree, TreeMap, TreeSet}; -use util::{paths::HOME, ResultExt, TryFutureExt}; +use util::{paths::HOME, ResultExt, TryFutureExt, TakeUntilExt}; #[derive(Copy, Clone, PartialEq, Eq, Debug, Hash, PartialOrd, Ord)] pub struct WorktreeId(usize); @@ -187,14 +187,20 @@ impl RepositoryEntry { self.statuses .iter_from(&repo_path) .take_while(|(key, _)| key.starts_with(&repo_path)) - .map(|(path, status)| { - if path == &repo_path { - status - } else { - &GitFileStatus::Modified - } - }) - .next() + // Short circut once we've found the highest level + .take_until(|(_, status)| status == &&GitFileStatus::Conflict) + .map(|(_, status)| status) + .reduce( + |status_first, status_second| match (status_first, status_second) { + (GitFileStatus::Conflict, _) | (_, GitFileStatus::Conflict) => { + &GitFileStatus::Conflict + } + (GitFileStatus::Modified, _) | (_, GitFileStatus::Modified) => { + &GitFileStatus::Modified + } + _ => &GitFileStatus::Added, + }, + ) .copied() }) } From e4d509adf47758231b0947dcc5daa85457c8bd45 Mon Sep 17 00:00:00 2001 From: Mikayla Maki Date: Mon, 15 May 2023 16:22:52 -0700 Subject: [PATCH 8/8] fmt --- crates/project/src/worktree.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/project/src/worktree.rs 
b/crates/project/src/worktree.rs index 92c3c20c75d8d57935eccedd905b3aef3eb37c41..cc16ed91b8ecf01b6e75af650500f24522e6d86c 100644 --- a/crates/project/src/worktree.rs +++ b/crates/project/src/worktree.rs @@ -55,7 +55,7 @@ use std::{ time::{Duration, SystemTime}, }; use sum_tree::{Bias, Edit, SeekTarget, SumTree, TreeMap, TreeSet}; -use util::{paths::HOME, ResultExt, TryFutureExt, TakeUntilExt}; +use util::{paths::HOME, ResultExt, TakeUntilExt, TryFutureExt}; #[derive(Copy, Clone, PartialEq, Eq, Debug, Hash, PartialOrd, Ord)] pub struct WorktreeId(usize);
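
The conceptual core of this series is the rule restored in PATCH 7/8: a folder's git status is reduced over its descendants' statuses, with Conflict taking precedence over Modified, and Modified over Added. Below is a minimal standalone sketch of that precedence rule for reference. It is an assumption-laden illustration, not the crate's implementation: it uses std's BTreeMap and a plain loop in place of Zed's sum_tree::TreeMap and util::TakeUntilExt, and folder_status is a hypothetical stand-in for RepositoryEntry::status_for_path, with GitFileStatus redefined locally so the example is self-contained.

// Standalone sketch of the "trickle up" folder status rule from PATCH 7/8.
// Assumptions: std::collections::BTreeMap instead of sum_tree::TreeMap, a
// plain loop instead of util::TakeUntilExt; `folder_status` is an
// illustrative name, not the crate's API.
use std::collections::BTreeMap;
use std::path::{Path, PathBuf};

#[derive(Copy, Clone, Debug, PartialEq, Eq)]
enum GitFileStatus {
    Added,
    Modified,
    Conflict,
}

/// Combine the statuses of every entry under `folder`: Conflict wins over
/// Modified, which wins over Added. Returns None if nothing under the folder
/// has a status. Mirrors the `reduce` in `RepositoryEntry::status_for_path`.
fn folder_status(
    statuses: &BTreeMap<PathBuf, GitFileStatus>,
    folder: &Path,
) -> Option<GitFileStatus> {
    let mut combined = None;
    // `range` starting at `folder` plays the role of `TreeMap::iter_from`;
    // `starts_with` bounds the walk to descendants, like the `take_while`.
    for (path, status) in statuses.range(folder.to_path_buf()..) {
        if !path.starts_with(folder) {
            break;
        }
        combined = Some(match (combined, *status) {
            (_, GitFileStatus::Conflict) | (Some(GitFileStatus::Conflict), _) => {
                // Highest precedence: short-circuit, as the patch's
                // `take_until` does, since nothing can override a conflict.
                return Some(GitFileStatus::Conflict);
            }
            (Some(GitFileStatus::Modified), _) | (_, GitFileStatus::Modified) => {
                GitFileStatus::Modified
            }
            _ => GitFileStatus::Added,
        });
    }
    combined
}

fn main() {
    let mut statuses = BTreeMap::new();
    statuses.insert(PathBuf::from("a/b.txt"), GitFileStatus::Added);
    statuses.insert(PathBuf::from("a/c.txt"), GitFileStatus::Modified);
    statuses.insert(PathBuf::from("d.txt"), GitFileStatus::Added);

    // Folder "a" holds one Added and one Modified file, so it reports Modified.
    assert_eq!(
        folder_status(&statuses, Path::new("a")),
        Some(GitFileStatus::Modified)
    );
    // A folder with no tracked changes reports no status at all.
    assert_eq!(folder_status(&statuses, Path::new("x")), None);
    println!("ok");
}

The early return on Conflict reflects why PATCH 1/8's "always modified" simplification was later reverted: once the highest-precedence status is seen, the rest of a large directory never needs to be scanned, so the reduce stays cheap while still surfacing conflicts at the folder level.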