Detailed changes

At a high level: repositories are now keyed by their work directory's ProjectEntryId instead of (worktree id, entry id) pairs, the GitStore syncs them to collaborators through the dedicated UpdateRepository/RemoveRepository messages rather than piggybacking on worktree updates, and paths are resolved against absolute work directory paths (relativize_abs_path), so the innermost containing repository wins.
@@ -1410,7 +1410,7 @@ impl Thread {
git_store
.repositories()
.values()
- .find(|repo| repo.read(cx).worktree_id == snapshot.id())
+ .find(|repo| repo.read(cx).worktree_id == Some(snapshot.id()))
.and_then(|repo| {
let repo = repo.read(cx);
Some((repo.branch().cloned(), repo.local_repository()?))
@@ -2892,15 +2892,17 @@ async fn test_git_branch_name(
#[track_caller]
fn assert_branch(branch_name: Option<impl Into<String>>, project: &Project, cx: &App) {
let branch_name = branch_name.map(Into::into);
- let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
- assert_eq!(worktrees.len(), 1);
- let worktree = worktrees[0].clone();
- let snapshot = worktree.read(cx).snapshot();
- let repo = snapshot.repositories().first().unwrap();
+ let repositories = project.repositories(cx).values().collect::<Vec<_>>();
+ assert_eq!(repositories.len(), 1);
+ let repository = repositories[0].clone();
assert_eq!(
- repo.branch().map(|branch| branch.name.to_string()),
+ repository
+ .read(cx)
+ .repository_entry
+ .branch()
+ .map(|branch| branch.name.to_string()),
branch_name
- );
+ )
}
// Smoke test branch reading
@@ -3022,11 +3024,20 @@ async fn test_git_status_sync(
cx: &App,
) {
let file = file.as_ref();
- let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
- assert_eq!(worktrees.len(), 1);
- let worktree = worktrees[0].clone();
- let snapshot = worktree.read(cx).snapshot();
- assert_eq!(snapshot.status_for_file(file), status);
+ let repos = project
+ .repositories(cx)
+ .values()
+ .cloned()
+ .collect::<Vec<_>>();
+ assert_eq!(repos.len(), 1);
+ let repo = repos.into_iter().next().unwrap();
+ assert_eq!(
+ repo.read(cx)
+ .repository_entry
+ .status_for_path(&file.into())
+ .map(|entry| entry.status),
+ status
+ );
}
project_local.read_with(cx_a, |project, cx| {
@@ -3094,6 +3105,27 @@ async fn test_git_status_sync(
assert_status("b.txt", Some(B_STATUS_END), project, cx);
assert_status("c.txt", Some(C_STATUS_END), project, cx);
});
+
+ // Now remove the original git repository and check that collaborators are notified.
+ client_a
+ .fs()
+ .remove_dir("/dir/.git".as_ref(), RemoveOptions::default())
+ .await
+ .unwrap();
+
+ executor.run_until_parked();
+ project_remote.update(cx_b, |project, cx| {
+ pretty_assertions::assert_eq!(
+ project.git_store().read(cx).repo_snapshots(cx),
+ HashMap::default()
+ );
+ });
+ project_remote_c.update(cx_c, |project, cx| {
+ pretty_assertions::assert_eq!(
+ project.git_store().read(cx).repo_snapshots(cx),
+ HashMap::default()
+ );
+ });
}
#[gpui::test(iterations = 10)]
@@ -1,8 +1,8 @@
use crate::tests::TestServer;
use call::ActiveCall;
-use collections::HashSet;
+use collections::{HashMap, HashSet};
use extension::ExtensionHostProxy;
-use fs::{FakeFs, Fs as _};
+use fs::{FakeFs, Fs as _, RemoveOptions};
use futures::StreamExt as _;
use gpui::{
AppContext as _, BackgroundExecutor, SemanticVersion, TestAppContext, UpdateGlobal as _,
@@ -356,6 +356,26 @@ async fn test_ssh_collaboration_git_branches(
});
assert_eq!(server_branch.name, "totally-new-branch");
+
+ // Remove the git repository and check that all participants get the update.
+ remote_fs
+ .remove_dir("/project/.git".as_ref(), RemoveOptions::default())
+ .await
+ .unwrap();
+ executor.run_until_parked();
+
+ project_a.update(cx_a, |project, cx| {
+ pretty_assertions::assert_eq!(
+ project.git_store().read(cx).repo_snapshots(cx),
+ HashMap::default()
+ );
+ });
+ project_b.update(cx_b, |project, cx| {
+ pretty_assertions::assert_eq!(
+ project.git_store().read(cx).repo_snapshots(cx),
+ HashMap::default()
+ );
+ });
}
#[gpui::test]
@@ -150,7 +150,7 @@ impl GitBlame {
this.generate(cx);
}
}
- project::Event::WorktreeUpdatedGitRepositories(_) => {
+ project::Event::GitStateUpdated => {
log::debug!("Status of git repositories updated. Regenerating blame data...",);
this.generate(cx);
}
@@ -339,7 +339,8 @@ impl EditorTestContext {
let mut found = None;
fs.with_git_state(&Self::root_path().join(".git"), false, |git_state| {
found = git_state.index_contents.get(path.as_ref()).cloned();
- });
+ })
+ .unwrap();
assert_eq!(expected, found.as_deref());
}
@@ -57,12 +57,14 @@ impl FakeGitRepository {
where
F: FnOnce(&mut FakeGitRepositoryState) -> T,
{
- self.fs.with_git_state(&self.dot_git_path, false, f)
+ self.fs
+ .with_git_state(&self.dot_git_path, false, f)
+ .unwrap()
}
- fn with_state_async<F, T>(&self, write: bool, f: F) -> BoxFuture<T>
+ fn with_state_async<F, T>(&self, write: bool, f: F) -> BoxFuture<Result<T>>
where
- F: 'static + Send + FnOnce(&mut FakeGitRepositoryState) -> T,
+ F: 'static + Send + FnOnce(&mut FakeGitRepositoryState) -> Result<T>,
T: Send,
{
let fs = self.fs.clone();
@@ -70,7 +72,7 @@ impl FakeGitRepository {
let dot_git_path = self.dot_git_path.clone();
async move {
executor.simulate_random_delay().await;
- fs.with_git_state(&dot_git_path, write, f)
+ fs.with_git_state(&dot_git_path, write, f)?
}
.boxed()
}
@@ -80,15 +82,33 @@ impl GitRepository for FakeGitRepository {
fn reload_index(&self) {}
fn load_index_text(&self, path: RepoPath, _cx: AsyncApp) -> BoxFuture<Option<String>> {
- self.with_state_async(false, move |state| {
- state.index_contents.get(path.as_ref()).cloned()
- })
+ async {
+ self.with_state_async(false, move |state| {
+ state
+ .index_contents
+ .get(path.as_ref())
+ .ok_or_else(|| anyhow!("not present in index"))
+ .cloned()
+ })
+ .await
+ .ok()
+ }
+ .boxed()
}
fn load_committed_text(&self, path: RepoPath, _cx: AsyncApp) -> BoxFuture<Option<String>> {
- self.with_state_async(false, move |state| {
- state.head_contents.get(path.as_ref()).cloned()
- })
+ async {
+ self.with_state_async(false, move |state| {
+ state
+ .head_contents
+ .get(path.as_ref())
+ .ok_or_else(|| anyhow!("not present in HEAD"))
+ .cloned()
+ })
+ .await
+ .ok()
+ }
+ .boxed()
}
fn set_index_text(
@@ -194,7 +214,7 @@ impl GitRepository for FakeGitRepository {
})
.collect();
- self.with_state(|state| {
+ self.fs.with_git_state(&self.dot_git_path, false, |state| {
let mut entries = Vec::new();
let paths = state
.head_contents
@@ -278,7 +298,7 @@ impl GitRepository for FakeGitRepository {
Ok(GitStatus {
entries: entries.into(),
})
- })
+ })?
}
fn branches(&self) -> BoxFuture<Result<Vec<Branch>>> {
@@ -1248,12 +1248,12 @@ impl FakeFs {
.boxed()
}
- pub fn with_git_state<T, F>(&self, dot_git: &Path, emit_git_event: bool, f: F) -> T
+ pub fn with_git_state<T, F>(&self, dot_git: &Path, emit_git_event: bool, f: F) -> Result<T>
where
F: FnOnce(&mut FakeGitRepositoryState) -> T,
{
let mut state = self.state.lock();
- let entry = state.read_path(dot_git).unwrap();
+ let entry = state.read_path(dot_git).context("open .git")?;
let mut entry = entry.lock();
if let FakeFsEntry::Dir { git_repo_state, .. } = &mut *entry {
@@ -1271,9 +1271,9 @@ impl FakeFs {
state.emit_event([(dot_git, None)]);
}
- result
+ Ok(result)
} else {
- panic!("not a directory");
+ Err(anyhow!("not a directory"))
}
}
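
Since FakeFs::with_git_state is now fallible (the .git entry can be missing, for example after the removal tests above delete it), each call site decides how to react. A minimal sketch of the two patterns used in this diff, assuming a FakeFs handle named fs and a hypothetical /project/.git path:

    // Test-only setters treat a missing .git directory as a bug in the test:
    fs.with_git_state("/project/.git".as_ref(), true, |state| {
        state.current_branch_name = Some("main".to_string());
    })
    .unwrap();

    // FakeGitRepository instead propagates the error to its caller:
    let branch: anyhow::Result<Option<String>> =
        fs.with_git_state("/project/.git".as_ref(), false, |state| {
            state.current_branch_name.clone()
        });
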
@@ -1283,6 +1283,7 @@ impl FakeFs {
state.branches.extend(branch.clone());
state.current_branch_name = branch
})
+ .unwrap();
}
pub fn insert_branches(&self, dot_git: &Path, branches: &[&str]) {
@@ -1296,6 +1297,7 @@ impl FakeFs {
.branches
.extend(branches.iter().map(ToString::to_string));
})
+ .unwrap();
}
pub fn set_unmerged_paths_for_repo(
@@ -1310,7 +1312,8 @@ impl FakeFs {
.iter()
.map(|(path, content)| (path.clone(), *content)),
);
- });
+ })
+ .unwrap();
}
pub fn set_index_for_repo(&self, dot_git: &Path, index_state: &[(RepoPath, String)]) {
@@ -1321,7 +1324,8 @@ impl FakeFs {
.iter()
.map(|(path, content)| (path.clone(), content.clone())),
);
- });
+ })
+ .unwrap();
}
pub fn set_head_for_repo(&self, dot_git: &Path, head_state: &[(RepoPath, String)]) {
@@ -1332,7 +1336,8 @@ impl FakeFs {
.iter()
.map(|(path, content)| (path.clone(), content.clone())),
);
- });
+ })
+ .unwrap();
}
pub fn set_git_content_for_repo(
@@ -1356,7 +1361,8 @@ impl FakeFs {
)
},
));
- });
+ })
+ .unwrap();
}
pub fn set_head_and_index_for_repo(
@@ -1371,14 +1377,16 @@ impl FakeFs {
state
.index_contents
.extend(contents_by_path.iter().cloned());
- });
+ })
+ .unwrap();
}
pub fn set_blame_for_repo(&self, dot_git: &Path, blames: Vec<(RepoPath, git::blame::Blame)>) {
self.with_git_state(dot_git, true, |state| {
state.blames.clear();
state.blames.extend(blames);
- });
+ })
+ .unwrap();
}
/// Put the given git repository into a state with the given status,
@@ -1460,13 +1468,14 @@ impl FakeFs {
state.head_contents.insert(repo_path.clone(), content);
}
}
- });
+ }).unwrap();
}
pub fn set_error_message_for_index_write(&self, dot_git: &Path, message: Option<String>) {
self.with_git_state(dot_git, true, |state| {
state.simulated_index_write_error_message = message;
- });
+ })
+ .unwrap();
}
pub fn paths(&self, include_dot_git: bool) -> Vec<PathBuf> {
@@ -438,7 +438,7 @@ impl std::ops::Sub for GitSummary {
}
}
-#[derive(Clone)]
+#[derive(Clone, Debug)]
pub struct GitStatus {
pub entries: Arc<[(RepoPath, FileStatus)]>,
}
@@ -3,7 +3,7 @@ use crate::commit_modal::CommitModal;
use crate::git_panel_settings::StatusStyle;
use crate::project_diff::Diff;
use crate::remote_output::{self, RemoteAction, SuccessMessage};
-use crate::repository_selector::filtered_repository_entries;
+
use crate::{branch_picker, render_remote_button};
use crate::{
git_panel_settings::GitPanelSettings, git_status_icon, repository_selector::RepositorySelector,
@@ -63,7 +63,7 @@ use ui::{
Tooltip,
};
use util::{maybe, post_inc, ResultExt, TryFutureExt};
-use workspace::{AppState, OpenOptions, OpenVisible};
+use workspace::AppState;
use notifications::status_toast::{StatusToast, ToastIcon};
use workspace::{
@@ -195,7 +195,6 @@ impl GitListEntry {
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct GitStatusEntry {
pub(crate) repo_path: RepoPath,
- pub(crate) worktree_path: Arc<Path>,
pub(crate) abs_path: PathBuf,
pub(crate) status: FileStatus,
pub(crate) staging: StageStatus,
@@ -203,14 +202,14 @@ pub struct GitStatusEntry {
impl GitStatusEntry {
fn display_name(&self) -> String {
- self.worktree_path
+ self.repo_path
.file_name()
.map(|name| name.to_string_lossy().into_owned())
- .unwrap_or_else(|| self.worktree_path.to_string_lossy().into_owned())
+ .unwrap_or_else(|| self.repo_path.to_string_lossy().into_owned())
}
fn parent_dir(&self) -> Option<String> {
- self.worktree_path
+ self.repo_path
.parent()
.map(|parent| parent.to_string_lossy().into_owned())
}
@@ -652,7 +651,7 @@ impl GitPanel {
let Some(git_repo) = self.active_repository.as_ref() else {
return;
};
- let Some(repo_path) = git_repo.read(cx).project_path_to_repo_path(&path) else {
+ let Some(repo_path) = git_repo.read(cx).project_path_to_repo_path(&path, cx) else {
return;
};
let Some(ix) = self.entry_by_path(&repo_path) else {
@@ -865,7 +864,7 @@ impl GitPanel {
if Some(&entry.repo_path)
== git_repo
.read(cx)
- .project_path_to_repo_path(&project_path)
+ .project_path_to_repo_path(&project_path, cx)
.as_ref()
{
project_diff.focus_handle(cx).focus(window);
@@ -875,31 +874,12 @@ impl GitPanel {
}
};
- if entry.worktree_path.starts_with("..") {
- self.workspace
- .update(cx, |workspace, cx| {
- workspace
- .open_abs_path(
- entry.abs_path.clone(),
- OpenOptions {
- visible: Some(OpenVisible::All),
- focus: Some(false),
- ..Default::default()
- },
- window,
- cx,
- )
- .detach_and_log_err(cx);
- })
- .ok();
- } else {
- self.workspace
- .update(cx, |workspace, cx| {
- ProjectDiff::deploy_at(workspace, Some(entry.clone()), window, cx);
- })
- .ok();
- self.focus_handle.focus(window);
- }
+ self.workspace
+ .update(cx, |workspace, cx| {
+ ProjectDiff::deploy_at(workspace, Some(entry.clone()), window, cx);
+ })
+ .ok();
+ self.focus_handle.focus(window);
Some(())
});
@@ -916,7 +896,7 @@ impl GitPanel {
let active_repo = self.active_repository.as_ref()?;
let path = active_repo
.read(cx)
- .repo_path_to_project_path(&entry.repo_path)?;
+ .repo_path_to_project_path(&entry.repo_path, cx)?;
if entry.status.is_deleted() {
return None;
}
@@ -992,7 +972,7 @@ impl GitPanel {
let active_repo = self.active_repository.clone()?;
let path = active_repo
.read(cx)
- .repo_path_to_project_path(&entry.repo_path)?;
+ .repo_path_to_project_path(&entry.repo_path, cx)?;
let workspace = self.workspace.clone();
if entry.status.staging().has_staged() {
@@ -1052,7 +1032,7 @@ impl GitPanel {
.filter_map(|entry| {
let path = active_repository
.read(cx)
- .repo_path_to_project_path(&entry.repo_path)?;
+ .repo_path_to_project_path(&entry.repo_path, cx)?;
Some(project.open_buffer(path, cx))
})
.collect()
@@ -1218,7 +1198,7 @@ impl GitPanel {
workspace.project().update(cx, |project, cx| {
let project_path = active_repo
.read(cx)
- .repo_path_to_project_path(&entry.repo_path)?;
+ .repo_path_to_project_path(&entry.repo_path, cx)?;
project.delete_file(project_path, true, cx)
})
})
@@ -2295,16 +2275,12 @@ impl GitPanel {
continue;
}
- // dot_git_abs path always has at least one component, namely .git.
let abs_path = repo
- .dot_git_abs_path
- .parent()
- .unwrap()
- .join(&entry.repo_path);
- let worktree_path = repo.repository_entry.unrelativize(&entry.repo_path);
+ .repository_entry
+ .work_directory_abs_path
+ .join(&entry.repo_path.0);
let entry = GitStatusEntry {
repo_path: entry.repo_path.clone(),
- worktree_path,
abs_path,
status: entry.status,
staging,
@@ -2883,7 +2859,6 @@ impl GitPanel {
) -> Option<impl IntoElement> {
let active_repository = self.active_repository.clone()?;
let (can_commit, tooltip) = self.configure_commit_button(cx);
- let project = self.project.clone().read(cx);
let panel_editor_style = panel_editor_style(true, window, cx);
let enable_coauthors = self.render_co_authors(cx);
@@ -2907,7 +2882,7 @@ impl GitPanel {
let display_name = SharedString::from(Arc::from(
active_repository
.read(cx)
- .display_name(project, cx)
+ .display_name()
.trim_end_matches("/"),
));
let editor_is_long = self.commit_editor.update(cx, |editor, cx| {
@@ -3236,7 +3211,8 @@ impl GitPanel {
cx: &App,
) -> Option<AnyElement> {
let repo = self.active_repository.as_ref()?.read(cx);
- let repo_path = repo.worktree_id_path_to_repo_path(file.worktree_id(cx), file.path())?;
+ let project_path = (file.worktree_id(cx), file.path()).into();
+ let repo_path = repo.project_path_to_repo_path(&project_path, cx)?;
let ix = self.entry_by_path(&repo_path)?;
let entry = self.entries.get(ix)?;
@@ -4056,9 +4032,7 @@ impl RenderOnce for PanelRepoFooter {
let single_repo = project
.as_ref()
- .map(|project| {
- filtered_repository_entries(project.read(cx).git_store().read(cx), cx).len() == 1
- })
+ .map(|project| project.read(cx).git_store().read(cx).repositories().len() == 1)
.unwrap_or(true);
const MAX_BRANCH_LEN: usize = 16;
@@ -4558,66 +4532,65 @@ mod tests {
GitListEntry::GitStatusEntry(GitStatusEntry {
abs_path: path!("/root/zed/crates/gpui/gpui.rs").into(),
repo_path: "crates/gpui/gpui.rs".into(),
- worktree_path: Path::new("gpui.rs").into(),
status: StatusCode::Modified.worktree(),
staging: StageStatus::Unstaged,
}),
GitListEntry::GitStatusEntry(GitStatusEntry {
abs_path: path!("/root/zed/crates/util/util.rs").into(),
repo_path: "crates/util/util.rs".into(),
- worktree_path: Path::new("../util/util.rs").into(),
status: StatusCode::Modified.worktree(),
staging: StageStatus::Unstaged,
},),
],
);
- cx.update_window_entity(&panel, |panel, window, cx| {
- panel.select_last(&Default::default(), window, cx);
- assert_eq!(panel.selected_entry, Some(2));
- panel.open_diff(&Default::default(), window, cx);
- });
- cx.run_until_parked();
-
- let worktree_roots = workspace.update(cx, |workspace, cx| {
- workspace
- .worktrees(cx)
- .map(|worktree| worktree.read(cx).abs_path())
- .collect::<Vec<_>>()
- });
- pretty_assertions::assert_eq!(
- worktree_roots,
- vec![
- Path::new(path!("/root/zed/crates/gpui")).into(),
- Path::new(path!("/root/zed/crates/util/util.rs")).into(),
- ]
- );
-
- project.update(cx, |project, cx| {
- let git_store = project.git_store().read(cx);
- // The repo that comes from the single-file worktree can't be selected through the UI.
- let filtered_entries = filtered_repository_entries(git_store, cx)
- .iter()
- .map(|repo| repo.read(cx).worktree_abs_path.clone())
- .collect::<Vec<_>>();
- assert_eq!(
- filtered_entries,
- [Path::new(path!("/root/zed/crates/gpui")).into()]
- );
- // But we can select it artificially here.
- let repo_from_single_file_worktree = git_store
- .repositories()
- .values()
- .find(|repo| {
- repo.read(cx).worktree_abs_path.as_ref()
- == Path::new(path!("/root/zed/crates/util/util.rs"))
- })
- .unwrap()
- .clone();
-
- // Paths still make sense when we somehow activate a repo that comes from a single-file worktree.
- repo_from_single_file_worktree.update(cx, |repo, cx| repo.set_as_active_repository(cx));
- });
+ // TODO(cole) restore this once repository deduplication is implemented properly.
+ //cx.update_window_entity(&panel, |panel, window, cx| {
+ // panel.select_last(&Default::default(), window, cx);
+ // assert_eq!(panel.selected_entry, Some(2));
+ // panel.open_diff(&Default::default(), window, cx);
+ //});
+ //cx.run_until_parked();
+
+ //let worktree_roots = workspace.update(cx, |workspace, cx| {
+ // workspace
+ // .worktrees(cx)
+ // .map(|worktree| worktree.read(cx).abs_path())
+ // .collect::<Vec<_>>()
+ //});
+ //pretty_assertions::assert_eq!(
+ // worktree_roots,
+ // vec![
+ // Path::new(path!("/root/zed/crates/gpui")).into(),
+ // Path::new(path!("/root/zed/crates/util/util.rs")).into(),
+ // ]
+ //);
+
+ //project.update(cx, |project, cx| {
+ // let git_store = project.git_store().read(cx);
+ // // The repo that comes from the single-file worktree can't be selected through the UI.
+ // let filtered_entries = filtered_repository_entries(git_store, cx)
+ // .iter()
+ // .map(|repo| repo.read(cx).worktree_abs_path.clone())
+ // .collect::<Vec<_>>();
+ // assert_eq!(
+ // filtered_entries,
+ // [Path::new(path!("/root/zed/crates/gpui")).into()]
+ // );
+ // // But we can select it artificially here.
+ // let repo_from_single_file_worktree = git_store
+ // .repositories()
+ // .values()
+ // .find(|repo| {
+ // repo.read(cx).worktree_abs_path.as_ref()
+ // == Path::new(path!("/root/zed/crates/util/util.rs"))
+ // })
+ // .unwrap()
+ // .clone();
+
+ // // Paths still make sense when we somehow activate a repo that comes from a single-file worktree.
+ // repo_from_single_file_worktree.update(cx, |repo, cx| repo.set_as_active_repository(cx));
+ //});
let handle = cx.update_window_entity(&panel, |panel, _, _| {
std::mem::replace(&mut panel.update_visible_entries_task, Task::ready(()))
@@ -4634,14 +4607,12 @@ mod tests {
GitListEntry::GitStatusEntry(GitStatusEntry {
abs_path: path!("/root/zed/crates/gpui/gpui.rs").into(),
repo_path: "crates/gpui/gpui.rs".into(),
- worktree_path: Path::new("../../gpui/gpui.rs").into(),
status: StatusCode::Modified.worktree(),
staging: StageStatus::Unstaged,
}),
GitListEntry::GitStatusEntry(GitStatusEntry {
abs_path: path!("/root/zed/crates/util/util.rs").into(),
repo_path: "crates/util/util.rs".into(),
- worktree_path: Path::new("util.rs").into(),
status: StatusCode::Modified.worktree(),
staging: StageStatus::Unstaged,
},),
@@ -343,7 +343,8 @@ impl ProjectDiff {
if !entry.status.has_changes() {
continue;
}
- let Some(project_path) = repo.repo_path_to_project_path(&entry.repo_path) else {
+ let Some(project_path) = repo.repo_path_to_project_path(&entry.repo_path, cx)
+ else {
continue;
};
let namespace = if repo.has_conflict(&entry.repo_path) {
@@ -3,10 +3,7 @@ use gpui::{
};
use itertools::Itertools;
use picker::{Picker, PickerDelegate};
-use project::{
- git_store::{GitStore, Repository},
- Project,
-};
+use project::{git_store::Repository, Project};
use std::sync::Arc;
use ui::{prelude::*, ListItem, ListItemSpacing};
use workspace::{ModalView, Workspace};
@@ -40,21 +37,23 @@ impl RepositorySelector {
cx: &mut Context<Self>,
) -> Self {
let git_store = project_handle.read(cx).git_store().clone();
- let repository_entries = git_store.update(cx, |git_store, cx| {
- filtered_repository_entries(git_store, cx)
+ let repository_entries = git_store.update(cx, |git_store, _cx| {
+ git_store
+ .repositories()
+ .values()
+ .cloned()
+ .collect::<Vec<_>>()
});
- let project = project_handle.read(cx);
let filtered_repositories = repository_entries.clone();
let widest_item_ix = repository_entries.iter().position_max_by(|a, b| {
a.read(cx)
- .display_name(project, cx)
+ .display_name()
.len()
- .cmp(&b.read(cx).display_name(project, cx).len())
+ .cmp(&b.read(cx).display_name().len())
});
let delegate = RepositorySelectorDelegate {
- project: project_handle.downgrade(),
repository_selector: cx.entity().downgrade(),
repository_entries,
filtered_repositories,
@@ -71,36 +70,36 @@ impl RepositorySelector {
}
}
-pub(crate) fn filtered_repository_entries(
- git_store: &GitStore,
- cx: &App,
-) -> Vec<Entity<Repository>> {
- let repositories = git_store
- .repositories()
- .values()
- .sorted_by_key(|repo| {
- let repo = repo.read(cx);
- (
- repo.dot_git_abs_path.clone(),
- repo.worktree_abs_path.clone(),
- )
- })
- .collect::<Vec<&Entity<Repository>>>();
-
- repositories
- .chunk_by(|a, b| a.read(cx).dot_git_abs_path == b.read(cx).dot_git_abs_path)
- .flat_map(|chunk| {
- let has_non_single_file_worktree = chunk
- .iter()
- .any(|repo| !repo.read(cx).is_from_single_file_worktree);
- chunk.iter().filter(move |repo| {
- // Remove any entry that comes from a single file worktree and represents a repository that is also represented by a non-single-file worktree.
- !repo.read(cx).is_from_single_file_worktree || !has_non_single_file_worktree
- })
- })
- .map(|&repo| repo.clone())
- .collect()
-}
+//pub(crate) fn filtered_repository_entries(
+// git_store: &GitStore,
+// cx: &App,
+//) -> Vec<Entity<Repository>> {
+// let repositories = git_store
+// .repositories()
+// .values()
+// .sorted_by_key(|repo| {
+// let repo = repo.read(cx);
+// (
+// repo.dot_git_abs_path.clone(),
+// repo.worktree_abs_path.clone(),
+// )
+// })
+// .collect::<Vec<&Entity<Repository>>>();
+//
+// repositories
+// .chunk_by(|a, b| a.read(cx).dot_git_abs_path == b.read(cx).dot_git_abs_path)
+// .flat_map(|chunk| {
+// let has_non_single_file_worktree = chunk
+// .iter()
+// .any(|repo| !repo.read(cx).is_from_single_file_worktree);
+// chunk.iter().filter(move |repo| {
+// // Remove any entry that comes from a single file worktree and represents a repository that is also represented by a non-single-file worktree.
+// !repo.read(cx).is_from_single_file_worktree || !has_non_single_file_worktree
+// })
+// })
+// .map(|&repo| repo.clone())
+// .collect()
+//}
impl EventEmitter<DismissEvent> for RepositorySelector {}
@@ -119,7 +118,6 @@ impl Render for RepositorySelector {
impl ModalView for RepositorySelector {}
pub struct RepositorySelectorDelegate {
- project: WeakEntity<Project>,
repository_selector: WeakEntity<RepositorySelector>,
repository_entries: Vec<Entity<Repository>>,
filtered_repositories: Vec<Entity<Repository>>,
@@ -225,9 +223,8 @@ impl PickerDelegate for RepositorySelectorDelegate {
_window: &mut Window,
cx: &mut Context<Picker<Self>>,
) -> Option<Self::ListItem> {
- let project = self.project.upgrade()?;
let repo_info = self.filtered_repositories.get(ix)?;
- let display_name = repo_info.read(cx).display_name(project.read(cx), cx);
+ let display_name = repo_info.read(cx).display_name();
Some(
ListItem::new(ix)
.inset(true)
@@ -2555,6 +2555,9 @@ impl OutlinePanel {
let auto_fold_dirs = OutlinePanelSettings::get_global(cx).auto_fold_dirs;
let active_multi_buffer = active_editor.read(cx).buffer().clone();
let new_entries = self.new_entries_for_fs_update.clone();
+ let repo_snapshots = self.project.update(cx, |project, cx| {
+ project.git_store().read(cx).repo_snapshots(cx)
+ });
self.updating_fs_entries = true;
self.fs_entries_update_task = cx.spawn_in(window, async move |outline_panel, cx| {
if let Some(debounce) = debounce {
@@ -2679,13 +2682,15 @@ impl OutlinePanel {
.unwrap_or_default(),
entry,
};
- let mut traversal =
- GitTraversal::new(worktree.traverse_from_path(
+ let mut traversal = GitTraversal::new(
+ &repo_snapshots,
+ worktree.traverse_from_path(
true,
true,
true,
entry.path.as_ref(),
- ));
+ ),
+ );
let mut entries_to_add = HashMap::default();
worktree_excerpts
@@ -3,7 +3,7 @@ pub mod git_traversal;
use crate::{
buffer_store::{BufferStore, BufferStoreEvent},
worktree_store::{WorktreeStore, WorktreeStoreEvent},
- Project, ProjectEnvironment, ProjectItem, ProjectPath,
+ ProjectEnvironment, ProjectItem, ProjectPath,
};
use anyhow::{anyhow, bail, Context as _, Result};
use askpass::{AskPassDelegate, AskPassSession};
@@ -36,7 +36,7 @@ use language::{
};
use parking_lot::Mutex;
use rpc::{
- proto::{self, git_reset, ToProto, SSH_PROJECT_ID},
+ proto::{self, git_reset, FromProto, ToProto, SSH_PROJECT_ID},
AnyProtoClient, TypedEnvelope,
};
use serde::Deserialize;
@@ -48,17 +48,18 @@ use std::{
path::{Path, PathBuf},
sync::Arc,
};
+use sum_tree::TreeSet;
use text::BufferId;
use util::{debug_panic, maybe, ResultExt};
use worktree::{
- File, ProjectEntryId, RepositoryEntry, StatusEntry, UpdatedGitRepositoriesSet, WorkDirectory,
- Worktree,
+ proto_to_branch, File, PathKey, ProjectEntryId, RepositoryEntry, StatusEntry,
+ UpdatedGitRepositoriesSet, Worktree,
};
pub struct GitStore {
state: GitStoreState,
buffer_store: Entity<BufferStore>,
- _worktree_store: Entity<WorktreeStore>,
+ worktree_store: Entity<WorktreeStore>,
repositories: HashMap<ProjectEntryId, Entity<Repository>>,
active_repo_id: Option<ProjectEntryId>,
#[allow(clippy::type_complexity)]
@@ -111,7 +112,7 @@ enum DiffKind {
enum GitStoreState {
Local {
- downstream_client: Option<(AnyProtoClient, ProjectId)>,
+ downstream_client: Option<LocalDownstreamState>,
environment: Entity<ProjectEnvironment>,
fs: Arc<dyn Fs>,
},
@@ -127,23 +128,32 @@ enum GitStoreState {
},
}
+enum DownstreamUpdate {
+ UpdateRepository(RepositoryEntry),
+ RemoveRepository(ProjectEntryId),
+}
+
+struct LocalDownstreamState {
+ client: AnyProtoClient,
+ project_id: ProjectId,
+ updates_tx: mpsc::UnboundedSender<DownstreamUpdate>,
+ _task: Task<Result<()>>,
+}
+
#[derive(Clone)]
pub struct GitStoreCheckpoint {
- checkpoints_by_dot_git_abs_path: HashMap<PathBuf, GitRepositoryCheckpoint>,
+ checkpoints_by_work_dir_abs_path: HashMap<PathBuf, GitRepositoryCheckpoint>,
}
pub struct Repository {
- commit_message_buffer: Option<Entity<Buffer>>,
- git_store: WeakEntity<GitStore>,
- project_environment: Option<WeakEntity<ProjectEnvironment>>,
- pub worktree_id: WorktreeId,
pub repository_entry: RepositoryEntry,
- pub dot_git_abs_path: PathBuf,
- pub worktree_abs_path: Arc<Path>,
- pub is_from_single_file_worktree: bool,
pub merge_message: Option<String>,
pub completed_scan_id: usize,
- git_repo: RepositoryState,
+ commit_message_buffer: Option<Entity<Buffer>>,
+ git_store: WeakEntity<GitStore>,
+ project_environment: Option<WeakEntity<ProjectEnvironment>>,
+ pub worktree_id: Option<WorktreeId>,
+ state: RepositoryState,
job_sender: mpsc::UnboundedSender<GitJob>,
askpass_delegates: Arc<Mutex<HashMap<u64, AskPassDelegate>>>,
latest_askpass_id: u64,
@@ -155,7 +165,6 @@ enum RepositoryState {
Remote {
project_id: ProjectId,
client: AnyProtoClient,
- worktree_id: WorktreeId,
work_directory_id: ProjectEntryId,
},
}
@@ -254,7 +263,7 @@ impl GitStore {
GitStore {
state,
buffer_store,
- _worktree_store: worktree_store,
+ worktree_store,
repositories: HashMap::default(),
active_repo_id: None,
update_sender,
@@ -290,21 +299,84 @@ impl GitStore {
client.add_entity_message_handler(Self::handle_update_diff_bases);
client.add_entity_request_handler(Self::handle_get_permalink_to_line);
client.add_entity_request_handler(Self::handle_blame_buffer);
+ client.add_entity_message_handler(Self::handle_update_repository);
+ client.add_entity_message_handler(Self::handle_remove_repository);
}
pub fn is_local(&self) -> bool {
matches!(self.state, GitStoreState::Local { .. })
}
- pub fn shared(&mut self, remote_id: u64, client: AnyProtoClient, _cx: &mut App) {
+ pub fn shared(&mut self, project_id: u64, client: AnyProtoClient, cx: &mut Context<Self>) {
match &mut self.state {
- GitStoreState::Local {
+ GitStoreState::Ssh {
downstream_client, ..
+ } => {
+ for repo in self.repositories.values() {
+ client
+ .send(repo.read(cx).repository_entry.initial_update(project_id))
+ .log_err();
+ }
+ *downstream_client = Some((client, ProjectId(project_id)));
}
- | GitStoreState::Ssh {
+ GitStoreState::Local {
downstream_client, ..
} => {
- *downstream_client = Some((client, ProjectId(remote_id)));
+ let mut snapshots = HashMap::default();
+ let (updates_tx, mut updates_rx) = mpsc::unbounded();
+ for repo in self.repositories.values() {
+ updates_tx
+ .unbounded_send(DownstreamUpdate::UpdateRepository(
+ repo.read(cx).repository_entry.clone(),
+ ))
+ .ok();
+ }
+ *downstream_client = Some(LocalDownstreamState {
+ client: client.clone(),
+ project_id: ProjectId(project_id),
+ updates_tx,
+ _task: cx.spawn(async move |this, cx| {
+ cx.background_spawn(async move {
+ while let Some(update) = updates_rx.next().await {
+ match update {
+ DownstreamUpdate::UpdateRepository(snapshot) => {
+ if let Some(old_snapshot) =
+ snapshots.get_mut(&snapshot.work_directory_id)
+ {
+ let update =
+ snapshot.build_update(old_snapshot, project_id);
+ *old_snapshot = snapshot;
+ client.send(update)?;
+ } else {
+ let update = snapshot.initial_update(project_id);
+ client.send(update)?;
+ snapshots.insert(snapshot.work_directory_id, snapshot);
+ }
+ }
+ DownstreamUpdate::RemoveRepository(id) => {
+ client.send(proto::RemoveRepository {
+ project_id,
+ id: id.to_proto(),
+ })?;
+ }
+ }
+ }
+ anyhow::Ok(())
+ })
+ .await
+ .ok();
+ this.update(cx, |this, _| {
+ if let GitStoreState::Local {
+ downstream_client, ..
+ } = &mut this.state
+ {
+ downstream_client.take();
+ } else {
+ unreachable!("unshared called on remote store");
+ }
+ })
+ }),
+ });
}
GitStoreState::Remote { .. } => {
debug_panic!("shared called on remote store");
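
In the local arm of GitStore::shared above, every repository change is queued as a DownstreamUpdate, and a background task remembers the last RepositoryEntry it sent per work directory so it can emit either a full initial_update or an incremental build_update. A condensed sketch of that loop (same names as above; the surrounding spawn and error handling are omitted):

    while let Some(update) = updates_rx.next().await {
        match update {
            DownstreamUpdate::UpdateRepository(snapshot) => {
                if let Some(old_snapshot) = snapshots.get_mut(&snapshot.work_directory_id) {
                    // Already sent once: build a delta against the previous snapshot.
                    let update = snapshot.build_update(old_snapshot, project_id);
                    *old_snapshot = snapshot;
                    client.send(update)?;
                } else {
                    // First time this repository reaches the downstream client: send the full state.
                    let update = snapshot.initial_update(project_id);
                    client.send(update)?;
                    snapshots.insert(snapshot.work_directory_id, snapshot);
                }
            }
            DownstreamUpdate::RemoveRepository(id) => {
                client.send(proto::RemoveRepository { project_id, id: id.to_proto() })?;
            }
        }
    }
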
@@ -316,8 +388,10 @@ impl GitStore {
match &mut self.state {
GitStoreState::Local {
downstream_client, ..
+ } => {
+ downstream_client.take();
}
- | GitStoreState::Ssh {
+ GitStoreState::Ssh {
downstream_client, ..
} => {
downstream_client.take();
@@ -540,18 +614,19 @@ impl GitStore {
}
pub fn checkpoint(&self, cx: &App) -> Task<Result<GitStoreCheckpoint>> {
- let mut dot_git_abs_paths = Vec::new();
+ let mut work_directory_abs_paths = Vec::new();
let mut checkpoints = Vec::new();
for repository in self.repositories.values() {
let repository = repository.read(cx);
- dot_git_abs_paths.push(repository.dot_git_abs_path.clone());
+ work_directory_abs_paths
+ .push(repository.repository_entry.work_directory_abs_path.clone());
checkpoints.push(repository.checkpoint().map(|checkpoint| checkpoint?));
}
cx.background_executor().spawn(async move {
let checkpoints = future::try_join_all(checkpoints).await?;
Ok(GitStoreCheckpoint {
- checkpoints_by_dot_git_abs_path: dot_git_abs_paths
+ checkpoints_by_work_dir_abs_path: work_directory_abs_paths
.into_iter()
.zip(checkpoints)
.collect(),
@@ -560,15 +635,23 @@ impl GitStore {
}
pub fn restore_checkpoint(&self, checkpoint: GitStoreCheckpoint, cx: &App) -> Task<Result<()>> {
- let repositories_by_dot_git_abs_path = self
+ let repositories_by_work_dir_abs_path = self
.repositories
.values()
- .map(|repo| (repo.read(cx).dot_git_abs_path.clone(), repo))
+ .map(|repo| {
+ (
+ repo.read(cx)
+ .repository_entry
+ .work_directory_abs_path
+ .clone(),
+ repo,
+ )
+ })
.collect::<HashMap<_, _>>();
let mut tasks = Vec::new();
- for (dot_git_abs_path, checkpoint) in checkpoint.checkpoints_by_dot_git_abs_path {
- if let Some(repository) = repositories_by_dot_git_abs_path.get(&dot_git_abs_path) {
+ for (dot_git_abs_path, checkpoint) in checkpoint.checkpoints_by_work_dir_abs_path {
+ if let Some(repository) = repositories_by_work_dir_abs_path.get(&dot_git_abs_path) {
let restore = repository.read(cx).restore_checkpoint(checkpoint);
tasks.push(async move { restore.await? });
}
@@ -586,19 +669,27 @@ impl GitStore {
mut right: GitStoreCheckpoint,
cx: &App,
) -> Task<Result<bool>> {
- let repositories_by_dot_git_abs_path = self
+ let repositories_by_work_dir_abs_path = self
.repositories
.values()
- .map(|repo| (repo.read(cx).dot_git_abs_path.clone(), repo))
+ .map(|repo| {
+ (
+ repo.read(cx)
+ .repository_entry
+ .work_directory_abs_path
+ .clone(),
+ repo,
+ )
+ })
.collect::<HashMap<_, _>>();
let mut tasks = Vec::new();
- for (dot_git_abs_path, left_checkpoint) in left.checkpoints_by_dot_git_abs_path {
+ for (dot_git_abs_path, left_checkpoint) in left.checkpoints_by_work_dir_abs_path {
if let Some(right_checkpoint) = right
- .checkpoints_by_dot_git_abs_path
+ .checkpoints_by_work_dir_abs_path
.remove(&dot_git_abs_path)
{
- if let Some(repository) = repositories_by_dot_git_abs_path.get(&dot_git_abs_path) {
+ if let Some(repository) = repositories_by_work_dir_abs_path.get(&dot_git_abs_path) {
let compare = repository
.read(cx)
.compare_checkpoints(left_checkpoint, right_checkpoint);
@@ -617,15 +708,25 @@ impl GitStore {
}
pub fn delete_checkpoint(&self, checkpoint: GitStoreCheckpoint, cx: &App) -> Task<Result<()>> {
- let repositories_by_dot_git_abs_path = self
+ let repositories_by_work_directory_abs_path = self
.repositories
.values()
- .map(|repo| (repo.read(cx).dot_git_abs_path.clone(), repo))
+ .map(|repo| {
+ (
+ repo.read(cx)
+ .repository_entry
+ .work_directory_abs_path
+ .clone(),
+ repo,
+ )
+ })
.collect::<HashMap<_, _>>();
let mut tasks = Vec::new();
- for (dot_git_abs_path, checkpoint) in checkpoint.checkpoints_by_dot_git_abs_path {
- if let Some(repository) = repositories_by_dot_git_abs_path.get(&dot_git_abs_path) {
+ for (work_dir_abs_path, checkpoint) in checkpoint.checkpoints_by_work_dir_abs_path {
+ if let Some(repository) =
+ repositories_by_work_directory_abs_path.get(&work_dir_abs_path)
+ {
let delete = repository.read(cx).delete_checkpoint(checkpoint);
tasks.push(async move { delete.await? });
}
@@ -652,7 +753,7 @@ impl GitStore {
Worktree::Local(worktree) => {
let worktree = worktree.snapshot();
let blame_params = maybe!({
- let local_repo = match worktree.local_repo_for_path(&file.path) {
+ let local_repo = match worktree.local_repo_containing_path(&file.path) {
Some(repo_for_path) => repo_for_path,
None => return Ok(None),
};
@@ -713,13 +814,17 @@ impl GitStore {
match file.worktree.read(cx) {
Worktree::Local(worktree) => {
- let worktree_path = worktree.abs_path().clone();
- let Some((repo_entry, repo)) =
- worktree.repository_for_path(&file.path).and_then(|entry| {
- let repo = worktree.get_local_repo(&entry)?.repo().clone();
- Some((entry, repo))
- })
- else {
+ let repository = self
+ .repository_and_path_for_project_path(
+ &(worktree.id(), file.path.clone()).into(),
+ cx,
+ )
+ .map(|(repository, _)| repository);
+ let Some((local_repo_entry, repo_entry)) = repository.and_then(|repository| {
+ let repository = repository.read(cx);
+ let repo_entry = repository.repository_entry.clone();
+ Some((worktree.get_local_repo(&repo_entry)?, repo_entry))
+ }) else {
// If we're not in a Git repo, check whether this is a Rust source
// file in the Cargo registry (presumably opened with go-to-definition
// from a normal Rust file). If so, we can put together a permalink
@@ -730,7 +835,9 @@ impl GitStore {
{
return Task::ready(Err(anyhow!("no permalink available")));
}
- let file_path = worktree_path.join(&file.path);
+ let Some(file_path) = worktree.absolutize(&file.path).ok() else {
+ return Task::ready(Err(anyhow!("no permalink available")));
+ };
return cx.spawn(async move |cx| {
let provider_registry =
cx.update(GitHostingProviderRegistry::default_global)?;
@@ -739,7 +846,7 @@ impl GitStore {
});
};
- let path = match repo_entry.relativize(&file.path) {
+ let path = match local_repo_entry.relativize(&file.path) {
Ok(RepoPath(path)) => path,
Err(e) => return Task::ready(Err(e)),
};
@@ -751,6 +858,7 @@ impl GitStore {
.unwrap_or("origin")
.to_string();
+ let repo = local_repo_entry.repo().clone();
cx.spawn(async move |cx| {
let origin_url = repo
.remote_url(&remote)
@@ -807,8 +915,10 @@ impl GitStore {
match &self.state {
GitStoreState::Local {
downstream_client, ..
- }
- | GitStoreState::Ssh {
+ } => downstream_client
+ .as_ref()
+ .map(|state| (state.client.clone(), state.project_id)),
+ GitStoreState::Ssh {
downstream_client, ..
} => downstream_client.clone(),
GitStoreState::Remote { .. } => None,
@@ -848,10 +958,34 @@ impl GitStore {
worktree_store: Entity<WorktreeStore>,
event: &WorktreeStoreEvent,
cx: &mut Context<Self>,
+ ) {
+ match event {
+ WorktreeStoreEvent::WorktreeUpdatedGitRepositories(worktree_id, changed_repos) => {
+ // We should only get this event for a local project.
+ self.update_repositories(&worktree_store, cx);
+ if self.is_local() {
+ if let Some(worktree) =
+ worktree_store.read(cx).worktree_for_id(*worktree_id, cx)
+ {
+ self.local_worktree_git_repos_changed(worktree, changed_repos, cx);
+ }
+ }
+ cx.emit(GitEvent::GitStateUpdated);
+ }
+ WorktreeStoreEvent::WorktreeAdded(_) => {}
+ _ => {
+ cx.emit(GitEvent::FileSystemUpdated);
+ }
+ }
+ }
+
+ fn update_repositories(
+ &mut self,
+ worktree_store: &Entity<WorktreeStore>,
+ cx: &mut Context<'_, GitStore>,
) {
let mut new_repositories = HashMap::default();
let git_store = cx.weak_entity();
-
worktree_store.update(cx, |worktree_store, cx| {
for worktree in worktree_store.worktrees() {
worktree.update(cx, |worktree, cx| {
@@ -874,7 +1008,6 @@ impl GitStore {
.context("no upstream client")
.log_err()?
.clone(),
- worktree_id: worktree.id(),
work_directory_id: repo_entry.work_directory_id(),
};
Some((git_repo, None))
@@ -884,9 +1017,10 @@ impl GitStore {
continue;
};
- let existing_repo = self.repositories.values().find(|repo| {
- repo.read(cx).id() == (worktree.id(), repo_entry.work_directory_id())
- });
+ let existing_repo = self
+ .repositories
+ .values()
+ .find(|repo| repo.read(cx).id() == repo_entry.work_directory_id());
let repo = if let Some(existing_repo) = existing_repo {
// Update the statuses and merge message but keep everything else.
@@ -901,32 +1035,55 @@ impl GitStore {
existing_repo
} else {
cx.new(|_| Repository {
+ worktree_id: Some(worktree.id()),
project_environment: self
.project_environment()
.as_ref()
.map(|env| env.downgrade()),
git_store: git_store.clone(),
- worktree_id: worktree.id(),
askpass_delegates: Default::default(),
latest_askpass_id: 0,
repository_entry: repo_entry.clone(),
- dot_git_abs_path: worktree
- .dot_git_abs_path(&repo_entry.work_directory),
- worktree_abs_path: worktree.abs_path(),
- is_from_single_file_worktree: worktree.is_single_file(),
- git_repo,
job_sender: self.update_sender.clone(),
merge_message,
commit_message_buffer: None,
completed_scan_id: worktree.completed_scan_id(),
+ state: git_repo,
})
};
+
+ // TODO only send out messages for repository snapshots that have changed
+ let snapshot = repo.read(cx).repository_entry.clone();
+ if let GitStoreState::Local {
+ downstream_client: Some(state),
+ ..
+ } = &self.state
+ {
+ state
+ .updates_tx
+ .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
+ .ok();
+ }
new_repositories.insert(repo_entry.work_directory_id(), repo);
+ self.repositories.remove(&repo_entry.work_directory_id());
}
})
}
});
+ if let GitStoreState::Local {
+ downstream_client: Some(state),
+ ..
+ } = &self.state
+ {
+ for id in self.repositories.keys().cloned() {
+ state
+ .updates_tx
+ .unbounded_send(DownstreamUpdate::RemoveRepository(id))
+ .ok();
+ }
+ }
+
self.repositories = new_repositories;
if let Some(id) = self.active_repo_id.as_ref() {
if !self.repositories.contains_key(id) {
@@ -935,31 +1092,6 @@ impl GitStore {
} else if let Some(&first_id) = self.repositories.keys().next() {
self.active_repo_id = Some(first_id);
}
-
- match event {
- WorktreeStoreEvent::WorktreeUpdatedGitRepositories(_) => {
- cx.emit(GitEvent::GitStateUpdated);
- }
- WorktreeStoreEvent::WorktreeAdded(worktree) => {
- if self.is_local() {
- cx.subscribe(worktree, Self::on_worktree_event).detach();
- }
- }
- _ => {
- cx.emit(GitEvent::FileSystemUpdated);
- }
- }
- }
-
- fn on_worktree_event(
- &mut self,
- worktree: Entity<Worktree>,
- event: &worktree::Event,
- cx: &mut Context<Self>,
- ) {
- if let worktree::Event::UpdatedGitRepositories(changed_repos) = event {
- self.local_worktree_git_repos_changed(worktree, changed_repos, cx);
- }
}
fn on_buffer_store_event(
@@ -1123,7 +1255,7 @@ impl GitStore {
for (buffer, path, current_index_text, current_head_text) in
&repo_diff_state_updates
{
- let Some(local_repo) = snapshot.local_repo_for_path(&path) else {
+ let Some(local_repo) = snapshot.local_repo_containing_path(&path) else {
continue;
};
let Some(relative_path) = local_repo.relativize(&path).ok() else {
@@ -1260,21 +1392,20 @@ impl GitStore {
path: &ProjectPath,
cx: &App,
) -> Option<(Entity<Repository>, RepoPath)> {
- let mut result: Option<(Entity<Repository>, RepoPath)> = None;
- for repo_handle in self.repositories.values() {
- let repo = repo_handle.read(cx);
- if repo.worktree_id == path.worktree_id {
- if let Ok(relative_path) = repo.repository_entry.relativize(&path.path) {
- if result
- .as_ref()
- .is_none_or(|(result, _)| !repo.contains_sub_repo(result, cx))
- {
- result = Some((repo_handle.clone(), relative_path))
- }
- }
- }
- }
- result
+ let abs_path = self.worktree_store.read(cx).absolutize(path, cx)?;
+ self.repositories
+ .values()
+ .filter_map(|repo_handle| {
+ let repo = repo_handle.read(cx);
+ let relative_path = repo.repository_entry.relativize_abs_path(&abs_path)?;
+ Some((repo_handle.clone(), relative_path))
+ })
+ .max_by_key(|(repo, _)| {
+ repo.read(cx)
+ .repository_entry
+ .work_directory_abs_path
+ .clone()
+ })
}
fn spawn_git_worker(cx: &mut Context<GitStore>) -> mpsc::UnboundedSender<GitJob> {
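
repository_and_path_for_project_path no longer assumes a repository lives in the same worktree as the path: it absolutizes the project path, keeps every repository whose work directory contains it (relativize_abs_path returns Some), and takes max_by_key over work_directory_abs_path. All the surviving work directories are ancestors of the same absolute path, so Path's component-wise ordering makes the deepest one the maximum, and nested repositories shadow their parents. A tiny self-contained check of that ordering assumption (hypothetical paths):

    use std::path::Path;

    fn main() {
        // Both of these would "contain" /root/zed/crates/gpui/gpui.rs, and the
        // nested work directory compares greater, so max_by_key picks it.
        let outer = Path::new("/root/zed");
        let inner = Path::new("/root/zed/crates/gpui");
        assert!(inner > outer);
    }
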
@@ -1346,6 +1477,88 @@ impl GitStore {
}
}
+ async fn handle_update_repository(
+ this: Entity<Self>,
+ envelope: TypedEnvelope<proto::UpdateRepository>,
+ mut cx: AsyncApp,
+ ) -> Result<()> {
+ this.update(&mut cx, |this, cx| {
+ let mut update = envelope.payload;
+
+ let work_directory_id = ProjectEntryId::from_proto(update.id);
+ let client = this
+ .upstream_client()
+ .context("no upstream client")?
+ .clone();
+
+ let repo = this
+ .repositories
+ .entry(work_directory_id)
+ .or_insert_with(|| {
+ let git_store = cx.weak_entity();
+
+ cx.new(|_| Repository {
+ commit_message_buffer: None,
+ git_store,
+ project_environment: None,
+ worktree_id: None,
+ repository_entry: RepositoryEntry {
+ work_directory_id,
+ current_branch: None,
+ statuses_by_path: Default::default(),
+ current_merge_conflicts: Default::default(),
+ work_directory_abs_path: update.abs_path.clone().into(),
+ worktree_scan_id: update.scan_id as usize,
+ },
+ merge_message: None,
+ completed_scan_id: update.scan_id as usize,
+ state: RepositoryState::Remote {
+ project_id: ProjectId(update.project_id),
+ client,
+ work_directory_id,
+ },
+ job_sender: this.update_sender.clone(),
+ askpass_delegates: Default::default(),
+ latest_askpass_id: 0,
+ })
+ });
+
+ repo.update(cx, |repo, _cx| repo.apply_remote_update(update.clone()))?;
+ cx.emit(GitEvent::GitStateUpdated);
+ this.active_repo_id.get_or_insert_with(|| {
+ cx.emit(GitEvent::ActiveRepositoryChanged);
+ work_directory_id
+ });
+
+ if let Some((client, project_id)) = this.downstream_client() {
+ update.project_id = project_id.to_proto();
+ client.send(update).log_err();
+ }
+ Ok(())
+ })?
+ }
+
+ async fn handle_remove_repository(
+ this: Entity<Self>,
+ envelope: TypedEnvelope<proto::RemoveRepository>,
+ mut cx: AsyncApp,
+ ) -> Result<()> {
+ this.update(&mut cx, |this, cx| {
+ let mut update = envelope.payload;
+ let id = ProjectEntryId::from_proto(update.id);
+ this.repositories.remove(&id);
+ if let Some((client, project_id)) = this.downstream_client() {
+ update.project_id = project_id.to_proto();
+ client.send(update).log_err();
+ }
+ if this.active_repo_id == Some(id) {
+ this.active_repo_id = None;
+ cx.emit(GitEvent::ActiveRepositoryChanged);
+ }
+ cx.emit(GitEvent::GitStateUpdated);
+ })
+ }
+
async fn handle_git_init(
this: Entity<Self>,
envelope: TypedEnvelope<proto::GitInit>,
@@ -1364,16 +1577,13 @@ impl GitStore {
envelope: TypedEnvelope<proto::Fetch>,
mut cx: AsyncApp,
) -> Result<proto::RemoteMessageResponse> {
- let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
let work_directory_id = ProjectEntryId::from_proto(envelope.payload.work_directory_id);
- let repository_handle =
- Self::repository_for_request(&this, worktree_id, work_directory_id, &mut cx)?;
+ let repository_handle = Self::repository_for_request(&this, work_directory_id, &mut cx)?;
let askpass_id = envelope.payload.askpass_id;
let askpass = make_remote_delegate(
this,
envelope.payload.project_id,
- worktree_id,
work_directory_id,
askpass_id,
&mut cx,
@@ -1396,16 +1606,13 @@ impl GitStore {
envelope: TypedEnvelope<proto::Push>,
mut cx: AsyncApp,
) -> Result<proto::RemoteMessageResponse> {
- let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
let work_directory_id = ProjectEntryId::from_proto(envelope.payload.work_directory_id);
- let repository_handle =
- Self::repository_for_request(&this, worktree_id, work_directory_id, &mut cx)?;
+ let repository_handle = Self::repository_for_request(&this, work_directory_id, &mut cx)?;
let askpass_id = envelope.payload.askpass_id;
let askpass = make_remote_delegate(
this,
envelope.payload.project_id,
- worktree_id,
work_directory_id,
askpass_id,
&mut cx,
@@ -1439,15 +1646,12 @@ impl GitStore {
envelope: TypedEnvelope<proto::Pull>,
mut cx: AsyncApp,
) -> Result<proto::RemoteMessageResponse> {
- let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
let work_directory_id = ProjectEntryId::from_proto(envelope.payload.work_directory_id);
- let repository_handle =
- Self::repository_for_request(&this, worktree_id, work_directory_id, &mut cx)?;
+ let repository_handle = Self::repository_for_request(&this, work_directory_id, &mut cx)?;
let askpass_id = envelope.payload.askpass_id;
let askpass = make_remote_delegate(
this,
envelope.payload.project_id,
- worktree_id,
work_directory_id,
askpass_id,
&mut cx,
@@ -1473,10 +1677,8 @@ impl GitStore {
envelope: TypedEnvelope<proto::Stage>,
mut cx: AsyncApp,
) -> Result<proto::Ack> {
- let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
let work_directory_id = ProjectEntryId::from_proto(envelope.payload.work_directory_id);
- let repository_handle =
- Self::repository_for_request(&this, worktree_id, work_directory_id, &mut cx)?;
+ let repository_handle = Self::repository_for_request(&this, work_directory_id, &mut cx)?;
let entries = envelope
.payload
@@ -1499,10 +1701,8 @@ impl GitStore {
envelope: TypedEnvelope<proto::Unstage>,
mut cx: AsyncApp,
) -> Result<proto::Ack> {
- let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
let work_directory_id = ProjectEntryId::from_proto(envelope.payload.work_directory_id);
- let repository_handle =
- Self::repository_for_request(&this, worktree_id, work_directory_id, &mut cx)?;
+ let repository_handle = Self::repository_for_request(&this, work_directory_id, &mut cx)?;
let entries = envelope
.payload
@@ -1526,10 +1726,8 @@ impl GitStore {
envelope: TypedEnvelope<proto::SetIndexText>,
mut cx: AsyncApp,
) -> Result<proto::Ack> {
- let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
let work_directory_id = ProjectEntryId::from_proto(envelope.payload.work_directory_id);
- let repository_handle =
- Self::repository_for_request(&this, worktree_id, work_directory_id, &mut cx)?;
+ let repository_handle = Self::repository_for_request(&this, work_directory_id, &mut cx)?;
repository_handle
.update(&mut cx, |repository_handle, cx| {
@@ -1548,10 +1746,8 @@ impl GitStore {
envelope: TypedEnvelope<proto::Commit>,
mut cx: AsyncApp,
) -> Result<proto::Ack> {
- let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
let work_directory_id = ProjectEntryId::from_proto(envelope.payload.work_directory_id);
- let repository_handle =
- Self::repository_for_request(&this, worktree_id, work_directory_id, &mut cx)?;
+ let repository_handle = Self::repository_for_request(&this, work_directory_id, &mut cx)?;
let message = SharedString::from(envelope.payload.message);
let name = envelope.payload.name.map(SharedString::from);
@@ -1570,10 +1766,8 @@ impl GitStore {
envelope: TypedEnvelope<proto::GetRemotes>,
mut cx: AsyncApp,
) -> Result<proto::GetRemotesResponse> {
- let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
let work_directory_id = ProjectEntryId::from_proto(envelope.payload.work_directory_id);
- let repository_handle =
- Self::repository_for_request(&this, worktree_id, work_directory_id, &mut cx)?;
+ let repository_handle = Self::repository_for_request(&this, work_directory_id, &mut cx)?;
let branch_name = envelope.payload.branch_name;
@@ -1598,10 +1792,8 @@ impl GitStore {
envelope: TypedEnvelope<proto::GitGetBranches>,
mut cx: AsyncApp,
) -> Result<proto::GitBranchesResponse> {
- let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
let work_directory_id = ProjectEntryId::from_proto(envelope.payload.work_directory_id);
- let repository_handle =
- Self::repository_for_request(&this, worktree_id, work_directory_id, &mut cx)?;
+ let repository_handle = Self::repository_for_request(&this, work_directory_id, &mut cx)?;
let branches = repository_handle
.update(&mut cx, |repository_handle, _| repository_handle.branches())?
@@ -1619,10 +1811,8 @@ impl GitStore {
envelope: TypedEnvelope<proto::GitCreateBranch>,
mut cx: AsyncApp,
) -> Result<proto::Ack> {
- let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
let work_directory_id = ProjectEntryId::from_proto(envelope.payload.work_directory_id);
- let repository_handle =
- Self::repository_for_request(&this, worktree_id, work_directory_id, &mut cx)?;
+ let repository_handle = Self::repository_for_request(&this, work_directory_id, &mut cx)?;
let branch_name = envelope.payload.branch_name;
repository_handle
@@ -1639,10 +1829,8 @@ impl GitStore {
envelope: TypedEnvelope<proto::GitChangeBranch>,
mut cx: AsyncApp,
) -> Result<proto::Ack> {
- let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
let work_directory_id = ProjectEntryId::from_proto(envelope.payload.work_directory_id);
- let repository_handle =
- Self::repository_for_request(&this, worktree_id, work_directory_id, &mut cx)?;
+ let repository_handle = Self::repository_for_request(&this, work_directory_id, &mut cx)?;
let branch_name = envelope.payload.branch_name;
repository_handle
@@ -1659,10 +1847,8 @@ impl GitStore {
envelope: TypedEnvelope<proto::GitShow>,
mut cx: AsyncApp,
) -> Result<proto::GitCommitDetails> {
- let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
let work_directory_id = ProjectEntryId::from_proto(envelope.payload.work_directory_id);
- let repository_handle =
- Self::repository_for_request(&this, worktree_id, work_directory_id, &mut cx)?;
+ let repository_handle = Self::repository_for_request(&this, work_directory_id, &mut cx)?;
let commit = repository_handle
.update(&mut cx, |repository_handle, _| {
@@ -1683,10 +1869,8 @@ impl GitStore {
envelope: TypedEnvelope<proto::GitReset>,
mut cx: AsyncApp,
) -> Result<proto::Ack> {
- let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
let work_directory_id = ProjectEntryId::from_proto(envelope.payload.work_directory_id);
- let repository_handle =
- Self::repository_for_request(&this, worktree_id, work_directory_id, &mut cx)?;
+ let repository_handle = Self::repository_for_request(&this, work_directory_id, &mut cx)?;
let mode = match envelope.payload.mode() {
git_reset::ResetMode::Soft => ResetMode::Soft,
@@ -1706,10 +1890,8 @@ impl GitStore {
envelope: TypedEnvelope<proto::GitCheckoutFiles>,
mut cx: AsyncApp,
) -> Result<proto::Ack> {
- let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
let work_directory_id = ProjectEntryId::from_proto(envelope.payload.work_directory_id);
- let repository_handle =
- Self::repository_for_request(&this, worktree_id, work_directory_id, &mut cx)?;
+ let repository_handle = Self::repository_for_request(&this, work_directory_id, &mut cx)?;
let paths = envelope
.payload
.paths
@@ -1730,10 +1912,8 @@ impl GitStore {
envelope: TypedEnvelope<proto::OpenCommitMessageBuffer>,
mut cx: AsyncApp,
) -> Result<proto::OpenBufferResponse> {
- let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
let work_directory_id = ProjectEntryId::from_proto(envelope.payload.work_directory_id);
- let repository =
- Self::repository_for_request(&this, worktree_id, work_directory_id, &mut cx)?;
+ let repository = Self::repository_for_request(&this, work_directory_id, &mut cx)?;
let buffer = repository
.update(&mut cx, |repository, cx| {
repository.open_commit_buffer(None, this.read(cx).buffer_store.clone(), cx)
@@ -1763,10 +1943,8 @@ impl GitStore {
envelope: TypedEnvelope<proto::AskPassRequest>,
mut cx: AsyncApp,
) -> Result<proto::AskPassResponse> {
- let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
let work_directory_id = ProjectEntryId::from_proto(envelope.payload.work_directory_id);
- let repository =
- Self::repository_for_request(&this, worktree_id, work_directory_id, &mut cx)?;
+ let repository = Self::repository_for_request(&this, work_directory_id, &mut cx)?;
let delegates = cx.update(|cx| repository.read(cx).askpass_delegates.clone())?;
let Some(mut askpass) = delegates.lock().remove(&envelope.payload.askpass_id) else {
@@ -1788,10 +1966,8 @@ impl GitStore {
envelope: TypedEnvelope<proto::CheckForPushedCommits>,
mut cx: AsyncApp,
) -> Result<proto::CheckForPushedCommitsResponse> {
- let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
let work_directory_id = ProjectEntryId::from_proto(envelope.payload.work_directory_id);
- let repository_handle =
- Self::repository_for_request(&this, worktree_id, work_directory_id, &mut cx)?;
+ let repository_handle = Self::repository_for_request(&this, work_directory_id, &mut cx)?;
let branches = repository_handle
.update(&mut cx, |repository_handle, _| {
@@ -1811,10 +1987,8 @@ impl GitStore {
envelope: TypedEnvelope<proto::GitDiff>,
mut cx: AsyncApp,
) -> Result<proto::GitDiffResponse> {
- let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
let work_directory_id = ProjectEntryId::from_proto(envelope.payload.work_directory_id);
- let repository_handle =
- Self::repository_for_request(&this, worktree_id, work_directory_id, &mut cx)?;
+ let repository_handle = Self::repository_for_request(&this, work_directory_id, &mut cx)?;
let diff_type = match envelope.payload.diff_type() {
proto::git_diff::DiffType::HeadToIndex => DiffType::HeadToIndex,
proto::git_diff::DiffType::HeadToWorktree => DiffType::HeadToWorktree,
@@ -1988,7 +2162,6 @@ impl GitStore {
fn repository_for_request(
this: &Entity<Self>,
- worktree_id: WorktreeId,
work_directory_id: ProjectEntryId,
cx: &mut AsyncApp,
) -> Result<Entity<Repository>> {
@@ -1996,17 +2169,23 @@ impl GitStore {
this.repositories
.values()
.find(|repository_handle| {
- repository_handle.read(cx).worktree_id == worktree_id
- && repository_handle
- .read(cx)
- .repository_entry
- .work_directory_id()
- == work_directory_id
+ repository_handle
+ .read(cx)
+ .repository_entry
+ .work_directory_id()
+ == work_directory_id
})
.context("missing repository handle")
.cloned()
})?
}
+
+ pub fn repo_snapshots(&self, cx: &App) -> HashMap<ProjectEntryId, RepositoryEntry> {
+ self.repositories
+ .iter()
+ .map(|(id, repo)| (*id, repo.read(cx).repository_entry.clone()))
+ .collect()
+ }
}
impl BufferDiffState {
@@ -2213,7 +2392,6 @@ impl BufferDiffState {
fn make_remote_delegate(
this: Entity<GitStore>,
project_id: u64,
- worktree_id: WorktreeId,
work_directory_id: ProjectEntryId,
askpass_id: u64,
cx: &mut AsyncApp,
@@ -2225,7 +2403,6 @@ fn make_remote_delegate(
};
let response = client.request(proto::AskPassRequest {
project_id,
- worktree_id: worktree_id.to_proto(),
work_directory_id: work_directory_id.to_proto(),
askpass_id,
prompt,
@@ -2355,8 +2532,8 @@ impl Repository {
self.git_store.upgrade()
}
- fn id(&self) -> (WorktreeId, ProjectEntryId) {
- (self.worktree_id, self.repository_entry.work_directory_id())
+ fn id(&self) -> ProjectEntryId {
+ self.repository_entry.work_directory_id()
}
pub fn current_branch(&self) -> Option<&Branch> {
@@ -2383,7 +2560,7 @@ impl Repository {
R: Send + 'static,
{
let (result_tx, result_rx) = futures::channel::oneshot::channel();
- let git_repo = self.git_repo.clone();
+ let git_repo = self.state.clone();
self.job_sender
.unbounded_send(GitJob {
key,
@@ -2399,23 +2576,15 @@ impl Repository {
result_rx
}
- pub fn display_name(&self, project: &Project, cx: &App) -> SharedString {
- maybe!({
- let project_path = self.repo_path_to_project_path(&"".into())?;
- let worktree_name = project
- .worktree_for_id(project_path.worktree_id, cx)?
- .read(cx)
- .root_name();
-
- let mut path = PathBuf::new();
- path = path.join(worktree_name);
- if project_path.path.components().count() > 0 {
- path = path.join(project_path.path);
- }
- Some(path.to_string_lossy().to_string())
- })
- .unwrap_or_else(|| self.repository_entry.work_directory.display_name())
- .into()
+ /// The name displayed for this repository in the repository selector.
+ pub fn display_name(&self) -> SharedString {
+ self.repository_entry
+ .work_directory_abs_path
+ .file_name()
+ .unwrap_or_default()
+ .to_string_lossy()
+ .to_string()
+ .into()
}
pub fn set_as_active_repository(&self, cx: &mut Context<Self>) {
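Behaviorally, the simplified `display_name` is just the final component of the repository's work-directory path; a sketch with a made-up path:

```rust
use std::path::Path;

/// e.g. "/home/me/projects/zed" -> "zed"
fn display_name_of(work_directory_abs_path: &Path) -> String {
    work_directory_abs_path
        .file_name()
        .unwrap_or_default()
        .to_string_lossy()
        .to_string()
}
```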
@@ -1,23 +1,28 @@
+use collections::HashMap;
use git::status::GitSummary;
use std::{ops::Deref, path::Path};
use sum_tree::Cursor;
use text::Bias;
-use worktree::{Entry, PathProgress, PathTarget, RepositoryEntry, StatusEntry, Traversal};
+use worktree::{
+ Entry, PathProgress, PathTarget, ProjectEntryId, RepositoryEntry, StatusEntry, Traversal,
+};
/// Walks the worktree entries and their associated git statuses.
pub struct GitTraversal<'a> {
traversal: Traversal<'a>,
current_entry_summary: Option<GitSummary>,
- repo_location: Option<(
- &'a RepositoryEntry,
- Cursor<'a, StatusEntry, PathProgress<'a>>,
- )>,
+ repo_snapshots: &'a HashMap<ProjectEntryId, RepositoryEntry>,
+ repo_location: Option<(ProjectEntryId, Cursor<'a, StatusEntry, PathProgress<'a>>)>,
}
impl<'a> GitTraversal<'a> {
- pub fn new(traversal: Traversal<'a>) -> GitTraversal<'a> {
+ pub fn new(
+ repo_snapshots: &'a HashMap<ProjectEntryId, RepositoryEntry>,
+ traversal: Traversal<'a>,
+ ) -> GitTraversal<'a> {
let mut this = GitTraversal {
traversal,
+ repo_snapshots,
current_entry_summary: None,
repo_location: None,
};
@@ -32,7 +37,20 @@ impl<'a> GitTraversal<'a> {
return;
};
- let Some(repo) = self.traversal.snapshot().repository_for_path(&entry.path) else {
+ let Ok(abs_path) = self.traversal.snapshot().absolutize(&entry.path) else {
+ self.repo_location = None;
+ return;
+ };
+
+ let Some((repo, repo_path)) = self
+ .repo_snapshots
+ .values()
+ .filter_map(|repo_snapshot| {
+ let relative_path = repo_snapshot.relativize_abs_path(&abs_path)?;
+ Some((repo_snapshot, relative_path))
+ })
+ .max_by_key(|(repo, _)| repo.work_directory_abs_path.clone())
+ else {
self.repo_location = None;
return;
};
@@ -42,18 +60,19 @@ impl<'a> GitTraversal<'a> {
|| self
.repo_location
.as_ref()
- .map(|(prev_repo, _)| &prev_repo.work_directory)
- != Some(&repo.work_directory)
+ .map(|(prev_repo_id, _)| *prev_repo_id)
+ != Some(repo.work_directory_id())
{
- self.repo_location = Some((repo, repo.statuses_by_path.cursor::<PathProgress>(&())));
+ self.repo_location = Some((
+ repo.work_directory_id(),
+ repo.statuses_by_path.cursor::<PathProgress>(&()),
+ ));
}
- let Some((repo, statuses)) = &mut self.repo_location else {
+ let Some((_, statuses)) = &mut self.repo_location else {
return;
};
- let repo_path = repo.relativize(&entry.path).unwrap();
-
if entry.is_dir() {
let mut statuses = statuses.clone();
statuses.seek_forward(&PathTarget::Path(repo_path.as_ref()), Bias::Left, &());
@@ -128,9 +147,15 @@ pub struct ChildEntriesGitIter<'a> {
}
impl<'a> ChildEntriesGitIter<'a> {
- pub fn new(snapshot: &'a worktree::Snapshot, parent_path: &'a Path) -> Self {
- let mut traversal =
- GitTraversal::new(snapshot.traverse_from_path(true, true, true, parent_path));
+ pub fn new(
+ repo_snapshots: &'a HashMap<ProjectEntryId, RepositoryEntry>,
+ worktree_snapshot: &'a worktree::Snapshot,
+ parent_path: &'a Path,
+ ) -> Self {
+ let mut traversal = GitTraversal::new(
+ repo_snapshots,
+ worktree_snapshot.traverse_from_path(true, true, true, parent_path),
+ );
traversal.advance();
ChildEntriesGitIter {
parent_path,
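As a usage sketch, both iterators now take the repository snapshots alongside the worktree snapshot, as the updated tests below show; the re-export path for `GitTraversal` is an assumption.

```rust
use std::{path::Path, sync::Arc};

use collections::HashMap;
use git::status::GitSummary;
use worktree::{ProjectEntryId, RepositoryEntry};

// Assumed re-export location for the traversal type shown above.
use project::git_store::git_traversal::GitTraversal;

/// Walk a worktree from its root and collect the git summary for each entry.
fn git_summaries(
    repo_snapshots: &HashMap<ProjectEntryId, RepositoryEntry>,
    worktree_snapshot: &worktree::Snapshot,
) -> Vec<(Arc<Path>, GitSummary)> {
    GitTraversal::new(
        repo_snapshots,
        worktree_snapshot.traverse_from_path(true, true, true, Path::new("")),
    )
    .map(|entry| (entry.path.clone(), entry.git_summary))
    .collect()
}
```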
@@ -215,6 +240,8 @@ impl AsRef<Entry> for GitEntry {
mod tests {
use std::time::Duration;
+ use crate::Project;
+
use super::*;
use fs::FakeFs;
use git::status::{FileStatus, StatusCode, TrackedSummary, UnmergedStatus, UnmergedStatusCode};
@@ -222,7 +249,7 @@ mod tests {
use serde_json::json;
use settings::{Settings as _, SettingsStore};
use util::path;
- use worktree::{Worktree, WorktreeSettings};
+ use worktree::WorktreeSettings;
const CONFLICT: FileStatus = FileStatus::Unmerged(UnmergedStatus {
first_head: UnmergedStatusCode::Updated,
@@ -282,44 +309,35 @@ mod tests {
&[(Path::new("z2.txt"), StatusCode::Added.index())],
);
- let tree = Worktree::local(
- Path::new(path!("/root")),
- true,
- fs.clone(),
- Default::default(),
- &mut cx.to_async(),
- )
- .await
- .unwrap();
-
+ let project = Project::test(fs, [path!("/root").as_ref()], cx).await;
cx.executor().run_until_parked();
- let snapshot = tree.read_with(cx, |tree, _| tree.snapshot());
-
- let mut traversal =
- GitTraversal::new(snapshot.traverse_from_path(true, false, true, Path::new("x")));
-
- let entry = traversal.next().unwrap();
- assert_eq!(entry.path.as_ref(), Path::new("x/x1.txt"));
- assert_eq!(entry.git_summary, GitSummary::UNCHANGED);
- let entry = traversal.next().unwrap();
- assert_eq!(entry.path.as_ref(), Path::new("x/x2.txt"));
- assert_eq!(entry.git_summary, MODIFIED);
- let entry = traversal.next().unwrap();
- assert_eq!(entry.path.as_ref(), Path::new("x/y/y1.txt"));
- assert_eq!(entry.git_summary, GitSummary::CONFLICT);
- let entry = traversal.next().unwrap();
- assert_eq!(entry.path.as_ref(), Path::new("x/y/y2.txt"));
- assert_eq!(entry.git_summary, GitSummary::UNCHANGED);
- let entry = traversal.next().unwrap();
- assert_eq!(entry.path.as_ref(), Path::new("x/z.txt"));
- assert_eq!(entry.git_summary, ADDED);
- let entry = traversal.next().unwrap();
- assert_eq!(entry.path.as_ref(), Path::new("z/z1.txt"));
- assert_eq!(entry.git_summary, GitSummary::UNCHANGED);
- let entry = traversal.next().unwrap();
- assert_eq!(entry.path.as_ref(), Path::new("z/z2.txt"));
- assert_eq!(entry.git_summary, ADDED);
+ let (repo_snapshots, worktree_snapshot) = project.read_with(cx, |project, cx| {
+ (
+ project.git_store().read(cx).repo_snapshots(cx),
+ project.worktrees(cx).next().unwrap().read(cx).snapshot(),
+ )
+ });
+
+ let traversal = GitTraversal::new(
+ &repo_snapshots,
+ worktree_snapshot.traverse_from_path(true, false, true, Path::new("x")),
+ );
+ let entries = traversal
+ .map(|entry| (entry.path.clone(), entry.git_summary))
+ .collect::<Vec<_>>();
+ pretty_assertions::assert_eq!(
+ entries,
+ [
+ (Path::new("x/x1.txt").into(), GitSummary::UNCHANGED),
+ (Path::new("x/x2.txt").into(), MODIFIED),
+ (Path::new("x/y/y1.txt").into(), GitSummary::CONFLICT),
+ (Path::new("x/y/y2.txt").into(), GitSummary::UNCHANGED),
+ (Path::new("x/z.txt").into(), ADDED),
+ (Path::new("z/z1.txt").into(), GitSummary::UNCHANGED),
+ (Path::new("z/z2.txt").into(), ADDED),
+ ]
+ )
}
#[gpui::test]
@@ -366,23 +384,20 @@ mod tests {
&[(Path::new("z2.txt"), StatusCode::Added.index())],
);
- let tree = Worktree::local(
- Path::new(path!("/root")),
- true,
- fs.clone(),
- Default::default(),
- &mut cx.to_async(),
- )
- .await
- .unwrap();
-
+ let project = Project::test(fs, [path!("/root").as_ref()], cx).await;
cx.executor().run_until_parked();
- let snapshot = tree.read_with(cx, |tree, _| tree.snapshot());
+ let (repo_snapshots, worktree_snapshot) = project.read_with(cx, |project, cx| {
+ (
+ project.git_store().read(cx).repo_snapshots(cx),
+ project.worktrees(cx).next().unwrap().read(cx).snapshot(),
+ )
+ });
// Sanity check the propagation for x/y and z
check_git_statuses(
- &snapshot,
+ &repo_snapshots,
+ &worktree_snapshot,
&[
(Path::new("x/y"), GitSummary::CONFLICT),
(Path::new("x/y/y1.txt"), GitSummary::CONFLICT),
@@ -390,7 +405,8 @@ mod tests {
],
);
check_git_statuses(
- &snapshot,
+ &repo_snapshots,
+ &worktree_snapshot,
&[
(Path::new("z"), ADDED),
(Path::new("z/z1.txt"), GitSummary::UNCHANGED),
@@ -400,7 +416,8 @@ mod tests {
// Test one of the fundamental cases of propagation blocking, the transition from one git repository to another
check_git_statuses(
- &snapshot,
+ &repo_snapshots,
+ &worktree_snapshot,
&[
(Path::new("x"), MODIFIED + ADDED),
(Path::new("x/y"), GitSummary::CONFLICT),
@@ -410,7 +427,8 @@ mod tests {
// Sanity check everything around it
check_git_statuses(
- &snapshot,
+ &repo_snapshots,
+ &worktree_snapshot,
&[
(Path::new("x"), MODIFIED + ADDED),
(Path::new("x/x1.txt"), GitSummary::UNCHANGED),
@@ -424,7 +442,8 @@ mod tests {
// Test the other fundamental case, transitioning from git repository to non-git repository
check_git_statuses(
- &snapshot,
+ &repo_snapshots,
+ &worktree_snapshot,
&[
(Path::new(""), GitSummary::UNCHANGED),
(Path::new("x"), MODIFIED + ADDED),
@@ -434,7 +453,8 @@ mod tests {
// And all together now
check_git_statuses(
- &snapshot,
+ &repo_snapshots,
+ &worktree_snapshot,
&[
(Path::new(""), GitSummary::UNCHANGED),
(Path::new("x"), MODIFIED + ADDED),
@@ -490,21 +510,19 @@ mod tests {
],
);
- let tree = Worktree::local(
- Path::new(path!("/root")),
- true,
- fs.clone(),
- Default::default(),
- &mut cx.to_async(),
- )
- .await
- .unwrap();
+ let project = Project::test(fs, [path!("/root").as_ref()], cx).await;
cx.executor().run_until_parked();
- let snapshot = tree.read_with(cx, |tree, _| tree.snapshot());
+ let (repo_snapshots, worktree_snapshot) = project.read_with(cx, |project, cx| {
+ (
+ project.git_store().read(cx).repo_snapshots(cx),
+ project.worktrees(cx).next().unwrap().read(cx).snapshot(),
+ )
+ });
check_git_statuses(
- &snapshot,
+ &repo_snapshots,
+ &worktree_snapshot,
&[
(Path::new(""), GitSummary::CONFLICT + MODIFIED + ADDED),
(Path::new("g"), GitSummary::CONFLICT),
@@ -513,7 +531,8 @@ mod tests {
);
check_git_statuses(
- &snapshot,
+ &repo_snapshots,
+ &worktree_snapshot,
&[
(Path::new(""), GitSummary::CONFLICT + ADDED + MODIFIED),
(Path::new("a"), ADDED + MODIFIED),
@@ -530,7 +549,8 @@ mod tests {
);
check_git_statuses(
- &snapshot,
+ &repo_snapshots,
+ &worktree_snapshot,
&[
(Path::new("a/b"), ADDED),
(Path::new("a/b/c1.txt"), ADDED),
@@ -545,7 +565,8 @@ mod tests {
);
check_git_statuses(
- &snapshot,
+ &repo_snapshots,
+ &worktree_snapshot,
&[
(Path::new("a/b/c1.txt"), ADDED),
(Path::new("a/b/c2.txt"), GitSummary::UNCHANGED),
@@ -598,26 +619,25 @@ mod tests {
&[(Path::new("z2.txt"), StatusCode::Modified.index())],
);
- let tree = Worktree::local(
- Path::new(path!("/root")),
- true,
- fs.clone(),
- Default::default(),
- &mut cx.to_async(),
- )
- .await
- .unwrap();
+ let project = Project::test(fs, [path!("/root").as_ref()], cx).await;
cx.executor().run_until_parked();
- let snapshot = tree.read_with(cx, |tree, _| tree.snapshot());
+ let (repo_snapshots, worktree_snapshot) = project.read_with(cx, |project, cx| {
+ (
+ project.git_store().read(cx).repo_snapshots(cx),
+ project.worktrees(cx).next().unwrap().read(cx).snapshot(),
+ )
+ });
check_git_statuses(
- &snapshot,
+ &repo_snapshots,
+ &worktree_snapshot,
&[(Path::new("x"), ADDED), (Path::new("x/x1.txt"), ADDED)],
);
check_git_statuses(
- &snapshot,
+ &repo_snapshots,
+ &worktree_snapshot,
&[
(Path::new("y"), GitSummary::CONFLICT + MODIFIED),
(Path::new("y/y1.txt"), GitSummary::CONFLICT),
@@ -626,7 +646,8 @@ mod tests {
);
check_git_statuses(
- &snapshot,
+ &repo_snapshots,
+ &worktree_snapshot,
&[
(Path::new("z"), MODIFIED),
(Path::new("z/z2.txt"), MODIFIED),
@@ -634,12 +655,14 @@ mod tests {
);
check_git_statuses(
- &snapshot,
+ &repo_snapshots,
+ &worktree_snapshot,
&[(Path::new("x"), ADDED), (Path::new("x/x1.txt"), ADDED)],
);
check_git_statuses(
- &snapshot,
+ &repo_snapshots,
+ &worktree_snapshot,
&[
(Path::new("x"), ADDED),
(Path::new("x/x1.txt"), ADDED),
@@ -689,18 +712,11 @@ mod tests {
);
cx.run_until_parked();
- let tree = Worktree::local(
- path!("/root").as_ref(),
- true,
- fs.clone(),
- Default::default(),
- &mut cx.to_async(),
- )
- .await
- .unwrap();
+ let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
cx.executor().run_until_parked();
- let (old_entry_ids, old_mtimes) = tree.read_with(cx, |tree, _| {
+ let (old_entry_ids, old_mtimes) = project.read_with(cx, |project, cx| {
+ let tree = project.worktrees(cx).next().unwrap().read(cx);
(
tree.entries(true, 0).map(|e| e.id).collect::<Vec<_>>(),
tree.entries(true, 0).map(|e| e.mtime).collect::<Vec<_>>(),
@@ -713,7 +729,8 @@ mod tests {
fs.touch_path(path!("/root")).await;
cx.executor().run_until_parked();
- let (new_entry_ids, new_mtimes) = tree.read_with(cx, |tree, _| {
+ let (new_entry_ids, new_mtimes) = project.read_with(cx, |project, cx| {
+ let tree = project.worktrees(cx).next().unwrap().read(cx);
(
tree.entries(true, 0).map(|e| e.id).collect::<Vec<_>>(),
tree.entries(true, 0).map(|e| e.mtime).collect::<Vec<_>>(),
@@ -734,10 +751,16 @@ mod tests {
cx.executor().run_until_parked();
cx.executor().advance_clock(Duration::from_secs(1));
- let snapshot = tree.read_with(cx, |tree, _| tree.snapshot());
+ let (repo_snapshots, worktree_snapshot) = project.read_with(cx, |project, cx| {
+ (
+ project.git_store().read(cx).repo_snapshots(cx),
+ project.worktrees(cx).next().unwrap().read(cx).snapshot(),
+ )
+ });
check_git_statuses(
- &snapshot,
+ &repo_snapshots,
+ &worktree_snapshot,
&[
(Path::new(""), MODIFIED),
(Path::new("a.txt"), GitSummary::UNCHANGED),
@@ -748,11 +771,14 @@ mod tests {
#[track_caller]
fn check_git_statuses(
- snapshot: &worktree::Snapshot,
+ repo_snapshots: &HashMap<ProjectEntryId, RepositoryEntry>,
+ worktree_snapshot: &worktree::Snapshot,
expected_statuses: &[(&Path, GitSummary)],
) {
- let mut traversal =
- GitTraversal::new(snapshot.traverse_from_path(true, true, false, "".as_ref()));
+ let mut traversal = GitTraversal::new(
+ repo_snapshots,
+ worktree_snapshot.traverse_from_path(true, true, false, "".as_ref()),
+ );
let found_statuses = expected_statuses
.iter()
.map(|&(path, _)| {
@@ -762,6 +788,6 @@ mod tests {
(path, git_entry.git_summary)
})
.collect::<Vec<_>>();
- assert_eq!(found_statuses, expected_statuses);
+ pretty_assertions::assert_eq!(found_statuses, expected_statuses);
}
}
@@ -24,7 +24,7 @@ mod direnv;
mod environment;
use buffer_diff::BufferDiff;
pub use environment::{EnvironmentErrorMessage, ProjectEnvironmentEvent};
-use git_store::Repository;
+use git_store::{GitEvent, Repository};
pub mod search_history;
mod yarn;
@@ -270,7 +270,6 @@ pub enum Event {
WorktreeOrderChanged,
WorktreeRemoved(WorktreeId),
WorktreeUpdatedEntries(WorktreeId, UpdatedEntriesSet),
- WorktreeUpdatedGitRepositories(WorktreeId),
DiskBasedDiagnosticsStarted {
language_server_id: LanguageServerId,
},
@@ -300,6 +299,8 @@ pub enum Event {
RevealInProjectPanel(ProjectEntryId),
SnippetEdit(BufferId, Vec<(lsp::Range, Snippet)>),
ExpandedAllForEntry(WorktreeId, ProjectEntryId),
+ GitStateUpdated,
+ ActiveRepositoryChanged,
}
pub enum DebugAdapterClientState {
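A small sketch of how downstream subscribers are expected to treat the two new project events; the project panel change further down follows the same pattern, and the surrounding subscription wiring is assumed.

```rust
/// Returns true when a view should refresh its git-related state in response
/// to a project event, matching the variants added above.
fn needs_git_refresh(event: &project::Event) -> bool {
    matches!(
        event,
        project::Event::GitStateUpdated | project::Event::ActiveRepositoryChanged
    )
}
```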
@@ -793,8 +794,6 @@ impl Project {
client.add_entity_message_handler(Self::handle_unshare_project);
client.add_entity_request_handler(Self::handle_update_buffer);
client.add_entity_message_handler(Self::handle_update_worktree);
- client.add_entity_message_handler(Self::handle_update_repository);
- client.add_entity_message_handler(Self::handle_remove_repository);
client.add_entity_request_handler(Self::handle_synchronize_buffers);
client.add_entity_request_handler(Self::handle_search_candidate_buffers);
@@ -922,6 +921,7 @@ impl Project {
cx,
)
});
+ cx.subscribe(&git_store, Self::on_git_store_event).detach();
cx.subscribe(&lsp_store, Self::on_lsp_store_event).detach();
@@ -1136,8 +1136,6 @@ impl Project {
ssh_proto.add_entity_message_handler(Self::handle_create_buffer_for_peer);
ssh_proto.add_entity_message_handler(Self::handle_update_worktree);
- ssh_proto.add_entity_message_handler(Self::handle_update_repository);
- ssh_proto.add_entity_message_handler(Self::handle_remove_repository);
ssh_proto.add_entity_message_handler(Self::handle_update_project);
ssh_proto.add_entity_message_handler(Self::handle_toast);
ssh_proto.add_entity_request_handler(Self::handle_language_server_prompt_request);
@@ -2040,6 +2038,11 @@ impl Project {
self.worktree_store.update(cx, |worktree_store, cx| {
worktree_store.send_project_updates(cx);
});
+ if let Some(remote_id) = self.remote_id() {
+ self.git_store.update(cx, |git_store, cx| {
+ git_store.shared(remote_id, self.client.clone().into(), cx)
+ });
+ }
cx.emit(Event::Reshared);
Ok(())
}
@@ -2707,6 +2710,19 @@ impl Project {
}
}
+ fn on_git_store_event(
+ &mut self,
+ _: Entity<GitStore>,
+ event: &GitEvent,
+ cx: &mut Context<Self>,
+ ) {
+ match event {
+ GitEvent::GitStateUpdated => cx.emit(Event::GitStateUpdated),
+ GitEvent::ActiveRepositoryChanged => cx.emit(Event::ActiveRepositoryChanged),
+ GitEvent::FileSystemUpdated | GitEvent::IndexWriteError(_) => {}
+ }
+ }
+
fn on_ssh_event(
&mut self,
_: Entity<SshRemoteClient>,
@@ -2792,12 +2808,11 @@ impl Project {
.report_discovered_project_events(*worktree_id, changes);
cx.emit(Event::WorktreeUpdatedEntries(*worktree_id, changes.clone()))
}
- WorktreeStoreEvent::WorktreeUpdatedGitRepositories(worktree_id) => {
- cx.emit(Event::WorktreeUpdatedGitRepositories(*worktree_id))
- }
WorktreeStoreEvent::WorktreeDeletedEntry(worktree_id, id) => {
cx.emit(Event::DeletedEntry(*worktree_id, *id))
}
+ // Listen to the GitStore instead.
+ WorktreeStoreEvent::WorktreeUpdatedGitRepositories(_, _) => {}
}
}
@@ -4309,43 +4324,7 @@ impl Project {
if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
worktree.update(cx, |worktree, _| {
let worktree = worktree.as_remote_mut().unwrap();
- worktree.update_from_remote(envelope.payload.into());
- });
- }
- Ok(())
- })?
- }
-
- async fn handle_update_repository(
- this: Entity<Self>,
- envelope: TypedEnvelope<proto::UpdateRepository>,
- mut cx: AsyncApp,
- ) -> Result<()> {
- this.update(&mut cx, |this, cx| {
- if let Some((worktree, _relative_path)) =
- this.find_worktree(envelope.payload.abs_path.as_ref(), cx)
- {
- worktree.update(cx, |worktree, _| {
- let worktree = worktree.as_remote_mut().unwrap();
- worktree.update_from_remote(envelope.payload.into());
- });
- }
- Ok(())
- })?
- }
-
- async fn handle_remove_repository(
- this: Entity<Self>,
- envelope: TypedEnvelope<proto::RemoveRepository>,
- mut cx: AsyncApp,
- ) -> Result<()> {
- this.update(&mut cx, |this, cx| {
- if let Some(worktree) =
- this.worktree_for_entry(ProjectEntryId::from_proto(envelope.payload.id), cx)
- {
- worktree.update(cx, |worktree, _| {
- let worktree = worktree.as_remote_mut().unwrap();
- worktree.update_from_remote(envelope.payload.into());
+ worktree.update_from_remote(envelope.payload);
});
}
Ok(())
@@ -6,7 +6,8 @@ use buffer_diff::{
};
use fs::FakeFs;
use futures::{future, StreamExt};
-use gpui::{App, SemanticVersion, UpdateGlobal};
+use git::repository::RepoPath;
+use gpui::{App, BackgroundExecutor, SemanticVersion, UpdateGlobal};
use http_client::Url;
use language::{
language_settings::{language_settings, AllLanguageSettings, LanguageSettingsContent},
@@ -34,6 +35,7 @@ use util::{
test::{marked_text_offsets, TempTree},
uri, TryFutureExt as _,
};
+use worktree::WorktreeModelHandle as _;
#[gpui::test]
async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
@@ -6769,6 +6771,158 @@ async fn test_single_file_diffs(cx: &mut gpui::TestAppContext) {
});
}
+#[gpui::test]
+async fn test_repository_and_path_for_project_path(
+ background_executor: BackgroundExecutor,
+ cx: &mut gpui::TestAppContext,
+) {
+ init_test(cx);
+ let fs = FakeFs::new(background_executor);
+ fs.insert_tree(
+ path!("/root"),
+ json!({
+ "c.txt": "",
+ "dir1": {
+ ".git": {},
+ "deps": {
+ "dep1": {
+ ".git": {},
+ "src": {
+ "a.txt": ""
+ }
+ }
+ },
+ "src": {
+ "b.txt": ""
+ }
+ },
+ }),
+ )
+ .await;
+
+ let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
+ let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
+ let tree_id = tree.read_with(cx, |tree, _| tree.id());
+ tree.read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
+ .await;
+ tree.flush_fs_events(cx).await;
+
+ project.read_with(cx, |project, cx| {
+ let git_store = project.git_store().read(cx);
+ let pairs = [
+ ("c.txt", None),
+ ("dir1/src/b.txt", Some((path!("/root/dir1"), "src/b.txt"))),
+ (
+ "dir1/deps/dep1/src/a.txt",
+ Some((path!("/root/dir1/deps/dep1"), "src/a.txt")),
+ ),
+ ];
+ let expected = pairs
+ .iter()
+ .map(|(path, result)| {
+ (
+ path,
+ result.map(|(repo, repo_path)| {
+ (Path::new(repo).to_owned(), RepoPath::from(repo_path))
+ }),
+ )
+ })
+ .collect::<Vec<_>>();
+ let actual = pairs
+ .iter()
+ .map(|(path, _)| {
+ let project_path = (tree_id, Path::new(path)).into();
+ let result = maybe!({
+ let (repo, repo_path) =
+ git_store.repository_and_path_for_project_path(&project_path, cx)?;
+ Some((
+ repo.read(cx)
+ .repository_entry
+ .work_directory_abs_path
+ .clone(),
+ repo_path,
+ ))
+ });
+ (path, result)
+ })
+ .collect::<Vec<_>>();
+ pretty_assertions::assert_eq!(expected, actual);
+ });
+
+ fs.remove_dir(path!("/root/dir1/.git").as_ref(), RemoveOptions::default())
+ .await
+ .unwrap();
+ tree.flush_fs_events(cx).await;
+
+ project.read_with(cx, |project, cx| {
+ let git_store = project.git_store().read(cx);
+ assert_eq!(
+ git_store.repository_and_path_for_project_path(
+ &(tree_id, Path::new("dir1/src/b.txt")).into(),
+ cx
+ ),
+ None
+ );
+ });
+}
+
+#[gpui::test]
+async fn test_home_dir_as_git_repository(cx: &mut gpui::TestAppContext) {
+ init_test(cx);
+ let fs = FakeFs::new(cx.background_executor.clone());
+ fs.insert_tree(
+ path!("/root"),
+ json!({
+ "home": {
+ ".git": {},
+ "project": {
+ "a.txt": "A"
+ },
+ },
+ }),
+ )
+ .await;
+ fs.set_home_dir(Path::new(path!("/root/home")).to_owned());
+
+ let project = Project::test(fs.clone(), [path!("/root/home/project").as_ref()], cx).await;
+ let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
+ let tree_id = tree.read_with(cx, |tree, _| tree.id());
+ tree.read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
+ .await;
+ tree.flush_fs_events(cx).await;
+
+ project.read_with(cx, |project, cx| {
+ let containing = project
+ .git_store()
+ .read(cx)
+ .repository_and_path_for_project_path(&(tree_id, "a.txt").into(), cx);
+ assert!(containing.is_none());
+ });
+
+ let project = Project::test(fs.clone(), [path!("/root/home").as_ref()], cx).await;
+ let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
+ let tree_id = tree.read_with(cx, |tree, _| tree.id());
+ tree.read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
+ .await;
+ tree.flush_fs_events(cx).await;
+
+ project.read_with(cx, |project, cx| {
+ let containing = project
+ .git_store()
+ .read(cx)
+ .repository_and_path_for_project_path(&(tree_id, "project/a.txt").into(), cx);
+ assert_eq!(
+ containing
+ .unwrap()
+ .0
+ .read(cx)
+ .repository_entry
+ .work_directory_abs_path,
+ Path::new(path!("/root/home"))
+ );
+ });
+}
+
async fn search(
project: &Entity<Project>,
query: SearchQuery,
@@ -26,7 +26,10 @@ use smol::{
};
use text::ReplicaId;
use util::{paths::SanitizedPath, ResultExt};
-use worktree::{Entry, ProjectEntryId, UpdatedEntriesSet, Worktree, WorktreeId, WorktreeSettings};
+use worktree::{
+ Entry, ProjectEntryId, UpdatedEntriesSet, UpdatedGitRepositoriesSet, Worktree, WorktreeId,
+ WorktreeSettings,
+};
use crate::{search::SearchQuery, ProjectPath};
@@ -66,7 +69,7 @@ pub enum WorktreeStoreEvent {
WorktreeOrderChanged,
WorktreeUpdateSent(Entity<Worktree>),
WorktreeUpdatedEntries(WorktreeId, UpdatedEntriesSet),
- WorktreeUpdatedGitRepositories(WorktreeId),
+ WorktreeUpdatedGitRepositories(WorktreeId, UpdatedGitRepositoriesSet),
WorktreeDeletedEntry(WorktreeId, ProjectEntryId),
}
@@ -156,6 +159,11 @@ impl WorktreeStore {
None
}
+ pub fn absolutize(&self, project_path: &ProjectPath, cx: &App) -> Option<PathBuf> {
+ let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
+ worktree.read(cx).absolutize(&project_path.path).ok()
+ }
+
pub fn find_or_create_worktree(
&mut self,
abs_path: impl AsRef<Path>,
@@ -367,9 +375,10 @@ impl WorktreeStore {
changes.clone(),
));
}
- worktree::Event::UpdatedGitRepositories(_) => {
+ worktree::Event::UpdatedGitRepositories(set) => {
cx.emit(WorktreeStoreEvent::WorktreeUpdatedGitRepositories(
worktree_id,
+ set.clone(),
));
}
worktree::Event::DeletedEntry(id) => {
@@ -561,44 +570,12 @@ impl WorktreeStore {
let client = client.clone();
async move {
if client.is_via_collab() {
- match update {
- proto::WorktreeRelatedMessage::UpdateWorktree(
- update,
- ) => {
- client
- .request(update)
- .map(|result| result.log_err().is_some())
- .await
- }
- proto::WorktreeRelatedMessage::UpdateRepository(
- update,
- ) => {
- client
- .request(update)
- .map(|result| result.log_err().is_some())
- .await
- }
- proto::WorktreeRelatedMessage::RemoveRepository(
- update,
- ) => {
- client
- .request(update)
- .map(|result| result.log_err().is_some())
- .await
- }
- }
+ client
+ .request(update)
+ .map(|result| result.log_err().is_some())
+ .await
} else {
- match update {
- proto::WorktreeRelatedMessage::UpdateWorktree(
- update,
- ) => client.send(update).log_err().is_some(),
- proto::WorktreeRelatedMessage::UpdateRepository(
- update,
- ) => client.send(update).log_err().is_some(),
- proto::WorktreeRelatedMessage::RemoveRepository(
- update,
- ) => client.send(update).log_err().is_some(),
- }
+ client.send(update).log_err().is_some()
}
}
}
@@ -334,7 +334,8 @@ impl ProjectPanel {
this.update_visible_entries(None, cx);
cx.notify();
}
- project::Event::WorktreeUpdatedGitRepositories(_)
+ project::Event::GitStateUpdated
+ | project::Event::ActiveRepositoryChanged
| project::Event::WorktreeUpdatedEntries(_, _)
| project::Event::WorktreeAdded(_)
| project::Event::WorktreeOrderChanged => {
@@ -1553,6 +1554,7 @@ impl ProjectPanel {
.map(|entry| entry.worktree_id)
.filter_map(|id| project.worktree_for_id(id, cx).map(|w| (id, w.read(cx))))
.max_by(|(_, a), (_, b)| a.root_name().cmp(b.root_name()))?;
+ let git_store = project.git_store().read(cx);
let marked_entries_in_worktree = sanitized_entries
.iter()
@@ -1577,18 +1579,20 @@ impl ProjectPanel {
let parent_entry = worktree.entry_for_path(parent_path)?;
// Remove all siblings that are being deleted except the last marked entry
- let snapshot = worktree.snapshot();
+ let repo_snapshots = git_store.repo_snapshots(cx);
+ let worktree_snapshot = worktree.snapshot();
let hide_gitignore = ProjectPanelSettings::get_global(cx).hide_gitignore;
- let mut siblings: Vec<_> = ChildEntriesGitIter::new(&snapshot, parent_path)
- .filter(|sibling| {
- (sibling.id == latest_entry.id)
- || (!marked_entries_in_worktree.contains(&&SelectedEntry {
- worktree_id,
- entry_id: sibling.id,
- }) && (!hide_gitignore || !sibling.is_ignored))
- })
- .map(|entry| entry.to_owned())
- .collect();
+ let mut siblings: Vec<_> =
+ ChildEntriesGitIter::new(&repo_snapshots, &worktree_snapshot, parent_path)
+ .filter(|sibling| {
+ (sibling.id == latest_entry.id)
+ || (!marked_entries_in_worktree.contains(&&SelectedEntry {
+ worktree_id,
+ entry_id: sibling.id,
+ }) && (!hide_gitignore || !sibling.is_ignored))
+ })
+ .map(|entry| entry.to_owned())
+ .collect();
project::sort_worktree_entries(&mut siblings);
let sibling_entry_index = siblings
@@ -2605,6 +2609,7 @@ impl ProjectPanel {
let auto_collapse_dirs = settings.auto_fold_dirs;
let hide_gitignore = settings.hide_gitignore;
let project = self.project.read(cx);
+ let repo_snapshots = project.git_store().read(cx).repo_snapshots(cx);
self.last_worktree_root_id = project
.visible_worktrees(cx)
.next_back()
@@ -2615,15 +2620,15 @@ impl ProjectPanel {
self.visible_entries.clear();
let mut max_width_item = None;
for worktree in project.visible_worktrees(cx) {
- let snapshot = worktree.read(cx).snapshot();
- let worktree_id = snapshot.id();
+ let worktree_snapshot = worktree.read(cx).snapshot();
+ let worktree_id = worktree_snapshot.id();
let expanded_dir_ids = match self.expanded_dir_ids.entry(worktree_id) {
hash_map::Entry::Occupied(e) => e.into_mut(),
hash_map::Entry::Vacant(e) => {
// The first time a worktree's root entry becomes available,
// mark that root entry as expanded.
- if let Some(entry) = snapshot.root_entry() {
+ if let Some(entry) = worktree_snapshot.root_entry() {
e.insert(vec![entry.id]).as_slice()
} else {
&[]
@@ -2645,14 +2650,15 @@ impl ProjectPanel {
}
let mut visible_worktree_entries = Vec::new();
- let mut entry_iter = GitTraversal::new(snapshot.entries(true, 0));
+ let mut entry_iter =
+ GitTraversal::new(&repo_snapshots, worktree_snapshot.entries(true, 0));
let mut auto_folded_ancestors = vec![];
while let Some(entry) = entry_iter.entry() {
if auto_collapse_dirs && entry.kind.is_dir() {
auto_folded_ancestors.push(entry.id);
if !self.unfolded_dir_ids.contains(&entry.id) {
- if let Some(root_path) = snapshot.root_entry() {
- let mut child_entries = snapshot.child_entries(&entry.path);
+ if let Some(root_path) = worktree_snapshot.root_entry() {
+ let mut child_entries = worktree_snapshot.child_entries(&entry.path);
if let Some(child) = child_entries.next() {
if entry.path != root_path.path
&& child_entries.next().is_none()
@@ -3297,10 +3303,16 @@ impl ProjectPanel {
.cloned();
}
+ let repo_snapshots = self
+ .project
+ .read(cx)
+ .git_store()
+ .read(cx)
+ .repo_snapshots(cx);
let worktree = self.project.read(cx).worktree_for_id(worktree_id, cx)?;
worktree.update(cx, |tree, _| {
utils::ReversibleIterable::new(
- GitTraversal::new(tree.entries(true, 0usize)),
+ GitTraversal::new(&repo_snapshots, tree.entries(true, 0usize)),
reverse_search,
)
.find_single_ended(|ele| predicate(*ele, worktree_id))
@@ -3320,6 +3332,12 @@ impl ProjectPanel {
.iter()
.map(|(worktree_id, _, _)| *worktree_id)
.collect();
+ let repo_snapshots = self
+ .project
+ .read(cx)
+ .git_store()
+ .read(cx)
+ .repo_snapshots(cx);
let mut last_found: Option<SelectedEntry> = None;
@@ -3334,12 +3352,10 @@ impl ProjectPanel {
let root_entry = tree.root_entry()?;
let tree_id = tree.id();
- let mut first_iter = GitTraversal::new(tree.traverse_from_path(
- true,
- true,
- true,
- entry.path.as_ref(),
- ));
+ let mut first_iter = GitTraversal::new(
+ &repo_snapshots,
+ tree.traverse_from_path(true, true, true, entry.path.as_ref()),
+ );
if reverse_search {
first_iter.next();
@@ -3352,7 +3368,7 @@ impl ProjectPanel {
.find(|ele| predicate(*ele, tree_id))
.map(|ele| ele.to_owned());
- let second_iter = GitTraversal::new(tree.entries(true, 0usize));
+ let second_iter = GitTraversal::new(&repo_snapshots, tree.entries(true, 0usize));
let second = if reverse_search {
second_iter
@@ -2240,7 +2240,7 @@ message OpenUncommittedDiffResponse {
message SetIndexText {
uint64 project_id = 1;
- uint64 worktree_id = 2;
+ reserved 2;
uint64 work_directory_id = 3;
string path = 4;
optional string text = 5;
@@ -3350,7 +3350,7 @@ message GetPanicFiles {
message GitShow {
uint64 project_id = 1;
- uint64 worktree_id = 2;
+ reserved 2;
uint64 work_directory_id = 3;
string commit = 4;
}
@@ -3365,7 +3365,7 @@ message GitCommitDetails {
message GitReset {
uint64 project_id = 1;
- uint64 worktree_id = 2;
+ reserved 2;
uint64 work_directory_id = 3;
string commit = 4;
ResetMode mode = 5;
@@ -3377,7 +3377,7 @@ message GitReset {
message GitCheckoutFiles {
uint64 project_id = 1;
- uint64 worktree_id = 2;
+ reserved 2;
uint64 work_directory_id = 3;
string commit = 4;
repeated string paths = 5;
@@ -3432,21 +3432,21 @@ message RegisterBufferWithLanguageServers{
message Stage {
uint64 project_id = 1;
- uint64 worktree_id = 2;
+ reserved 2;
uint64 work_directory_id = 3;
repeated string paths = 4;
}
message Unstage {
uint64 project_id = 1;
- uint64 worktree_id = 2;
+ reserved 2;
uint64 work_directory_id = 3;
repeated string paths = 4;
}
message Commit {
uint64 project_id = 1;
- uint64 worktree_id = 2;
+ reserved 2;
uint64 work_directory_id = 3;
optional string name = 4;
optional string email = 5;
@@ -3455,13 +3455,13 @@ message Commit {
message OpenCommitMessageBuffer {
uint64 project_id = 1;
- uint64 worktree_id = 2;
+ reserved 2;
uint64 work_directory_id = 3;
}
message Push {
uint64 project_id = 1;
- uint64 worktree_id = 2;
+ reserved 2;
uint64 work_directory_id = 3;
string remote_name = 4;
string branch_name = 5;
@@ -3476,14 +3476,14 @@ message Push {
message Fetch {
uint64 project_id = 1;
- uint64 worktree_id = 2;
+ reserved 2;
uint64 work_directory_id = 3;
uint64 askpass_id = 4;
}
message GetRemotes {
uint64 project_id = 1;
- uint64 worktree_id = 2;
+ reserved 2;
uint64 work_directory_id = 3;
optional string branch_name = 4;
}
@@ -3498,7 +3498,7 @@ message GetRemotesResponse {
message Pull {
uint64 project_id = 1;
- uint64 worktree_id = 2;
+ reserved 2;
uint64 work_directory_id = 3;
string remote_name = 4;
string branch_name = 5;
@@ -3512,7 +3512,7 @@ message RemoteMessageResponse {
message AskPassRequest {
uint64 project_id = 1;
- uint64 worktree_id = 2;
+ reserved 2;
uint64 work_directory_id = 3;
uint64 askpass_id = 4;
string prompt = 5;
@@ -3524,27 +3524,27 @@ message AskPassResponse {
message GitGetBranches {
uint64 project_id = 1;
- uint64 worktree_id = 2;
+ reserved 2;
uint64 work_directory_id = 3;
}
message GitCreateBranch {
uint64 project_id = 1;
- uint64 worktree_id = 2;
+ reserved 2;
uint64 work_directory_id = 3;
string branch_name = 4;
}
message GitChangeBranch {
uint64 project_id = 1;
- uint64 worktree_id = 2;
+ reserved 2;
uint64 work_directory_id = 3;
string branch_name = 4;
}
message CheckForPushedCommits {
uint64 project_id = 1;
- uint64 worktree_id = 2;
+ reserved 2;
uint64 work_directory_id = 3;
}
@@ -3554,7 +3554,7 @@ message CheckForPushedCommitsResponse {
message GitDiff {
uint64 project_id = 1;
- uint64 worktree_id = 2;
+ reserved 2;
uint64 work_directory_id = 3;
DiffType diff_type = 4;
@@ -793,31 +793,6 @@ pub const MAX_WORKTREE_UPDATE_MAX_CHUNK_SIZE: usize = 2;
#[cfg(not(any(test, feature = "test-support")))]
pub const MAX_WORKTREE_UPDATE_MAX_CHUNK_SIZE: usize = 256;
-#[derive(Clone, Debug)]
-pub enum WorktreeRelatedMessage {
- UpdateWorktree(UpdateWorktree),
- UpdateRepository(UpdateRepository),
- RemoveRepository(RemoveRepository),
-}
-
-impl From<UpdateWorktree> for WorktreeRelatedMessage {
- fn from(value: UpdateWorktree) -> Self {
- Self::UpdateWorktree(value)
- }
-}
-
-impl From<UpdateRepository> for WorktreeRelatedMessage {
- fn from(value: UpdateRepository) -> Self {
- Self::UpdateRepository(value)
- }
-}
-
-impl From<RemoveRepository> for WorktreeRelatedMessage {
- fn from(value: RemoveRepository) -> Self {
- Self::RemoveRepository(value)
- }
-}
-
pub fn split_worktree_update(mut message: UpdateWorktree) -> impl Iterator<Item = UpdateWorktree> {
let mut done = false;
@@ -924,20 +899,6 @@ pub fn split_repository_update(
})
}
-pub fn split_worktree_related_message(
- message: WorktreeRelatedMessage,
-) -> Box<dyn Iterator<Item = WorktreeRelatedMessage> + Send> {
- match message {
- WorktreeRelatedMessage::UpdateWorktree(message) => {
- Box::new(split_worktree_update(message).map(WorktreeRelatedMessage::UpdateWorktree))
- }
- WorktreeRelatedMessage::UpdateRepository(message) => {
- Box::new(split_repository_update(message).map(WorktreeRelatedMessage::UpdateRepository))
- }
- WorktreeRelatedMessage::RemoveRepository(update) => Box::new([update.into()].into_iter()),
- }
-}
-
#[cfg(test)]
mod tests {
use super::*;
@@ -41,7 +41,7 @@ use postage::{
watch,
};
use rpc::{
- proto::{self, split_worktree_related_message, FromProto, ToProto, WorktreeRelatedMessage},
+ proto::{self, split_worktree_update, FromProto, ToProto},
AnyProtoClient,
};
pub use settings::WorktreeId;
@@ -138,12 +138,12 @@ struct ScanRequest {
pub struct RemoteWorktree {
snapshot: Snapshot,
- background_snapshot: Arc<Mutex<(Snapshot, Vec<WorktreeRelatedMessage>)>>,
+ background_snapshot: Arc<Mutex<(Snapshot, Vec<proto::UpdateWorktree>)>>,
project_id: u64,
client: AnyProtoClient,
file_scan_inclusions: PathMatcher,
- updates_tx: Option<UnboundedSender<WorktreeRelatedMessage>>,
- update_observer: Option<mpsc::UnboundedSender<WorktreeRelatedMessage>>,
+ updates_tx: Option<UnboundedSender<proto::UpdateWorktree>>,
+ update_observer: Option<mpsc::UnboundedSender<proto::UpdateWorktree>>,
snapshot_subscriptions: VecDeque<(usize, oneshot::Sender<()>)>,
replica_id: ReplicaId,
visible: bool,
@@ -196,28 +196,25 @@ pub struct RepositoryEntry {
/// - my_sub_folder_1/project_root/changed_file_1
/// - my_sub_folder_2/changed_file_2
pub statuses_by_path: SumTree<StatusEntry>,
- work_directory_id: ProjectEntryId,
- pub work_directory: WorkDirectory,
- work_directory_abs_path: PathBuf,
- pub(crate) current_branch: Option<Branch>,
+ pub work_directory_id: ProjectEntryId,
+ pub work_directory_abs_path: PathBuf,
+ pub worktree_scan_id: usize,
+ pub current_branch: Option<Branch>,
pub current_merge_conflicts: TreeSet<RepoPath>,
}
impl RepositoryEntry {
- pub fn relativize(&self, path: &Path) -> Result<RepoPath> {
- self.work_directory.relativize(path)
- }
-
- pub fn try_unrelativize(&self, path: &RepoPath) -> Option<Arc<Path>> {
- self.work_directory.try_unrelativize(path)
- }
-
- pub fn unrelativize(&self, path: &RepoPath) -> Arc<Path> {
- self.work_directory.unrelativize(path)
+ pub fn relativize_abs_path(&self, abs_path: &Path) -> Option<RepoPath> {
+ Some(
+ abs_path
+ .strip_prefix(&self.work_directory_abs_path)
+ .ok()?
+ .into(),
+ )
}
- pub fn directory_contains(&self, path: impl AsRef<Path>) -> bool {
- self.work_directory.directory_contains(path)
+ pub fn directory_contains_abs_path(&self, abs_path: impl AsRef<Path>) -> bool {
+ abs_path.as_ref().starts_with(&self.work_directory_abs_path)
}
pub fn branch(&self) -> Option<&Branch> {
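A brief sketch of the new absolute-path helpers on `RepositoryEntry`; the sample paths in the comment are illustrative only.

```rust
use std::path::Path;

use git::repository::RepoPath;
use worktree::RepositoryEntry;

/// Returns the repo-relative path if `abs_path` lies inside the repository's
/// work directory, e.g. "/root/dir1" + "/root/dir1/src/b.txt" -> "src/b.txt".
fn repo_relative(repo: &RepositoryEntry, abs_path: &Path) -> Option<RepoPath> {
    if repo.directory_contains_abs_path(abs_path) {
        repo.relativize_abs_path(abs_path)
    } else {
        None
    }
}
```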
@@ -246,11 +243,7 @@ impl RepositoryEntry {
.cloned()
}
- pub fn initial_update(
- &self,
- project_id: u64,
- worktree_scan_id: usize,
- ) -> proto::UpdateRepository {
+ pub fn initial_update(&self, project_id: u64) -> proto::UpdateRepository {
proto::UpdateRepository {
branch_summary: self.current_branch.as_ref().map(branch_to_proto),
updated_statuses: self
@@ -274,16 +267,11 @@ impl RepositoryEntry {
entry_ids: vec![self.work_directory_id().to_proto()],
// This is also semantically wrong, and should be replaced once we separate git repo updates
// from worktree scans.
- scan_id: worktree_scan_id as u64,
+ scan_id: self.worktree_scan_id as u64,
}
}
- pub fn build_update(
- &self,
- old: &Self,
- project_id: u64,
- scan_id: usize,
- ) -> proto::UpdateRepository {
+ pub fn build_update(&self, old: &Self, project_id: u64) -> proto::UpdateRepository {
let mut updated_statuses: Vec<proto::StatusEntry> = Vec::new();
let mut removed_statuses: Vec<String> = Vec::new();
@@ -338,7 +326,7 @@ impl RepositoryEntry {
id: self.work_directory_id.to_proto(),
abs_path: self.work_directory_abs_path.as_path().to_proto(),
entry_ids: vec![self.work_directory_id.to_proto()],
- scan_id: scan_id as u64,
+ scan_id: self.worktree_scan_id as u64,
}
}
}
@@ -428,28 +416,21 @@ impl WorkDirectory {
}
}
- #[cfg(test)]
- fn canonicalize(&self) -> Self {
- match self {
- WorkDirectory::InProject { relative_path } => WorkDirectory::InProject {
- relative_path: relative_path.clone(),
- },
- WorkDirectory::AboveProject {
- absolute_path,
- location_in_repo,
- } => WorkDirectory::AboveProject {
- absolute_path: absolute_path.canonicalize().unwrap().into(),
- location_in_repo: location_in_repo.clone(),
- },
- }
- }
-
- pub fn is_above_project(&self) -> bool {
- match self {
- WorkDirectory::InProject { .. } => false,
- WorkDirectory::AboveProject { .. } => true,
- }
- }
+ //#[cfg(test)]
+ //fn canonicalize(&self) -> Self {
+ // match self {
+ // WorkDirectory::InProject { relative_path } => WorkDirectory::InProject {
+ // relative_path: relative_path.clone(),
+ // },
+ // WorkDirectory::AboveProject {
+ // absolute_path,
+ // location_in_repo,
+ // } => WorkDirectory::AboveProject {
+ // absolute_path: absolute_path.canonicalize().unwrap().into(),
+ // location_in_repo: location_in_repo.clone(),
+ // },
+ // }
+ //}
fn path_key(&self) -> PathKey {
match self {
@@ -699,8 +680,7 @@ enum ScanState {
}
struct UpdateObservationState {
- snapshots_tx:
- mpsc::UnboundedSender<(LocalSnapshot, UpdatedEntriesSet, UpdatedGitRepositoriesSet)>,
+ snapshots_tx: mpsc::UnboundedSender<(LocalSnapshot, UpdatedEntriesSet)>,
resume_updates: watch::Sender<()>,
_maintain_remote_snapshot: Task<Option<()>>,
}
@@ -824,10 +804,10 @@ impl Worktree {
let background_snapshot = Arc::new(Mutex::new((
snapshot.clone(),
- Vec::<WorktreeRelatedMessage>::new(),
+ Vec::<proto::UpdateWorktree>::new(),
)));
let (background_updates_tx, mut background_updates_rx) =
- mpsc::unbounded::<WorktreeRelatedMessage>();
+ mpsc::unbounded::<proto::UpdateWorktree>();
let (mut snapshot_updated_tx, mut snapshot_updated_rx) = watch::channel();
let worktree_id = snapshot.id();
@@ -872,25 +852,14 @@ impl Worktree {
cx.spawn(async move |this, cx| {
while (snapshot_updated_rx.recv().await).is_some() {
this.update(cx, |this, cx| {
- let mut git_repos_changed = false;
let mut entries_changed = false;
let this = this.as_remote_mut().unwrap();
{
let mut lock = this.background_snapshot.lock();
this.snapshot = lock.0.clone();
for update in lock.1.drain(..) {
- entries_changed |= match &update {
- WorktreeRelatedMessage::UpdateWorktree(update_worktree) => {
- !update_worktree.updated_entries.is_empty()
- || !update_worktree.removed_entries.is_empty()
- }
- _ => false,
- };
- git_repos_changed |= matches!(
- update,
- WorktreeRelatedMessage::UpdateRepository(_)
- | WorktreeRelatedMessage::RemoveRepository(_)
- );
+ entries_changed |= !update.updated_entries.is_empty()
+ || !update.removed_entries.is_empty();
if let Some(tx) = &this.update_observer {
tx.unbounded_send(update).ok();
}
@@ -900,9 +869,6 @@ impl Worktree {
if entries_changed {
cx.emit(Event::UpdatedEntries(Arc::default()));
}
- if git_repos_changed {
- cx.emit(Event::UpdatedGitRepositories(Arc::default()));
- }
cx.notify();
while let Some((scan_id, _)) = this.snapshot_subscriptions.front() {
if this.observed_snapshot(*scan_id) {
@@ -1027,7 +993,7 @@ impl Worktree {
pub fn observe_updates<F, Fut>(&mut self, project_id: u64, cx: &Context<Worktree>, callback: F)
where
- F: 'static + Send + Fn(WorktreeRelatedMessage) -> Fut,
+ F: 'static + Send + Fn(proto::UpdateWorktree) -> Fut,
Fut: 'static + Send + Future<Output = bool>,
{
match self {
@@ -1070,7 +1036,7 @@ impl Worktree {
let path = Arc::from(path);
let snapshot = this.snapshot();
cx.spawn(async move |cx| {
- if let Some(repo) = snapshot.repository_for_path(&path) {
+ if let Some(repo) = snapshot.local_repo_containing_path(&path) {
if let Some(repo_path) = repo.relativize(&path).log_err() {
if let Some(git_repo) =
snapshot.git_repositories.get(&repo.work_directory_id)
@@ -1097,7 +1063,7 @@ impl Worktree {
let path = Arc::from(path);
let snapshot = this.snapshot();
cx.spawn(async move |cx| {
- if let Some(repo) = snapshot.repository_for_path(&path) {
+ if let Some(repo) = snapshot.local_repo_containing_path(&path) {
if let Some(repo_path) = repo.relativize(&path).log_err() {
if let Some(git_repo) =
snapshot.git_repositories.get(&repo.work_directory_id)
@@ -1611,11 +1577,7 @@ impl LocalWorktree {
if let Some(share) = self.update_observer.as_mut() {
share
.snapshots_tx
- .unbounded_send((
- self.snapshot.clone(),
- entry_changes.clone(),
- repo_changes.clone(),
- ))
+ .unbounded_send((self.snapshot.clone(), entry_changes.clone()))
.ok();
}
@@ -1656,10 +1618,8 @@ impl LocalWorktree {
|| new_repo.status_scan_id != old_repo.status_scan_id
{
if let Some(entry) = new_snapshot.entry_for_id(new_entry_id) {
- let old_repo = old_snapshot
- .repositories
- .get(&PathKey(entry.path.clone()), &())
- .cloned();
+ let old_repo =
+ old_snapshot.repository_for_id(old_entry_id).cloned();
changes.push((
entry.clone(),
GitRepositoryChange {
@@ -1673,10 +1633,8 @@ impl LocalWorktree {
}
Ordering::Greater => {
if let Some(entry) = old_snapshot.entry_for_id(old_entry_id) {
- let old_repo = old_snapshot
- .repositories
- .get(&PathKey(entry.path.clone()), &())
- .cloned();
+ let old_repo =
+ old_snapshot.repository_for_id(old_entry_id).cloned();
changes.push((
entry.clone(),
GitRepositoryChange {
@@ -1701,10 +1659,7 @@ impl LocalWorktree {
}
(None, Some((entry_id, _))) => {
if let Some(entry) = old_snapshot.entry_for_id(entry_id) {
- let old_repo = old_snapshot
- .repositories
- .get(&PathKey(entry.path.clone()), &())
- .cloned();
+ let old_repo = old_snapshot.repository_for_id(entry_id).cloned();
changes.push((
entry.clone(),
GitRepositoryChange {
@@ -2320,7 +2275,7 @@ impl LocalWorktree {
fn observe_updates<F, Fut>(&mut self, project_id: u64, cx: &Context<Worktree>, callback: F)
where
- F: 'static + Send + Fn(WorktreeRelatedMessage) -> Fut,
+ F: 'static + Send + Fn(proto::UpdateWorktree) -> Fut,
Fut: 'static + Send + Future<Output = bool>,
{
if let Some(observer) = self.update_observer.as_mut() {
@@ -2330,26 +2285,23 @@ impl LocalWorktree {
let (resume_updates_tx, mut resume_updates_rx) = watch::channel::<()>();
let (snapshots_tx, mut snapshots_rx) =
- mpsc::unbounded::<(LocalSnapshot, UpdatedEntriesSet, UpdatedGitRepositoriesSet)>();
+ mpsc::unbounded::<(LocalSnapshot, UpdatedEntriesSet)>();
snapshots_tx
- .unbounded_send((self.snapshot(), Arc::default(), Arc::default()))
+ .unbounded_send((self.snapshot(), Arc::default()))
.ok();
let worktree_id = cx.entity_id().as_u64();
let _maintain_remote_snapshot = cx.background_spawn(async move {
let mut is_first = true;
- while let Some((snapshot, entry_changes, repo_changes)) = snapshots_rx.next().await {
- let updates = if is_first {
+ while let Some((snapshot, entry_changes)) = snapshots_rx.next().await {
+ let update = if is_first {
is_first = false;
snapshot.build_initial_update(project_id, worktree_id)
} else {
- snapshot.build_update(project_id, worktree_id, entry_changes, repo_changes)
+ snapshot.build_update(project_id, worktree_id, entry_changes)
};
- for update in updates
- .into_iter()
- .flat_map(proto::split_worktree_related_message)
- {
+ for update in proto::split_worktree_update(update) {
let _ = resume_updates_rx.try_recv();
loop {
let result = callback(update.clone());
@@ -2412,7 +2364,7 @@ impl RemoteWorktree {
self.disconnected = true;
}
- pub fn update_from_remote(&self, update: WorktreeRelatedMessage) {
+ pub fn update_from_remote(&self, update: proto::UpdateWorktree) {
if let Some(updates_tx) = &self.updates_tx {
updates_tx
.unbounded_send(update)
@@ -2422,41 +2374,29 @@ impl RemoteWorktree {
fn observe_updates<F, Fut>(&mut self, project_id: u64, cx: &Context<Worktree>, callback: F)
where
- F: 'static + Send + Fn(WorktreeRelatedMessage) -> Fut,
+ F: 'static + Send + Fn(proto::UpdateWorktree) -> Fut,
Fut: 'static + Send + Future<Output = bool>,
{
let (tx, mut rx) = mpsc::unbounded();
- let initial_updates = self
+ let initial_update = self
.snapshot
.build_initial_update(project_id, self.id().to_proto());
self.update_observer = Some(tx);
cx.spawn(async move |this, cx| {
- let mut updates = initial_updates;
+ let mut update = initial_update;
'outer: loop {
- for mut update in updates {
- // SSH projects use a special project ID of 0, and we need to
- // remap it to the correct one here.
- match &mut update {
- WorktreeRelatedMessage::UpdateWorktree(update_worktree) => {
- update_worktree.project_id = project_id;
- }
- WorktreeRelatedMessage::UpdateRepository(update_repository) => {
- update_repository.project_id = project_id;
- }
- WorktreeRelatedMessage::RemoveRepository(remove_repository) => {
- remove_repository.project_id = project_id;
- }
- };
+ // SSH projects use a special project ID of 0, and we need to
+ // remap it to the correct one here.
+ update.project_id = project_id;
- for chunk in split_worktree_related_message(update) {
- if !callback(chunk).await {
- break 'outer;
- }
+ for chunk in split_worktree_update(update) {
+ if !callback(chunk).await {
+ break 'outer;
}
}
if let Some(next_update) = rx.next().await {
- updates = vec![next_update];
+ update = next_update;
} else {
break;
}
@@ -2616,11 +2556,7 @@ impl Snapshot {
self.abs_path.as_path()
}
- fn build_initial_update(
- &self,
- project_id: u64,
- worktree_id: u64,
- ) -> Vec<WorktreeRelatedMessage> {
+ fn build_initial_update(&self, project_id: u64, worktree_id: u64) -> proto::UpdateWorktree {
let mut updated_entries = self
.entries_by_path
.iter()
@@ -2628,7 +2564,7 @@ impl Snapshot {
.collect::<Vec<_>>();
updated_entries.sort_unstable_by_key(|e| e.id);
- [proto::UpdateWorktree {
+ proto::UpdateWorktree {
project_id,
worktree_id,
abs_path: self.abs_path().to_proto(),
@@ -2641,14 +2577,15 @@ impl Snapshot {
updated_repositories: Vec::new(),
removed_repositories: Vec::new(),
}
- .into()]
- .into_iter()
- .chain(
- self.repositories
- .iter()
- .map(|repository| repository.initial_update(project_id, self.scan_id).into()),
- )
- .collect()
+ }
+
+ pub fn work_directory_abs_path(&self, work_directory: &WorkDirectory) -> Result<PathBuf> {
+ match work_directory {
+ WorkDirectory::InProject { relative_path } => self.absolutize(relative_path),
+ WorkDirectory::AboveProject { absolute_path, .. } => {
+ Ok(absolute_path.as_ref().to_owned())
+ }
+ }
}
pub fn absolutize(&self, path: &Path) -> Result<PathBuf> {
@@ -2712,15 +2649,24 @@ impl Snapshot {
Some(removed_entry.path)
}
+ //#[cfg(any(test, feature = "test-support"))]
+ //pub fn status_for_file(&self, path: impl AsRef<Path>) -> Option<FileStatus> {
+ // let path = path.as_ref();
+ // self.repository_for_path(path).and_then(|repo| {
+ // let repo_path = repo.relativize(path).unwrap();
+ // repo.statuses_by_path
+ // .get(&PathKey(repo_path.0), &())
+ // .map(|entry| entry.status)
+ // })
+ //}
+
#[cfg(any(test, feature = "test-support"))]
- pub fn status_for_file(&self, path: impl AsRef<Path>) -> Option<FileStatus> {
- let path = path.as_ref();
- self.repository_for_path(path).and_then(|repo| {
- let repo_path = repo.relativize(path).unwrap();
- repo.statuses_by_path
- .get(&PathKey(repo_path.0), &())
- .map(|entry| entry.status)
- })
+ pub fn status_for_file_abs_path(&self, abs_path: impl AsRef<Path>) -> Option<FileStatus> {
+ let abs_path = abs_path.as_ref();
+ let repo = self.repository_containing_abs_path(abs_path)?;
+ let repo_path = repo.relativize_abs_path(abs_path)?;
+ let status = repo.statuses_by_path.get(&PathKey(repo_path.0), &())?;
+ Some(status.status)
}
fn update_abs_path(&mut self, abs_path: SanitizedPath, root_name: String) {
@@ -2731,95 +2677,7 @@ impl Snapshot {
}
}
- pub(crate) fn apply_update_repository(
- &mut self,
- update: proto::UpdateRepository,
- ) -> Result<()> {
- // NOTE: this is practically but not semantically correct. For now we're using the
- // ID field to store the work directory ID, but eventually it will be a different
- // kind of ID.
- let work_directory_id = ProjectEntryId::from_proto(update.id);
-
- if let Some(work_dir_entry) = self.entry_for_id(work_directory_id) {
- let conflicted_paths = TreeSet::from_ordered_entries(
- update
- .current_merge_conflicts
- .into_iter()
- .map(|path| RepoPath(Path::new(&path).into())),
- );
-
- if self
- .repositories
- .contains(&PathKey(work_dir_entry.path.clone()), &())
- {
- let edits = update
- .removed_statuses
- .into_iter()
- .map(|path| Edit::Remove(PathKey(FromProto::from_proto(path))))
- .chain(
- update
- .updated_statuses
- .into_iter()
- .filter_map(|updated_status| {
- Some(Edit::Insert(updated_status.try_into().log_err()?))
- }),
- )
- .collect::<Vec<_>>();
-
- self.repositories
- .update(&PathKey(work_dir_entry.path.clone()), &(), |repo| {
- repo.current_branch = update.branch_summary.as_ref().map(proto_to_branch);
- repo.statuses_by_path.edit(edits, &());
- repo.current_merge_conflicts = conflicted_paths
- });
- } else {
- let statuses = SumTree::from_iter(
- update
- .updated_statuses
- .into_iter()
- .filter_map(|updated_status| updated_status.try_into().log_err()),
- &(),
- );
-
- self.repositories.insert_or_replace(
- RepositoryEntry {
- work_directory_id,
- // When syncing repository entries from a peer, we don't need
- // the location_in_repo field, since git operations don't happen locally
- // anyway.
- work_directory: WorkDirectory::InProject {
- relative_path: work_dir_entry.path.clone(),
- },
- current_branch: update.branch_summary.as_ref().map(proto_to_branch),
- statuses_by_path: statuses,
- current_merge_conflicts: conflicted_paths,
- work_directory_abs_path: update.abs_path.into(),
- },
- &(),
- );
- }
- } else {
- log::error!("no work directory entry for repository {:?}", update.id)
- }
-
- Ok(())
- }
-
- pub(crate) fn apply_remove_repository(
- &mut self,
- update: proto::RemoveRepository,
- ) -> Result<()> {
- // NOTE: this is practically but not semantically correct. For now we're using the
- // ID field to store the work directory ID, but eventually it will be a different
- // kind of ID.
- let work_directory_id = ProjectEntryId::from_proto(update.id);
- self.repositories.retain(&(), |entry: &RepositoryEntry| {
- entry.work_directory_id != work_directory_id
- });
- Ok(())
- }
-
- pub(crate) fn apply_update_worktree(
+ pub(crate) fn apply_remote_update(
&mut self,
update: proto::UpdateWorktree,
always_included_paths: &PathMatcher,
@@ -2875,24 +2733,6 @@ impl Snapshot {
Ok(())
}
- pub(crate) fn apply_remote_update(
- &mut self,
- update: WorktreeRelatedMessage,
- always_included_paths: &PathMatcher,
- ) -> Result<()> {
- match update {
- WorktreeRelatedMessage::UpdateWorktree(update) => {
- self.apply_update_worktree(update, always_included_paths)
- }
- WorktreeRelatedMessage::UpdateRepository(update) => {
- self.apply_update_repository(update)
- }
- WorktreeRelatedMessage::RemoveRepository(update) => {
- self.apply_remove_repository(update)
- }
- }
- }
-
pub fn entry_count(&self) -> usize {
self.entries_by_path.summary().count
}
@@ -2972,48 +2812,18 @@ impl Snapshot {
&self.repositories
}
- /// Get the repository whose work directory corresponds to the given path.
- fn repository(&self, work_directory: PathKey) -> Option<RepositoryEntry> {
- self.repositories.get(&work_directory, &()).cloned()
- }
-
/// Get the repository whose work directory contains the given path.
- #[track_caller]
- pub fn repository_for_path(&self, path: &Path) -> Option<&RepositoryEntry> {
+ fn repository_containing_abs_path(&self, abs_path: &Path) -> Option<&RepositoryEntry> {
self.repositories
.iter()
- .filter(|repo| repo.directory_contains(path))
+ .filter(|repo| repo.directory_contains_abs_path(abs_path))
.last()
}
- /// Given an ordered iterator of entries, returns an iterator of those entries,
- /// along with their containing git repository.
- #[cfg(test)]
- #[track_caller]
- fn entries_with_repositories<'a>(
- &'a self,
- entries: impl 'a + Iterator<Item = &'a Entry>,
- ) -> impl 'a + Iterator<Item = (&'a Entry, Option<&'a RepositoryEntry>)> {
- let mut containing_repos = Vec::<&RepositoryEntry>::new();
- let mut repositories = self.repositories.iter().peekable();
- entries.map(move |entry| {
- while let Some(repository) = containing_repos.last() {
- if repository.directory_contains(&entry.path) {
- break;
- } else {
- containing_repos.pop();
- }
- }
- while let Some(repository) = repositories.peek() {
- if repository.directory_contains(&entry.path) {
- containing_repos.push(repositories.next().unwrap());
- } else {
- break;
- }
- }
- let repo = containing_repos.last().copied();
- (entry, repo)
- })
+ fn repository_for_id(&self, id: ProjectEntryId) -> Option<&RepositoryEntry> {
+ self.repositories
+ .iter()
+ .find(|repo| repo.work_directory_id == id)
}
pub fn paths(&self) -> impl Iterator<Item = &Arc<Path>> {
@@ -3098,10 +2908,18 @@ impl Snapshot {
}
impl LocalSnapshot {
- pub fn local_repo_for_path(&self, path: &Path) -> Option<&LocalRepositoryEntry> {
- let repository_entry = self.repository_for_path(path)?;
- let work_directory_id = repository_entry.work_directory_id();
- self.git_repositories.get(&work_directory_id)
+ pub fn local_repo_for_work_directory_path(&self, path: &Path) -> Option<&LocalRepositoryEntry> {
+ self.git_repositories
+ .iter()
+ .map(|(_, entry)| entry)
+ .find(|entry| entry.work_directory.path_key() == PathKey(path.into()))
+ }
+
+ pub fn local_repo_containing_path(&self, path: &Path) -> Option<&LocalRepositoryEntry> {
+ self.git_repositories
+ .values()
+ .filter(|local_repo| path.starts_with(&local_repo.path_key().0))
+ .max_by_key(|local_repo| local_repo.path_key())
}
fn build_update(
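And a sketch of the local-snapshot lookup that replaces `repository_for_path` at the call sites above; whether `LocalSnapshot` is re-exported from the worktree crate under this path is an assumption.

```rust
use std::path::Path;

use git::repository::RepoPath;
use worktree::LocalSnapshot; // export path assumed

/// Resolve the innermost local repository containing `path` and return the
/// repo-relative path, mirroring the updated stage/unstage call sites.
fn local_repo_path(snapshot: &LocalSnapshot, path: &Path) -> Option<RepoPath> {
    let repo = snapshot.local_repo_containing_path(path)?;
    repo.relativize(path).ok()
}
```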
@@ -3109,11 +2927,9 @@ impl LocalSnapshot {
project_id: u64,
worktree_id: u64,
entry_changes: UpdatedEntriesSet,
- repo_changes: UpdatedGitRepositoriesSet,
- ) -> Vec<WorktreeRelatedMessage> {
+ ) -> proto::UpdateWorktree {
let mut updated_entries = Vec::new();
let mut removed_entries = Vec::new();
- let mut updates = Vec::new();
for (_, entry_id, path_change) in entry_changes.iter() {
if let PathChange::Removed = path_change {
@@ -3123,55 +2939,25 @@ impl LocalSnapshot {
}
}
- for (entry, change) in repo_changes.iter() {
- let new_repo = self.repositories.get(&PathKey(entry.path.clone()), &());
- match (&change.old_repository, new_repo) {
- (Some(old_repo), Some(new_repo)) => {
- updates.push(
- new_repo
- .build_update(old_repo, project_id, self.scan_id)
- .into(),
- );
- }
- (None, Some(new_repo)) => {
- updates.push(new_repo.initial_update(project_id, self.scan_id).into());
- }
- (Some(old_repo), None) => {
- updates.push(
- proto::RemoveRepository {
- project_id,
- id: old_repo.work_directory_id.to_proto(),
- }
- .into(),
- );
- }
- _ => {}
- }
- }
-
removed_entries.sort_unstable();
updated_entries.sort_unstable_by_key(|e| e.id);
// TODO - optimize, knowing that removed_entries are sorted.
removed_entries.retain(|id| updated_entries.binary_search_by_key(id, |e| e.id).is_err());
- updates.push(
- proto::UpdateWorktree {
- project_id,
- worktree_id,
- abs_path: self.abs_path().to_proto(),
- root_name: self.root_name().to_string(),
- updated_entries,
- removed_entries,
- scan_id: self.scan_id as u64,
- is_last_update: self.completed_scan_id == self.scan_id,
- // Sent in separate messages.
- updated_repositories: Vec::new(),
- removed_repositories: Vec::new(),
- }
- .into(),
- );
- updates
+ proto::UpdateWorktree {
+ project_id,
+ worktree_id,
+ abs_path: self.abs_path().to_proto(),
+ root_name: self.root_name().to_string(),
+ updated_entries,
+ removed_entries,
+ scan_id: self.scan_id as u64,
+ is_last_update: self.completed_scan_id == self.scan_id,
+ // Sent in separate messages.
+ updated_repositories: Vec::new(),
+ removed_repositories: Vec::new(),
+ }
}
fn insert_entry(&mut self, mut entry: Entry, fs: &dyn Fs) -> Entry {
@@ -3351,7 +3137,7 @@ impl LocalSnapshot {
let work_dir_paths = self
.repositories
.iter()
- .map(|repo| repo.work_directory.path_key())
+ .map(|repo| repo.work_directory_abs_path.clone())
.collect::<HashSet<_>>();
assert_eq!(dotgit_paths.len(), work_dir_paths.len());
assert_eq!(self.repositories.iter().count(), work_dir_paths.len());
@@ -3560,14 +3346,9 @@ impl BackgroundScannerState {
.git_repositories
.retain(|id, _| removed_ids.binary_search(id).is_err());
self.snapshot.repositories.retain(&(), |repository| {
- let retain = !repository.work_directory.path_key().0.starts_with(path);
- if !retain {
- log::info!(
- "dropping repository entry for {:?}",
- repository.work_directory
- );
- }
- retain
+ removed_ids
+ .binary_search(&repository.work_directory_id)
+ .is_err()
});
#[cfg(test)]
@@ -3622,9 +3403,13 @@ impl BackgroundScannerState {
fs: &dyn Fs,
watcher: &dyn Watcher,
) -> Option<LocalRepositoryEntry> {
+ // TODO canonicalize here
log::info!("insert git repository for {dot_git_path:?}");
let work_dir_entry = self.snapshot.entry_for_path(work_directory.path_key().0)?;
- let work_directory_abs_path = self.snapshot.absolutize(&work_dir_entry.path).log_err()?;
+ let work_directory_abs_path = self
+ .snapshot
+ .work_directory_abs_path(&work_directory)
+ .log_err()?;
if self
.snapshot
@@ -3676,18 +3461,18 @@ impl BackgroundScannerState {
self.snapshot.repositories.insert_or_replace(
RepositoryEntry {
work_directory_id,
- work_directory: work_directory.clone(),
work_directory_abs_path,
current_branch: None,
statuses_by_path: Default::default(),
current_merge_conflicts: Default::default(),
+ worktree_scan_id: 0,
},
&(),
);
let local_repository = LocalRepositoryEntry {
work_directory_id,
- work_directory: work_directory.clone(),
+ work_directory,
git_dir_scan_id: 0,
status_scan_id: 0,
repo_ptr: repository.clone(),
@@ -4120,22 +3905,53 @@ impl<'a, S: Summary> sum_tree::Dimension<'a, PathSummary<S>> for PathProgress<'a
}
}
+#[derive(Clone, Debug)]
+pub struct AbsPathSummary {
+ max_path: Arc<Path>,
+}
+
+impl Summary for AbsPathSummary {
+ type Context = ();
+
+ fn zero(_: &Self::Context) -> Self {
+ Self {
+ max_path: Path::new("").into(),
+ }
+ }
+
+ fn add_summary(&mut self, rhs: &Self, _: &Self::Context) {
+ self.max_path = rhs.max_path.clone();
+ }
+}
+
impl sum_tree::Item for RepositoryEntry {
- type Summary = PathSummary<Unit>;
+ type Summary = AbsPathSummary;
fn summary(&self, _: &<Self::Summary as Summary>::Context) -> Self::Summary {
- PathSummary {
- max_path: self.work_directory.path_key().0,
- item_summary: Unit,
+ AbsPathSummary {
+ max_path: self.work_directory_abs_path.as_path().into(),
}
}
}
+#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
+pub struct AbsPathKey(pub Arc<Path>);
+
+impl<'a> sum_tree::Dimension<'a, AbsPathSummary> for AbsPathKey {
+ fn zero(_: &()) -> Self {
+ Self(Path::new("").into())
+ }
+
+ fn add_summary(&mut self, summary: &'a AbsPathSummary, _: &()) {
+ self.0 = summary.max_path.clone();
+ }
+}
+
impl sum_tree::KeyedItem for RepositoryEntry {
- type Key = PathKey;
+ type Key = AbsPathKey;
fn key(&self) -> Self::Key {
- self.work_directory.path_key()
+ AbsPathKey(self.work_directory_abs_path.as_path().into())
}
}
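With the hunk above, repository entries are keyed in the sum tree by their absolute work-directory path (`AbsPathSummary`/`AbsPathKey`) rather than a worktree-relative `PathKey`. As a rough std-only analogue (a `BTreeMap` standing in for the crate's `SumTree`; `RepoEntry` and the paths are illustrative), keying by absolute path means a re-insert of the same work directory replaces the existing entry, and ordering and lookup no longer depend on any worktree root:

```rust
use std::collections::BTreeMap;
use std::path::{Path, PathBuf};

// Stand-in for the handful of fields the diff keeps on `RepositoryEntry`.
#[derive(Clone, Debug)]
struct RepoEntry {
    work_directory_id: u64,
    current_branch: Option<String>,
}

fn main() {
    // Ordered by absolute work-directory path, as `AbsPathKey` orders the sum tree.
    let mut repositories: BTreeMap<PathBuf, RepoEntry> = BTreeMap::new();

    // `insert_or_replace` analogue: a second insert under the same key replaces
    // the previous entry instead of adding a duplicate.
    repositories.insert(
        PathBuf::from("/home/user/project"),
        RepoEntry { work_directory_id: 1, current_branch: None },
    );
    repositories.insert(
        PathBuf::from("/home/user/project"),
        RepoEntry { work_directory_id: 1, current_branch: Some("main".into()) },
    );

    assert_eq!(repositories.len(), 1);
    let entry = repositories.get(Path::new("/home/user/project")).unwrap();
    assert_eq!(entry.work_directory_id, 1);
    assert_eq!(entry.current_branch.as_deref(), Some("main"));
}
```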
@@ -4375,7 +4191,7 @@ impl<'a> sum_tree::Dimension<'a, PathEntrySummary> for ProjectEntryId {
}
#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash)]
-pub struct PathKey(Arc<Path>);
+pub struct PathKey(pub Arc<Path>);
impl Default for PathKey {
fn default() -> Self {
@@ -5191,11 +5007,11 @@ impl BackgroundScanner {
// Group all relative paths by their git repository.
let mut paths_by_git_repo = HashMap::default();
- for relative_path in relative_paths.iter() {
+ for (relative_path, abs_path) in relative_paths.iter().zip(&abs_paths) {
let repository_data = state
.snapshot
- .local_repo_for_path(relative_path)
- .zip(state.snapshot.repository_for_path(relative_path));
+ .local_repo_containing_path(relative_path)
+ .zip(state.snapshot.repository_containing_abs_path(abs_path));
if let Some((local_repo, entry)) = repository_data {
if let Ok(repo_path) = local_repo.relativize(relative_path) {
paths_by_git_repo
@@ -5210,7 +5026,7 @@ impl BackgroundScanner {
}
}
- for (work_directory, mut paths) in paths_by_git_repo {
+ for (_work_directory, mut paths) in paths_by_git_repo {
if let Ok(status) = paths.repo.status(&paths.repo_paths) {
let mut changed_path_statuses = Vec::new();
let statuses = paths.entry.statuses_by_path.clone();
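In the scanner changes above, changed paths are still bucketed by their containing git repository, but the lookup now pairs each relative path with its absolute counterpart, and the refreshed statuses are written back under the repository's absolute-path key. A small self-contained sketch of the bucketing-and-relativizing step (std only; `group_by_repo` and the literal paths are illustrative, not the scanner's real API):

```rust
use std::collections::HashMap;
use std::path::{Path, PathBuf};

/// Rough model of the scanner's regrouping step: bucket changed paths by the
/// work directory that contains them and store them repo-relative.
fn group_by_repo(work_dirs: &[PathBuf], changed: &[PathBuf]) -> HashMap<PathBuf, Vec<PathBuf>> {
    let mut by_repo: HashMap<PathBuf, Vec<PathBuf>> = HashMap::new();
    for path in changed {
        // Deepest containing work directory wins, as in `local_repo_containing_path`.
        let Some(dir) = work_dirs
            .iter()
            .filter(|dir| path.starts_with(dir))
            .max_by_key(|dir| dir.as_os_str().len())
        else {
            continue;
        };
        // `relativize` analogue: strip the work-directory prefix to get a repo path.
        if let Ok(repo_path) = path.strip_prefix(dir) {
            by_repo.entry(dir.clone()).or_default().push(repo_path.to_path_buf());
        }
    }
    by_repo
}

fn main() {
    let work_dirs = vec![PathBuf::from("/project")];
    let changed = vec![PathBuf::from("/project/src/main.rs")];
    let grouped = group_by_repo(&work_dirs, &changed);
    assert_eq!(grouped[Path::new("/project")], vec![PathBuf::from("src/main.rs")]);
}
```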
@@ -5239,7 +5055,7 @@ impl BackgroundScanner {
if !changed_path_statuses.is_empty() {
let work_directory_id = state.snapshot.repositories.update(
- &work_directory.path_key(),
+ &AbsPathKey(paths.entry.work_directory_abs_path.as_path().into()),
&(),
move |repository_entry| {
repository_entry
@@ -5324,14 +5140,13 @@ impl BackgroundScanner {
.components()
.any(|component| component.as_os_str() == *DOT_GIT)
{
- if let Some(repository) = snapshot.repository(PathKey(path.clone())) {
- snapshot
- .git_repositories
- .remove(&repository.work_directory_id);
+ if let Some(local_repo) = snapshot.local_repo_for_work_directory_path(path) {
+ let id = local_repo.work_directory_id;
+ log::debug!("remove repo path: {:?}", path);
+ snapshot.git_repositories.remove(&id);
snapshot
- .snapshot
.repositories
- .remove(&repository.work_directory.path_key(), &());
+ .retain(&(), |repo_entry| repo_entry.work_directory_id != id);
return Some(());
}
}
@@ -5540,6 +5355,17 @@ impl BackgroundScanner {
entry.status_scan_id = scan_id;
},
);
+ if let Some(repo_entry) = state
+ .snapshot
+ .repository_for_id(local_repository.work_directory_id)
+ {
+ let abs_path_key =
+ AbsPathKey(repo_entry.work_directory_abs_path.as_path().into());
+ state
+ .snapshot
+ .repositories
+ .update(&abs_path_key, &(), |repo| repo.worktree_scan_id = scan_id);
+ }
local_repository
}
@@ -5674,8 +5500,11 @@ async fn update_branches(
let branches = repository.repo().branches().await?;
let snapshot = state.lock().snapshot.snapshot.clone();
let mut repository = snapshot
- .repository(repository.work_directory.path_key())
- .context("Missing repository")?;
+ .repositories
+ .iter()
+ .find(|repo_entry| repo_entry.work_directory_id == repository.work_directory_id)
+ .context("missing repository")?
+ .clone();
repository.current_branch = branches.into_iter().find(|branch| branch.is_head);
let mut state = state.lock();
@@ -5717,9 +5546,10 @@ async fn do_git_status_update(
let snapshot = job_state.lock().snapshot.snapshot.clone();
let Some(mut repository) = snapshot
- .repository(local_repository.work_directory.path_key())
- .context("Tried to update git statuses for a repository that isn't in the snapshot")
+ .repository_for_id(local_repository.work_directory_id)
+ .context("tried to update git statuses for a repository that isn't in the snapshot")
.log_err()
+ .cloned()
else {
return;
};
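Both `update_branches` and `do_git_status_update` now locate the snapshot's repository entry by its stable `work_directory_id`, clone it, mutate the branch or status fields, and write the clone back. A tiny std-only model of that find-by-id, clone, modify, reinsert pattern (the `HashMap`, `RepoSnapshot`, and field values are illustrative stand-ins for the snapshot's sum tree):

```rust
use std::collections::HashMap;

#[derive(Clone, Debug, PartialEq)]
struct RepoSnapshot {
    work_directory_id: u64,
    current_branch: Option<String>,
}

fn main() {
    let mut repos: HashMap<u64, RepoSnapshot> = HashMap::from([(
        7,
        RepoSnapshot { work_directory_id: 7, current_branch: None },
    )]);

    // Find by id (as `repository_for_id` does), clone the immutable snapshot
    // entry, update it, then write the clone back into the collection.
    let mut updated = repos.get(&7).cloned().expect("missing repository");
    updated.current_branch = Some("main".to_string());
    repos.insert(updated.work_directory_id, updated);

    assert_eq!(repos[&7].current_branch.as_deref(), Some("main"));
}
```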
@@ -5731,7 +5561,7 @@ async fn do_git_status_update(
let mut new_entries_by_path = SumTree::new(&());
for (repo_path, status) in statuses.entries.iter() {
- let project_path = repository.work_directory.try_unrelativize(repo_path);
+ let project_path = local_repository.work_directory.try_unrelativize(repo_path);
new_entries_by_path.insert_or_replace(
StatusEntry {
@@ -5749,6 +5579,7 @@ async fn do_git_status_update(
}
}
+ log::trace!("statuses: {:#?}", new_entries_by_path);
repository.statuses_by_path = new_entries_by_path;
let mut state = job_state.lock();
state
@@ -1,6 +1,6 @@
use crate::{
- worktree_settings::WorktreeSettings, Entry, EntryKind, Event, PathChange, WorkDirectory,
- Worktree, WorktreeModelHandle,
+ worktree_settings::WorktreeSettings, Entry, EntryKind, Event, PathChange, StatusEntry,
+ WorkDirectory, Worktree, WorktreeModelHandle,
};
use anyhow::Result;
use fs::{FakeFs, Fs, RealFs, RemoveOptions};
@@ -15,7 +15,7 @@ use parking_lot::Mutex;
use postage::stream::Stream;
use pretty_assertions::assert_eq;
use rand::prelude::*;
-use rpc::proto::WorktreeRelatedMessage;
+
use serde_json::json;
use settings::{Settings, SettingsStore};
use std::{
@@ -1665,12 +1665,7 @@ async fn test_random_worktree_operations_during_initial_scan(
for (i, snapshot) in snapshots.into_iter().enumerate().rev() {
let mut updated_snapshot = snapshot.clone();
for update in updates.lock().iter() {
- let scan_id = match update {
- WorktreeRelatedMessage::UpdateWorktree(update) => update.scan_id,
- WorktreeRelatedMessage::UpdateRepository(update) => update.scan_id,
- WorktreeRelatedMessage::RemoveRepository(_) => u64::MAX,
- };
- if scan_id >= updated_snapshot.scan_id() as u64 {
+ if update.scan_id >= updated_snapshot.scan_id() as u64 {
updated_snapshot
.apply_remote_update(update.clone(), &settings.file_scan_inclusions)
.unwrap();
@@ -1807,12 +1802,7 @@ async fn test_random_worktree_changes(cx: &mut TestAppContext, mut rng: StdRng)
for (i, mut prev_snapshot) in snapshots.into_iter().enumerate().rev() {
for update in updates.lock().iter() {
- let scan_id = match update {
- WorktreeRelatedMessage::UpdateWorktree(update) => update.scan_id,
- WorktreeRelatedMessage::UpdateRepository(update) => update.scan_id,
- WorktreeRelatedMessage::RemoveRepository(_) => u64::MAX,
- };
- if scan_id >= prev_snapshot.scan_id() as u64 {
+ if update.scan_id >= prev_snapshot.scan_id() as u64 {
prev_snapshot
.apply_remote_update(update.clone(), &settings.file_scan_inclusions)
.unwrap();
@@ -2157,15 +2147,15 @@ async fn test_rename_work_directory(cx: &mut TestAppContext) {
let tree = tree.read(cx);
let repo = tree.repositories.iter().next().unwrap();
assert_eq!(
- repo.work_directory,
- WorkDirectory::in_project("projects/project1")
+ repo.work_directory_abs_path,
+ root_path.join("projects/project1")
);
assert_eq!(
- tree.status_for_file(Path::new("projects/project1/a")),
+ repo.status_for_path(&"a".into()).map(|entry| entry.status),
Some(StatusCode::Modified.worktree()),
);
assert_eq!(
- tree.status_for_file(Path::new("projects/project1/b")),
+ repo.status_for_path(&"b".into()).map(|entry| entry.status),
Some(FileStatus::Untracked),
);
});
@@ -2181,199 +2171,18 @@ async fn test_rename_work_directory(cx: &mut TestAppContext) {
let tree = tree.read(cx);
let repo = tree.repositories.iter().next().unwrap();
assert_eq!(
- repo.work_directory,
- WorkDirectory::in_project("projects/project2")
- );
- assert_eq!(
- tree.status_for_file(Path::new("projects/project2/a")),
- Some(StatusCode::Modified.worktree()),
+ repo.work_directory_abs_path,
+ root_path.join("projects/project2")
);
assert_eq!(
- tree.status_for_file(Path::new("projects/project2/b")),
- Some(FileStatus::Untracked),
- );
- });
-}
-
-#[gpui::test]
-async fn test_home_dir_as_git_repository(cx: &mut TestAppContext) {
- init_test(cx);
- cx.executor().allow_parking();
- let fs = FakeFs::new(cx.background_executor.clone());
- fs.insert_tree(
- "/root",
- json!({
- "home": {
- ".git": {},
- "project": {
- "a.txt": "A"
- },
- },
- }),
- )
- .await;
- fs.set_home_dir(Path::new(path!("/root/home")).to_owned());
-
- let tree = Worktree::local(
- Path::new(path!("/root/home/project")),
- true,
- fs.clone(),
- Default::default(),
- &mut cx.to_async(),
- )
- .await
- .unwrap();
-
- cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
- .await;
- tree.flush_fs_events(cx).await;
-
- tree.read_with(cx, |tree, _cx| {
- let tree = tree.as_local().unwrap();
-
- let repo = tree.repository_for_path(path!("a.txt").as_ref());
- assert!(repo.is_none());
- });
-
- let home_tree = Worktree::local(
- Path::new(path!("/root/home")),
- true,
- fs.clone(),
- Default::default(),
- &mut cx.to_async(),
- )
- .await
- .unwrap();
-
- cx.read(|cx| home_tree.read(cx).as_local().unwrap().scan_complete())
- .await;
- home_tree.flush_fs_events(cx).await;
-
- home_tree.read_with(cx, |home_tree, _cx| {
- let home_tree = home_tree.as_local().unwrap();
-
- let repo = home_tree.repository_for_path(path!("project/a.txt").as_ref());
- assert_eq!(
- repo.map(|repo| &repo.work_directory),
- Some(&WorkDirectory::InProject {
- relative_path: Path::new("").into()
- })
- );
- })
-}
-
-#[gpui::test]
-async fn test_git_repository_for_path(cx: &mut TestAppContext) {
- init_test(cx);
- cx.executor().allow_parking();
- let root = TempTree::new(json!({
- "c.txt": "",
- "dir1": {
- ".git": {},
- "deps": {
- "dep1": {
- ".git": {},
- "src": {
- "a.txt": ""
- }
- }
- },
- "src": {
- "b.txt": ""
- }
- },
- }));
-
- let tree = Worktree::local(
- root.path(),
- true,
- Arc::new(RealFs::default()),
- Default::default(),
- &mut cx.to_async(),
- )
- .await
- .unwrap();
-
- cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
- .await;
- tree.flush_fs_events(cx).await;
-
- tree.read_with(cx, |tree, _cx| {
- let tree = tree.as_local().unwrap();
-
- assert!(tree.repository_for_path("c.txt".as_ref()).is_none());
-
- let repo = tree.repository_for_path("dir1/src/b.txt".as_ref()).unwrap();
- assert_eq!(repo.work_directory, WorkDirectory::in_project("dir1"));
-
- let repo = tree
- .repository_for_path("dir1/deps/dep1/src/a.txt".as_ref())
- .unwrap();
- assert_eq!(
- repo.work_directory,
- WorkDirectory::in_project("dir1/deps/dep1")
+ repo.status_for_path(&"a".into()).unwrap().status,
+ StatusCode::Modified.worktree(),
);
-
- let entries = tree.files(false, 0);
-
- let paths_with_repos = tree
- .entries_with_repositories(entries)
- .map(|(entry, repo)| {
- (
- entry.path.as_ref(),
- repo.map(|repo| repo.work_directory.clone()),
- )
- })
- .collect::<Vec<_>>();
-
assert_eq!(
- paths_with_repos,
- &[
- (Path::new("c.txt"), None),
- (
- Path::new("dir1/deps/dep1/src/a.txt"),
- Some(WorkDirectory::in_project("dir1/deps/dep1"))
- ),
- (
- Path::new("dir1/src/b.txt"),
- Some(WorkDirectory::in_project("dir1"))
- ),
- ]
+ repo.status_for_path(&"b".into()).unwrap().status,
+ FileStatus::Untracked,
);
});
-
- let repo_update_events = Arc::new(Mutex::new(vec![]));
- tree.update(cx, |_, cx| {
- let repo_update_events = repo_update_events.clone();
- cx.subscribe(&tree, move |_, _, event, _| {
- if let Event::UpdatedGitRepositories(update) = event {
- repo_update_events.lock().push(update.clone());
- }
- })
- .detach();
- });
-
- std::fs::write(root.path().join("dir1/.git/random_new_file"), "hello").unwrap();
- tree.flush_fs_events(cx).await;
-
- assert_eq!(
- repo_update_events.lock()[0]
- .iter()
- .map(|(entry, _)| entry.path.clone())
- .collect::<Vec<Arc<Path>>>(),
- vec![Path::new("dir1").into()]
- );
-
- std::fs::remove_dir_all(root.path().join("dir1/.git")).unwrap();
- tree.flush_fs_events(cx).await;
-
- tree.read_with(cx, |tree, _cx| {
- let tree = tree.as_local().unwrap();
-
- assert!(tree
- .repository_for_path("dir1/src/b.txt".as_ref())
- .is_none());
- });
}
// NOTE: This test always fails on Windows, because on Windows, unlike on Unix,
@@ -2411,7 +2220,6 @@ async fn test_file_status(cx: &mut TestAppContext) {
const F_TXT: &str = "f.txt";
const DOTGITIGNORE: &str = ".gitignore";
const BUILD_FILE: &str = "target/build_file";
- let project_path = Path::new("project");
// Set up git repository before creating the worktree.
let work_dir = root.path().join("project");
@@ -2431,6 +2239,7 @@ async fn test_file_status(cx: &mut TestAppContext) {
)
.await
.unwrap();
+ let root_path = root.path();
tree.flush_fs_events(cx).await;
cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
@@ -2443,17 +2252,17 @@ async fn test_file_status(cx: &mut TestAppContext) {
assert_eq!(snapshot.repositories.iter().count(), 1);
let repo_entry = snapshot.repositories.iter().next().unwrap();
assert_eq!(
- repo_entry.work_directory,
- WorkDirectory::in_project("project")
+ repo_entry.work_directory_abs_path,
+ root_path.join("project")
);
assert_eq!(
- snapshot.status_for_file(project_path.join(B_TXT)),
- Some(FileStatus::Untracked),
+ repo_entry.status_for_path(&B_TXT.into()).unwrap().status,
+ FileStatus::Untracked,
);
assert_eq!(
- snapshot.status_for_file(project_path.join(F_TXT)),
- Some(FileStatus::Untracked),
+ repo_entry.status_for_path(&F_TXT.into()).unwrap().status,
+ FileStatus::Untracked,
);
});
@@ -2465,9 +2274,11 @@ async fn test_file_status(cx: &mut TestAppContext) {
// The worktree detects that the file's git status has changed.
tree.read_with(cx, |tree, _cx| {
let snapshot = tree.snapshot();
+ assert_eq!(snapshot.repositories.iter().count(), 1);
+ let repo_entry = snapshot.repositories.iter().next().unwrap();
assert_eq!(
- snapshot.status_for_file(project_path.join(A_TXT)),
- Some(StatusCode::Modified.worktree()),
+ repo_entry.status_for_path(&A_TXT.into()).unwrap().status,
+ StatusCode::Modified.worktree(),
);
});
@@ -2481,12 +2292,14 @@ async fn test_file_status(cx: &mut TestAppContext) {
// The worktree detects that the files' git status have changed.
tree.read_with(cx, |tree, _cx| {
let snapshot = tree.snapshot();
+ assert_eq!(snapshot.repositories.iter().count(), 1);
+ let repo_entry = snapshot.repositories.iter().next().unwrap();
assert_eq!(
- snapshot.status_for_file(project_path.join(F_TXT)),
- Some(FileStatus::Untracked),
+ repo_entry.status_for_path(&F_TXT.into()).unwrap().status,
+ FileStatus::Untracked,
);
- assert_eq!(snapshot.status_for_file(project_path.join(B_TXT)), None);
- assert_eq!(snapshot.status_for_file(project_path.join(A_TXT)), None);
+ assert_eq!(repo_entry.status_for_path(&B_TXT.into()), None);
+ assert_eq!(repo_entry.status_for_path(&A_TXT.into()), None);
});
// Modify files in the working copy and perform git operations on other files.
@@ -2501,15 +2314,17 @@ async fn test_file_status(cx: &mut TestAppContext) {
// Check that more complex repo changes are tracked
tree.read_with(cx, |tree, _cx| {
let snapshot = tree.snapshot();
+ assert_eq!(snapshot.repositories.iter().count(), 1);
+ let repo_entry = snapshot.repositories.iter().next().unwrap();
- assert_eq!(snapshot.status_for_file(project_path.join(A_TXT)), None);
+ assert_eq!(repo_entry.status_for_path(&A_TXT.into()), None);
assert_eq!(
- snapshot.status_for_file(project_path.join(B_TXT)),
- Some(FileStatus::Untracked),
+ repo_entry.status_for_path(&B_TXT.into()).unwrap().status,
+ FileStatus::Untracked,
);
assert_eq!(
- snapshot.status_for_file(project_path.join(E_TXT)),
- Some(StatusCode::Modified.worktree()),
+ repo_entry.status_for_path(&E_TXT.into()).unwrap().status,
+ StatusCode::Modified.worktree(),
);
});
@@ -2542,9 +2357,14 @@ async fn test_file_status(cx: &mut TestAppContext) {
tree.read_with(cx, |tree, _cx| {
let snapshot = tree.snapshot();
+ assert_eq!(snapshot.repositories.iter().count(), 1);
+ let repo_entry = snapshot.repositories.iter().next().unwrap();
assert_eq!(
- snapshot.status_for_file(project_path.join(renamed_dir_name).join(RENAMED_FILE)),
- Some(FileStatus::Untracked),
+ repo_entry
+ .status_for_path(&Path::new(renamed_dir_name).join(RENAMED_FILE).into())
+ .unwrap()
+ .status,
+ FileStatus::Untracked,
);
});
@@ -2561,14 +2381,15 @@ async fn test_file_status(cx: &mut TestAppContext) {
tree.read_with(cx, |tree, _cx| {
let snapshot = tree.snapshot();
+ assert_eq!(snapshot.repositories.iter().count(), 1);
+ let repo_entry = snapshot.repositories.iter().next().unwrap();
assert_eq!(
- snapshot.status_for_file(
- project_path
- .join(Path::new(renamed_dir_name))
- .join(RENAMED_FILE)
- ),
- Some(FileStatus::Untracked),
+ repo_entry
+ .status_for_path(&Path::new(renamed_dir_name).join(RENAMED_FILE).into())
+ .unwrap()
+ .status,
+ FileStatus::Untracked,
);
});
}
@@ -2619,17 +2440,26 @@ async fn test_git_repository_status(cx: &mut TestAppContext) {
let repo = snapshot.repositories.iter().next().unwrap();
let entries = repo.status().collect::<Vec<_>>();
- assert_eq!(entries.len(), 3);
- assert_eq!(entries[0].repo_path.as_ref(), Path::new("a.txt"));
- assert_eq!(entries[0].status, StatusCode::Modified.worktree());
- assert_eq!(entries[1].repo_path.as_ref(), Path::new("b.txt"));
- assert_eq!(entries[1].status, FileStatus::Untracked);
- assert_eq!(entries[2].repo_path.as_ref(), Path::new("d.txt"));
- assert_eq!(entries[2].status, StatusCode::Deleted.worktree());
+ assert_eq!(
+ entries,
+ [
+ StatusEntry {
+ repo_path: "a.txt".into(),
+ status: StatusCode::Modified.worktree(),
+ },
+ StatusEntry {
+ repo_path: "b.txt".into(),
+ status: FileStatus::Untracked,
+ },
+ StatusEntry {
+ repo_path: "d.txt".into(),
+ status: StatusCode::Deleted.worktree(),
+ },
+ ]
+ );
});
std::fs::write(work_dir.join("c.txt"), "some changes").unwrap();
- eprintln!("File c.txt has been modified");
tree.flush_fs_events(cx).await;
cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
@@ -2641,16 +2471,27 @@ async fn test_git_repository_status(cx: &mut TestAppContext) {
let repository = snapshot.repositories.iter().next().unwrap();
let entries = repository.status().collect::<Vec<_>>();
- std::assert_eq!(entries.len(), 4, "entries: {entries:?}");
- assert_eq!(entries[0].repo_path.as_ref(), Path::new("a.txt"));
- assert_eq!(entries[0].status, StatusCode::Modified.worktree());
- assert_eq!(entries[1].repo_path.as_ref(), Path::new("b.txt"));
- assert_eq!(entries[1].status, FileStatus::Untracked);
- // Status updated
- assert_eq!(entries[2].repo_path.as_ref(), Path::new("c.txt"));
- assert_eq!(entries[2].status, StatusCode::Modified.worktree());
- assert_eq!(entries[3].repo_path.as_ref(), Path::new("d.txt"));
- assert_eq!(entries[3].status, StatusCode::Deleted.worktree());
+ assert_eq!(
+ entries,
+ [
+ StatusEntry {
+ repo_path: "a.txt".into(),
+ status: StatusCode::Modified.worktree(),
+ },
+ StatusEntry {
+ repo_path: "b.txt".into(),
+ status: FileStatus::Untracked,
+ },
+ StatusEntry {
+ repo_path: "c.txt".into(),
+ status: StatusCode::Modified.worktree(),
+ },
+ StatusEntry {
+ repo_path: "d.txt".into(),
+ status: StatusCode::Deleted.worktree(),
+ },
+ ]
+ );
});
git_add("a.txt", &repo);
@@ -2677,13 +2518,12 @@ async fn test_git_repository_status(cx: &mut TestAppContext) {
// Deleting an untracked entry, b.txt, should leave no status
// a.txt was tracked, and so should have a status
assert_eq!(
- entries.len(),
- 1,
- "Entries length was incorrect\n{:#?}",
- &entries
+ entries,
+ [StatusEntry {
+ repo_path: "a.txt".into(),
+ status: StatusCode::Deleted.worktree(),
+ }]
);
- assert_eq!(entries[0].repo_path.as_ref(), Path::new("a.txt"));
- assert_eq!(entries[0].status, StatusCode::Deleted.worktree());
});
}
@@ -2729,17 +2569,18 @@ async fn test_git_status_postprocessing(cx: &mut TestAppContext) {
let entries = repo.status().collect::<Vec<_>>();
// `sub` doesn't appear in our computed statuses.
- assert_eq!(entries.len(), 1);
- assert_eq!(entries[0].repo_path.as_ref(), Path::new("a.txt"));
// a.txt appears with a combined `DA` status.
assert_eq!(
- entries[0].status,
- TrackedStatus {
- index_status: StatusCode::Deleted,
- worktree_status: StatusCode::Added
- }
- .into()
- );
+ entries,
+ [StatusEntry {
+ repo_path: "a.txt".into(),
+ status: TrackedStatus {
+ index_status: StatusCode::Deleted,
+ worktree_status: StatusCode::Added
+ }
+ .into(),
+ }]
+ )
});
}
@@ -2797,19 +2638,14 @@ async fn test_repository_subfolder_git_status(cx: &mut TestAppContext) {
assert_eq!(snapshot.repositories.iter().count(), 1);
let repo = snapshot.repositories.iter().next().unwrap();
assert_eq!(
- repo.work_directory.canonicalize(),
- WorkDirectory::AboveProject {
- absolute_path: Arc::from(root.path().join("my-repo").canonicalize().unwrap()),
- location_in_repo: Arc::from(Path::new(util::separator!(
- "sub-folder-1/sub-folder-2"
- )))
- }
+ repo.work_directory_abs_path.canonicalize().unwrap(),
+ root.path().join("my-repo").canonicalize().unwrap()
);
- assert_eq!(snapshot.status_for_file("c.txt"), None);
+ assert_eq!(repo.status_for_path(&C_TXT.into()), None);
assert_eq!(
- snapshot.status_for_file("d/e.txt"),
- Some(FileStatus::Untracked)
+ repo.status_for_path(&E_TXT.into()).unwrap().status,
+ FileStatus::Untracked
);
});
@@ -2823,11 +2659,14 @@ async fn test_repository_subfolder_git_status(cx: &mut TestAppContext) {
tree.read_with(cx, |tree, _cx| {
let snapshot = tree.snapshot();
+ let repos = snapshot.repositories().iter().cloned().collect::<Vec<_>>();
+ assert_eq!(repos.len(), 1);
+ let repo_entry = repos.into_iter().next().unwrap();
assert!(snapshot.repositories.iter().next().is_some());
- assert_eq!(snapshot.status_for_file("c.txt"), None);
- assert_eq!(snapshot.status_for_file("d/e.txt"), None);
+ assert_eq!(repo_entry.status_for_path(&C_TXT.into()), None);
+ assert_eq!(repo_entry.status_for_path(&E_TXT.into()), None);
});
}
@@ -3140,7 +2979,12 @@ fn assert_entry_git_state(
is_ignored: bool,
) {
let entry = tree.entry_for_path(path).expect("entry {path} not found");
- let status = tree.status_for_file(Path::new(path));
+ let repos = tree.repositories().iter().cloned().collect::<Vec<_>>();
+ assert_eq!(repos.len(), 1);
+ let repo_entry = repos.into_iter().next().unwrap();
+ let status = repo_entry
+ .status_for_path(&path.into())
+ .map(|entry| entry.status);
let expected = index_status.map(|index_status| {
TrackedStatus {
index_status,