Detailed changes
@@ -2784,7 +2784,8 @@ dependencies = [
name = "collections"
version = "0.1.0"
dependencies = [
- "rustc-hash 1.1.0",
+ "indexmap",
+ "rustc-hash 2.1.0",
]
[[package]]
@@ -5193,6 +5194,7 @@ dependencies = [
"util",
"windows 0.58.0",
"workspace",
+ "worktree",
]
[[package]]
@@ -389,7 +389,7 @@ hyper = "0.14"
http = "1.1"
ignore = "0.4.22"
image = "0.25.1"
-indexmap = { version = "2", features = ["serde"] }
+indexmap = { version = "2.7.0", features = ["serde"] }
indoc = "2"
itertools = "0.13.0"
jsonwebtoken = "9.3"
@@ -440,9 +440,10 @@ runtimelib = { version = "0.24.0", default-features = false, features = [
] }
rustc-demangle = "0.1.23"
rust-embed = { version = "8.4", features = ["include-exclude"] }
+rustc-hash = "2.1.0"
rustls = "0.21.12"
rustls-native-certs = "0.8.0"
-schemars = { version = "0.8", features = ["impl_json_schema"] }
+schemars = { version = "0.8", features = ["impl_json_schema", "indexmap2"] }
semver = "1.0"
serde = { version = "1.0", features = ["derive", "rc"] }
serde_derive = { version = "1.0", features = ["deserialize_in_place"] }
@@ -122,7 +122,7 @@ pub fn init(cx: &mut AppContext) {
cx.observe_new_views(
|terminal_panel: &mut TerminalPanel, cx: &mut ViewContext<TerminalPanel>| {
let settings = AssistantSettings::get_global(cx);
- terminal_panel.asssistant_enabled(settings.enabled, cx);
+ terminal_panel.set_assistant_enabled(settings.enabled, cx);
},
)
.detach();
@@ -133,7 +133,7 @@ impl InlineAssistant {
};
let enabled = AssistantSettings::get_global(cx).enabled;
terminal_panel.update(cx, |terminal_panel, cx| {
- terminal_panel.asssistant_enabled(enabled, cx)
+ terminal_panel.set_assistant_enabled(enabled, cx)
});
})
.detach();
@@ -118,7 +118,7 @@ impl InlineAssistant {
};
let enabled = AssistantSettings::get_global(cx).enabled;
terminal_panel.update(cx, |terminal_panel, cx| {
- terminal_panel.asssistant_enabled(enabled, cx)
+ terminal_panel.set_assistant_enabled(enabled, cx)
});
})
.detach();
@@ -79,7 +79,7 @@ fn parse_path_with_position(argument_str: &str) -> anyhow::Result<String> {
Ok(existing_path) => PathWithPosition::from_path(existing_path),
Err(_) => {
let path = PathWithPosition::parse_str(argument_str);
- let curdir = env::current_dir().context("reteiving current directory")?;
+ let curdir = env::current_dir().context("retrieving current directory")?;
path.map_path(|path| match fs::canonicalize(&path) {
Ok(path) => Ok(path),
Err(e) => {
@@ -106,6 +106,22 @@ CREATE TABLE "worktree_repositories" (
CREATE INDEX "index_worktree_repositories_on_project_id" ON "worktree_repositories" ("project_id");
CREATE INDEX "index_worktree_repositories_on_project_id_and_worktree_id" ON "worktree_repositories" ("project_id", "worktree_id");
+CREATE TABLE "worktree_repository_statuses" (
+ "project_id" INTEGER NOT NULL,
+ "worktree_id" INT8 NOT NULL,
+ "work_directory_id" INT8 NOT NULL,
+ "repo_path" VARCHAR NOT NULL,
+ "status" INT8 NOT NULL,
+ "scan_id" INT8 NOT NULL,
+ "is_deleted" BOOL NOT NULL,
+ PRIMARY KEY(project_id, worktree_id, work_directory_id, repo_path),
+ FOREIGN KEY(project_id, worktree_id) REFERENCES worktrees (project_id, id) ON DELETE CASCADE,
+ FOREIGN KEY(project_id, worktree_id, work_directory_id) REFERENCES worktree_entries (project_id, worktree_id, id) ON DELETE CASCADE
+);
+CREATE INDEX "index_wt_repos_statuses_on_project_id" ON "worktree_repository_statuses" ("project_id");
+CREATE INDEX "index_wt_repos_statuses_on_project_id_and_wt_id" ON "worktree_repository_statuses" ("project_id", "worktree_id");
+CREATE INDEX "index_wt_repos_statuses_on_project_id_and_wt_id_and_wd_id" ON "worktree_repository_statuses" ("project_id", "worktree_id", "work_directory_id");
+
CREATE TABLE "worktree_settings_files" (
"project_id" INTEGER NOT NULL,
"worktree_id" INTEGER NOT NULL,
@@ -1,4 +1,5 @@
use anyhow::Context as _;
+
use util::ResultExt;
use super::*;
@@ -274,8 +275,8 @@ impl Database {
mtime_nanos: ActiveValue::set(mtime.nanos as i32),
canonical_path: ActiveValue::set(entry.canonical_path.clone()),
is_ignored: ActiveValue::set(entry.is_ignored),
+ git_status: ActiveValue::set(None),
is_external: ActiveValue::set(entry.is_external),
- git_status: ActiveValue::set(entry.git_status.map(|status| status as i64)),
is_deleted: ActiveValue::set(false),
scan_id: ActiveValue::set(update.scan_id as i64),
is_fifo: ActiveValue::set(entry.is_fifo),
@@ -295,7 +296,6 @@ impl Database {
worktree_entry::Column::MtimeNanos,
worktree_entry::Column::CanonicalPath,
worktree_entry::Column::IsIgnored,
- worktree_entry::Column::GitStatus,
worktree_entry::Column::ScanId,
])
.to_owned(),
@@ -349,6 +349,79 @@ impl Database {
)
.exec(&*tx)
.await?;
+
+ let has_any_statuses = update
+ .updated_repositories
+ .iter()
+ .any(|repository| !repository.updated_statuses.is_empty());
+
+ if has_any_statuses {
+ worktree_repository_statuses::Entity::insert_many(
+ update.updated_repositories.iter().flat_map(
+ |repository: &proto::RepositoryEntry| {
+ repository.updated_statuses.iter().map(|status_entry| {
+ worktree_repository_statuses::ActiveModel {
+ project_id: ActiveValue::set(project_id),
+ worktree_id: ActiveValue::set(worktree_id),
+ work_directory_id: ActiveValue::set(
+ repository.work_directory_id as i64,
+ ),
+ scan_id: ActiveValue::set(update.scan_id as i64),
+ is_deleted: ActiveValue::set(false),
+ repo_path: ActiveValue::set(status_entry.repo_path.clone()),
+ status: ActiveValue::set(status_entry.status as i64),
+ }
+ })
+ },
+ ),
+ )
+ .on_conflict(
+ OnConflict::columns([
+ worktree_repository_statuses::Column::ProjectId,
+ worktree_repository_statuses::Column::WorktreeId,
+ worktree_repository_statuses::Column::WorkDirectoryId,
+ worktree_repository_statuses::Column::RepoPath,
+ ])
+ .update_columns([
+ worktree_repository_statuses::Column::ScanId,
+ worktree_repository_statuses::Column::Status,
+ ])
+ .to_owned(),
+ )
+ .exec(&*tx)
+ .await?;
+ }
+
+ let has_any_removed_statuses = update
+ .updated_repositories
+ .iter()
+ .any(|repository| !repository.removed_statuses.is_empty());
+
+ if has_any_removed_statuses {
+ worktree_repository_statuses::Entity::update_many()
+ .filter(
+ worktree_repository_statuses::Column::ProjectId
+ .eq(project_id)
+ .and(
+ worktree_repository_statuses::Column::WorktreeId
+ .eq(worktree_id),
+ )
+ .and(
+ worktree_repository_statuses::Column::RepoPath.is_in(
+ update.updated_repositories.iter().flat_map(|repository| {
+ repository.removed_statuses.iter()
+ }),
+ ),
+ ),
+ )
+ .set(worktree_repository_statuses::ActiveModel {
+ is_deleted: ActiveValue::Set(true),
+ scan_id: ActiveValue::Set(update.scan_id as i64),
+ ..Default::default()
+ })
+ .exec(&*tx)
+ .await?;
+ }
}
if !update.removed_repositories.is_empty() {
@@ -643,7 +716,6 @@ impl Database {
canonical_path: db_entry.canonical_path,
is_ignored: db_entry.is_ignored,
is_external: db_entry.is_external,
- git_status: db_entry.git_status.map(|status| status as i32),
// This is only used in the summarization backlog, so if it's None,
// that just means we won't be able to detect when to resummarize
// based on total number of backlogged bytes - instead, we'd go
@@ -657,23 +729,49 @@ impl Database {
// Populate repository entries.
{
- let mut db_repository_entries = worktree_repository::Entity::find()
+ let db_repository_entries = worktree_repository::Entity::find()
.filter(
Condition::all()
.add(worktree_repository::Column::ProjectId.eq(project.id))
.add(worktree_repository::Column::IsDeleted.eq(false)),
)
- .stream(tx)
+ .all(tx)
.await?;
- while let Some(db_repository_entry) = db_repository_entries.next().await {
- let db_repository_entry = db_repository_entry?;
+ for db_repository_entry in db_repository_entries {
if let Some(worktree) = worktrees.get_mut(&(db_repository_entry.worktree_id as u64))
{
+ let mut repository_statuses = worktree_repository_statuses::Entity::find()
+ .filter(
+ Condition::all()
+ .add(worktree_repository_statuses::Column::ProjectId.eq(project.id))
+ .add(
+ worktree_repository_statuses::Column::WorktreeId
+ .eq(worktree.id),
+ )
+ .add(
+ worktree_repository_statuses::Column::WorkDirectoryId
+ .eq(db_repository_entry.work_directory_id),
+ )
+ .add(worktree_repository_statuses::Column::IsDeleted.eq(false)),
+ )
+ .stream(tx)
+ .await?;
+ let mut updated_statuses = Vec::new();
+ while let Some(status_entry) = repository_statuses.next().await {
+ let status_entry: worktree_repository_statuses::Model = status_entry?;
+ updated_statuses.push(proto::StatusEntry {
+ repo_path: status_entry.repo_path,
+ status: status_entry.status as i32,
+ });
+ }
+
worktree.repository_entries.insert(
db_repository_entry.work_directory_id as u64,
proto::RepositoryEntry {
work_directory_id: db_repository_entry.work_directory_id as u64,
branch: db_repository_entry.branch,
+ updated_statuses,
+ removed_statuses: Vec::new(),
},
);
}
@@ -662,7 +662,6 @@ impl Database {
canonical_path: db_entry.canonical_path,
is_ignored: db_entry.is_ignored,
is_external: db_entry.is_external,
- git_status: db_entry.git_status.map(|status| status as i32),
// This is only used in the summarization backlog, so if it's None,
// that just means we won't be able to detect when to resummarize
// based on total number of backlogged bytes - instead, we'd go
@@ -682,26 +681,69 @@ impl Database {
worktree_repository::Column::IsDeleted.eq(false)
};
- let mut db_repositories = worktree_repository::Entity::find()
+ let db_repositories = worktree_repository::Entity::find()
.filter(
Condition::all()
.add(worktree_repository::Column::ProjectId.eq(project.id))
.add(worktree_repository::Column::WorktreeId.eq(worktree.id))
.add(repository_entry_filter),
)
- .stream(tx)
+ .all(tx)
.await?;
- while let Some(db_repository) = db_repositories.next().await {
- let db_repository = db_repository?;
+ for db_repository in db_repositories.into_iter() {
if db_repository.is_deleted {
worktree
.removed_repositories
.push(db_repository.work_directory_id as u64);
} else {
+ let status_entry_filter = if let Some(rejoined_worktree) = rejoined_worktree
+ {
+ worktree_repository_statuses::Column::ScanId
+ .gt(rejoined_worktree.scan_id)
+ } else {
+ worktree_repository_statuses::Column::IsDeleted.eq(false)
+ };
+
+ let mut db_statuses = worktree_repository_statuses::Entity::find()
+ .filter(
+ Condition::all()
+ .add(
+ worktree_repository_statuses::Column::ProjectId
+ .eq(project.id),
+ )
+ .add(
+ worktree_repository_statuses::Column::WorktreeId
+ .eq(worktree.id),
+ )
+ .add(
+ worktree_repository_statuses::Column::WorkDirectoryId
+ .eq(db_repository.work_directory_id),
+ )
+ .add(status_entry_filter),
+ )
+ .stream(tx)
+ .await?;
+ let mut removed_statuses = Vec::new();
+ let mut updated_statuses = Vec::new();
+
+ while let Some(db_status) = db_statuses.next().await {
+ let db_status: worktree_repository_statuses::Model = db_status?;
+ if db_status.is_deleted {
+ removed_statuses.push(db_status.repo_path);
+ } else {
+ updated_statuses.push(proto::StatusEntry {
+ repo_path: db_status.repo_path,
+ status: db_status.status as i32,
+ });
+ }
+ }
+
worktree.updated_repositories.push(proto::RepositoryEntry {
work_directory_id: db_repository.work_directory_id as u64,
branch: db_repository.branch,
+ updated_statuses,
+ removed_statuses,
});
}
}
@@ -2925,8 +2925,6 @@ async fn test_git_status_sync(
assert_eq!(snapshot.status_for_file(file), status);
}
- // Smoke test status reading
-
project_local.read_with(cx_a, |project, cx| {
assert_status(&Path::new(A_TXT), Some(GitFileStatus::Added), project, cx);
assert_status(&Path::new(B_TXT), Some(GitFileStatus::Added), project, cx);
@@ -6669,6 +6667,10 @@ async fn test_remote_git_branches(
client_a
.fs()
.insert_branches(Path::new("/project/.git"), &branches);
+ let branches_set = branches
+ .into_iter()
+ .map(ToString::to_string)
+ .collect::<HashSet<_>>();
let (project_a, worktree_id) = client_a.build_local_project("/project", cx_a).await;
let project_id = active_call_a
@@ -6690,10 +6692,10 @@ async fn test_remote_git_branches(
let branches_b = branches_b
.into_iter()
- .map(|branch| branch.name)
- .collect::<Vec<_>>();
+ .map(|branch| branch.name.to_string())
+ .collect::<HashSet<_>>();
- assert_eq!(&branches_b, &branches);
+ assert_eq!(branches_b, branches_set);
cx_b.update(|cx| {
project_b.update(cx, |project, cx| {
@@ -229,6 +229,10 @@ async fn test_ssh_collaboration_git_branches(
.await;
let branches = ["main", "dev", "feature-1"];
+ let branches_set = branches
+ .iter()
+ .map(ToString::to_string)
+ .collect::<HashSet<_>>();
remote_fs.insert_branches(Path::new("/project/.git"), &branches);
// User A connects to the remote project via SSH.
@@ -281,10 +285,10 @@ async fn test_ssh_collaboration_git_branches(
let branches_b = branches_b
.into_iter()
- .map(|branch| branch.name)
- .collect::<Vec<_>>();
+ .map(|branch| branch.name.to_string())
+ .collect::<HashSet<_>>();
- assert_eq!(&branches_b, &branches);
+ assert_eq!(&branches_b, &branches_set);
cx_b.update(|cx| {
project_b.update(cx, |project, cx| {
@@ -16,4 +16,5 @@ doctest = false
test-support = []
[dependencies]
-rustc-hash = "1.1"
+indexmap.workspace = true
+rustc-hash.workspace = true
@@ -4,12 +4,24 @@ pub type HashMap<K, V> = FxHashMap<K, V>;
#[cfg(feature = "test-support")]
pub type HashSet<T> = FxHashSet<T>;
+#[cfg(feature = "test-support")]
+pub type IndexMap<K, V> = indexmap::IndexMap<K, V, rustc_hash::FxBuildHasher>;
+
+#[cfg(feature = "test-support")]
+pub type IndexSet<T> = indexmap::IndexSet<T, rustc_hash::FxBuildHasher>;
+
#[cfg(not(feature = "test-support"))]
pub type HashMap<K, V> = std::collections::HashMap<K, V>;
#[cfg(not(feature = "test-support"))]
pub type HashSet<T> = std::collections::HashSet<T>;
+#[cfg(not(feature = "test-support"))]
+pub type IndexMap<K, V> = indexmap::IndexMap<K, V>;
+
+#[cfg(not(feature = "test-support"))]
+pub type IndexSet<T> = indexmap::IndexSet<T>;
+
pub use rustc_hash::FxHasher;
pub use rustc_hash::{FxHashMap, FxHashSet};
pub use std::collections::*;
@@ -2748,7 +2748,7 @@ mod tests {
.iter()
.filter(|(_, block)| matches!(block, Block::FoldedBuffer { .. }))
.count(),
- "Should have one folded block, prodicing a header of the second buffer"
+ "Should have one folded block, producing a header of the second buffer"
);
assert_eq!(
blocks_snapshot.text(),
@@ -2994,7 +2994,7 @@ mod tests {
}
})
.count(),
- "Should have one folded block, prodicing a header of the second buffer"
+ "Should have one folded block, producing a header of the second buffer"
);
assert_eq!(blocks_snapshot.text(), "\n");
assert_eq!(
@@ -11780,7 +11780,7 @@ impl Editor {
}
/// Merges all anchor ranges for all context types ever set, picking the last highlight added in case of a row conflict.
- /// Rerturns a map of display rows that are highlighted and their corresponding highlight color.
+ /// Returns a map of display rows that are highlighted and their corresponding highlight color.
/// Allows to ignore certain kinds of highlights.
pub fn highlighted_display_rows(
&mut self,
@@ -12573,7 +12573,7 @@ impl Editor {
.file()
.is_none()
.then(|| {
- // Handle file-less buffers separately: those are not really the project items, so won't have a paroject path or entity id,
+ // Handle file-less buffers separately: those are not really the project items, so won't have a project path or entity id,
// so `workspace.open_project_item` will never find them, always opening a new editor.
// Instead, we try to activate the existing editor in the pane first.
let (editor, pane_item_index) =
@@ -194,14 +194,24 @@ impl ProjectDiffEditor {
let open_tasks = project
.update(&mut cx, |project, cx| {
let worktree = project.worktree_for_id(id, cx)?;
- let applicable_entries = worktree
- .read(cx)
- .entries(false, 0)
- .filter(|entry| !entry.is_external)
- .filter(|entry| entry.is_file())
- .filter_map(|entry| Some((entry.git_status?, entry)))
- .filter_map(|(git_status, entry)| {
- Some((git_status, entry.id, project.path_for_entry(entry.id, cx)?))
+ let snapshot = worktree.read(cx).snapshot();
+ let applicable_entries = snapshot
+ .repositories()
+ .flat_map(|entry| {
+ entry.status().map(|git_entry| {
+ (git_entry.status, entry.join(git_entry.repo_path))
+ })
+ })
+ .filter_map(|(status, path)| {
+ let id = snapshot.entry_for_path(&path)?.id;
+ Some((
+ status,
+ id,
+ ProjectPath {
+ worktree_id: snapshot.id(),
+ path: path.into(),
+ },
+ ))
})
.collect::<Vec<_>>();
Some(
@@ -615,9 +615,20 @@ impl Item for Editor {
.read(cx)
.as_singleton()
.and_then(|buffer| buffer.read(cx).project_path(cx))
- .and_then(|path| self.project.as_ref()?.read(cx).entry_for_path(&path, cx))
- .map(|entry| {
- entry_git_aware_label_color(entry.git_status, entry.is_ignored, params.selected)
+ .and_then(|path| {
+ let project = self.project.as_ref()?.read(cx);
+ let entry = project.entry_for_path(&path, cx)?;
+ let git_status = project
+ .worktree_for_id(path.worktree_id, cx)?
+ .read(cx)
+ .snapshot()
+ .status_for_file(path.path);
+
+ Some(entry_git_aware_label_color(
+ git_status,
+ entry.is_ignored,
+ params.selected,
+ ))
})
.unwrap_or_else(|| entry_label_color(params.selected))
} else {
@@ -1559,10 +1570,10 @@ pub fn entry_git_aware_label_color(
Color::Ignored
} else {
match git_status {
- Some(GitFileStatus::Added) => Color::Created,
+ Some(GitFileStatus::Added) | Some(GitFileStatus::Untracked) => Color::Created,
Some(GitFileStatus::Modified) => Color::Modified,
Some(GitFileStatus::Conflict) => Color::Conflict,
- None => entry_label_color(selected),
+ Some(GitFileStatus::Deleted) | None => entry_label_color(selected),
}
}
}
@@ -257,7 +257,8 @@ impl EditorLspTestContext {
Self::new(language, Default::default(), cx).await
}
- // Constructs lsp range using a marked string with '[', ']' range delimiters
+ /// Constructs lsp range using a marked string with '[', ']' range delimiters
+ #[track_caller]
pub fn lsp_range(&mut self, marked_text: &str) -> lsp::Range {
let ranges = self.ranges(marked_text);
self.to_lsp_range(ranges[0].clone())
@@ -230,6 +230,7 @@ impl EditorTestContext {
self.cx.background_executor.run_until_parked();
}
+ #[track_caller]
pub fn ranges(&mut self, marked_text: &str) -> Vec<Range<usize>> {
let (unmarked_text, ranges) = marked_text_ranges(marked_text, false);
assert_eq!(self.buffer_text(), unmarked_text);
@@ -16,6 +16,7 @@ use std::sync::LazyLock;
pub use crate::hosting_provider::*;
pub use crate::remote::*;
pub use git2 as libgit;
+pub use repository::WORK_DIRECTORY_REPO_PATH;
pub static DOT_GIT: LazyLock<&'static OsStr> = LazyLock::new(|| OsStr::new(".git"));
pub static COOKIES: LazyLock<&'static OsStr> = LazyLock::new(|| OsStr::new("cookies"));
@@ -7,6 +7,8 @@ use gpui::SharedString;
use parking_lot::Mutex;
use rope::Rope;
use serde::{Deserialize, Serialize};
+use std::borrow::Borrow;
+use std::sync::LazyLock;
use std::{
cmp::Ordering,
path::{Component, Path, PathBuf},
@@ -37,7 +39,8 @@ pub trait GitRepository: Send + Sync {
/// Returns the SHA of the current HEAD.
fn head_sha(&self) -> Option<String>;
- fn status(&self, path_prefixes: &[PathBuf]) -> Result<GitStatus>;
+ /// Returns the list of git statuses, sorted by path
+ fn status(&self, path_prefixes: &[RepoPath]) -> Result<GitStatus>;
fn branches(&self) -> Result<Vec<Branch>>;
fn change_branch(&self, _: &str) -> Result<()>;
@@ -132,7 +135,7 @@ impl GitRepository for RealGitRepository {
Some(self.repository.lock().head().ok()?.target()?.to_string())
}
- fn status(&self, path_prefixes: &[PathBuf]) -> Result<GitStatus> {
+ fn status(&self, path_prefixes: &[RepoPath]) -> Result<GitStatus> {
let working_directory = self
.repository
.lock()
@@ -289,8 +292,9 @@ impl GitRepository for FakeGitRepository {
state.dot_git_dir.clone()
}
- fn status(&self, path_prefixes: &[PathBuf]) -> Result<GitStatus> {
+ fn status(&self, path_prefixes: &[RepoPath]) -> Result<GitStatus> {
let state = self.state.lock();
+
let mut entries = state
.worktree_statuses
.iter()
@@ -306,6 +310,7 @@ impl GitRepository for FakeGitRepository {
})
.collect::<Vec<_>>();
entries.sort_unstable_by(|a, b| a.0.cmp(&b.0));
+
Ok(GitStatus {
entries: entries.into(),
})
@@ -394,6 +399,8 @@ pub enum GitFileStatus {
Added,
Modified,
Conflict,
+ Deleted,
+ Untracked,
}
impl GitFileStatus {
@@ -421,20 +428,34 @@ impl GitFileStatus {
}
}
+pub static WORK_DIRECTORY_REPO_PATH: LazyLock<RepoPath> =
+ LazyLock::new(|| RepoPath(Path::new("").into()));
+
#[derive(Clone, Debug, Ord, Hash, PartialOrd, Eq, PartialEq)]
-pub struct RepoPath(pub PathBuf);
+pub struct RepoPath(pub Arc<Path>);
impl RepoPath {
pub fn new(path: PathBuf) -> Self {
debug_assert!(path.is_relative(), "Repo paths must be relative");
- RepoPath(path)
+ RepoPath(path.into())
+ }
+
+ pub fn from_str(path: &str) -> Self {
+ let path = Path::new(path);
+ debug_assert!(path.is_relative(), "Repo paths must be relative");
+
+ RepoPath(path.into())
+ }
+
+ pub fn to_proto(&self) -> String {
+ self.0.to_string_lossy().to_string()
}
}
impl From<&Path> for RepoPath {
fn from(value: &Path) -> Self {
- RepoPath::new(value.to_path_buf())
+ RepoPath::new(value.into())
}
}
@@ -444,9 +465,15 @@ impl From<PathBuf> for RepoPath {
}
}
+impl From<&str> for RepoPath {
+ fn from(value: &str) -> Self {
+ Self::from_str(value)
+ }
+}
+
impl Default for RepoPath {
fn default() -> Self {
- RepoPath(PathBuf::new())
+ RepoPath(Path::new("").into())
}
}
@@ -457,13 +484,19 @@ impl AsRef<Path> for RepoPath {
}
impl std::ops::Deref for RepoPath {
- type Target = PathBuf;
+ type Target = Path;
fn deref(&self) -> &Self::Target {
&self.0
}
}
+impl Borrow<Path> for RepoPath {
+ fn borrow(&self) -> &Path {
+ self.0.as_ref()
+ }
+}
+
#[derive(Debug)]
pub struct RepoPathDescendants<'a>(pub &'a Path);
@@ -1,10 +1,6 @@
use crate::repository::{GitFileStatus, RepoPath};
use anyhow::{anyhow, Result};
-use std::{
- path::{Path, PathBuf},
- process::Stdio,
- sync::Arc,
-};
+use std::{path::Path, process::Stdio, sync::Arc};
#[derive(Clone)]
pub struct GitStatus {
@@ -15,7 +11,7 @@ impl GitStatus {
pub(crate) fn new(
git_binary: &Path,
working_directory: &Path,
- path_prefixes: &[PathBuf],
+ path_prefixes: &[RepoPath],
) -> Result<Self> {
let child = util::command::new_std_command(git_binary)
.current_dir(working_directory)
@@ -27,7 +23,7 @@ impl GitStatus {
"-z",
])
.args(path_prefixes.iter().map(|path_prefix| {
- if *path_prefix == Path::new("") {
+ if path_prefix.0.as_ref() == Path::new("") {
Path::new(".")
} else {
path_prefix
@@ -55,10 +51,12 @@ impl GitStatus {
let (status, path) = entry.split_at(3);
let status = status.trim();
Some((
- RepoPath(PathBuf::from(path)),
+ RepoPath(Path::new(path).into()),
match status {
- "A" | "??" => GitFileStatus::Added,
+ "A" => GitFileStatus::Added,
"M" => GitFileStatus::Modified,
+ "D" => GitFileStatus::Deleted,
+ "??" => GitFileStatus::Untracked,
_ => return None,
},
))
@@ -75,7 +73,7 @@ impl GitStatus {
pub fn get(&self, path: &Path) -> Option<GitFileStatus> {
self.entries
- .binary_search_by(|(repo_path, _)| repo_path.0.as_path().cmp(path))
+ .binary_search_by(|(repo_path, _)| repo_path.0.as_ref().cmp(path))
.ok()
.map(|index| self.entries[index].1)
}
@@ -14,9 +14,11 @@ path = "src/git_ui.rs"
[dependencies]
anyhow.workspace = true
+collections.workspace = true
db.workspace = true
editor.workspace = true
futures.workspace = true
+git.workspace = true
gpui.workspace = true
language.workspace = true
menu.workspace = true
@@ -29,8 +31,7 @@ settings.workspace = true
ui.workspace = true
util.workspace = true
workspace.workspace = true
-git.workspace = true
-collections.workspace = true
+worktree.workspace = true
[target.'cfg(windows)'.dependencies]
windows.workspace = true
@@ -1,11 +1,16 @@
+use crate::{git_status_icon, settings::GitPanelSettings};
+use crate::{CommitAllChanges, CommitStagedChanges, DiscardAll, StageAll, UnstageAll};
use anyhow::{Context as _, Result};
-use collections::HashMap;
use db::kvp::KEY_VALUE_STORE;
use editor::{
scroll::{Autoscroll, AutoscrollStrategy},
Editor, MultiBuffer, DEFAULT_MULTIBUFFER_CONTEXT,
};
-use git::{diff::DiffHunk, repository::GitFileStatus};
+use git::{
+ diff::DiffHunk,
+ repository::{GitFileStatus, RepoPath},
+};
+use gpui::*;
use gpui::{
actions, prelude::*, uniform_list, Action, AppContext, AsyncWindowContext, ClickEvent,
CursorStyle, EventEmitter, FocusHandle, FocusableView, KeyContext,
@@ -14,7 +19,7 @@ use gpui::{
};
use language::{Buffer, BufferRow, OffsetRangeExt};
use menu::{SelectNext, SelectPrev};
-use project::{Entry, EntryKind, Fs, Project, ProjectEntryId, WorktreeId};
+use project::{EntryKind, Fs, Project, ProjectEntryId, ProjectPath, WorktreeId};
use serde::{Deserialize, Serialize};
use settings::Settings as _;
use std::{
@@ -22,7 +27,7 @@ use std::{
collections::HashSet,
ffi::OsStr,
ops::{Deref, Range},
- path::{Path, PathBuf},
+ path::PathBuf,
rc::Rc,
sync::Arc,
time::Duration,
@@ -37,9 +42,7 @@ use workspace::{
dock::{DockPosition, Panel, PanelEvent},
ItemHandle, Workspace,
};
-
-use crate::{git_status_icon, settings::GitPanelSettings};
-use crate::{CommitAllChanges, CommitStagedChanges, DiscardAll, StageAll, UnstageAll};
+use worktree::StatusEntry;
actions!(git_panel, [ToggleFocus]);
@@ -69,7 +72,7 @@ pub struct GitStatusEntry {}
struct EntryDetails {
filename: String,
display_name: String,
- path: Arc<Path>,
+ path: RepoPath,
kind: EntryKind,
depth: usize,
is_expanded: bool,
@@ -101,7 +104,8 @@ pub struct GitPanel {
scrollbar_state: ScrollbarState,
selected_item: Option<usize>,
show_scrollbar: bool,
- expanded_dir_ids: HashMap<WorktreeId, Vec<ProjectEntryId>>,
+ // TODO Reintroduce expanded directories, once we're deriving directories from paths
+ // expanded_dir_ids: HashMap<WorktreeId, Vec<ProjectEntryId>>,
// The entries that are currently shown in the panel, aka
// not hidden by folding or such
@@ -115,18 +119,20 @@ pub struct GitPanel {
#[derive(Debug, Clone)]
struct WorktreeEntries {
worktree_id: WorktreeId,
+ // TODO support multiple repositories per worktree
+ work_directory: worktree::WorkDirectory,
visible_entries: Vec<GitPanelEntry>,
- paths: Rc<OnceCell<HashSet<Arc<Path>>>>,
+ paths: Rc<OnceCell<HashSet<RepoPath>>>,
}
#[derive(Debug, Clone)]
struct GitPanelEntry {
- entry: Entry,
+ entry: worktree::StatusEntry,
hunks: Rc<OnceCell<Vec<DiffHunk>>>,
}
impl Deref for GitPanelEntry {
- type Target = Entry;
+ type Target = worktree::StatusEntry;
fn deref(&self) -> &Self::Target {
&self.entry
@@ -134,11 +140,11 @@ impl Deref for GitPanelEntry {
}
impl WorktreeEntries {
- fn paths(&self) -> &HashSet<Arc<Path>> {
+ fn paths(&self) -> &HashSet<RepoPath> {
self.paths.get_or_init(|| {
self.visible_entries
.iter()
- .map(|e| (e.entry.path.clone()))
+ .map(|e| (e.entry.repo_path.clone()))
.collect()
})
}
@@ -165,8 +171,11 @@ impl GitPanel {
})
.detach();
cx.subscribe(&project, |this, _, event, cx| match event {
- project::Event::WorktreeRemoved(id) => {
- this.expanded_dir_ids.remove(id);
+ project::Event::GitRepositoryUpdated => {
+ this.update_visible_entries(None, None, cx);
+ }
+ project::Event::WorktreeRemoved(_id) => {
+ // this.expanded_dir_ids.remove(id);
this.update_visible_entries(None, None, cx);
cx.notify();
}
@@ -183,7 +192,7 @@ impl GitPanel {
project::Event::Closed => {
this.git_diff_editor_updates = Task::ready(());
this.reveal_in_editor = Task::ready(());
- this.expanded_dir_ids.clear();
+ // this.expanded_dir_ids.clear();
this.visible_entries.clear();
this.git_diff_editor = None;
}
@@ -200,8 +209,7 @@ impl GitPanel {
pending_serialization: Task::ready(None),
visible_entries: Vec::new(),
current_modifiers: cx.modifiers(),
- expanded_dir_ids: Default::default(),
-
+ // expanded_dir_ids: Default::default(),
width: Some(px(360.)),
scrollbar_state: ScrollbarState::new(scroll_handle.clone()).parent_view(cx.view()),
scroll_handle,
@@ -288,16 +296,16 @@ impl GitPanel {
}
fn calculate_depth_and_difference(
- entry: &Entry,
- visible_worktree_entries: &HashSet<Arc<Path>>,
+ entry: &StatusEntry,
+ visible_worktree_entries: &HashSet<RepoPath>,
) -> (usize, usize) {
let (depth, difference) = entry
- .path
+ .repo_path
.ancestors()
.skip(1) // Skip the entry itself
.find_map(|ancestor| {
if let Some(parent_entry) = visible_worktree_entries.get(ancestor) {
- let entry_path_components_count = entry.path.components().count();
+ let entry_path_components_count = entry.repo_path.components().count();
let parent_path_components_count = parent_entry.components().count();
let difference = entry_path_components_count - parent_path_components_count;
let depth = parent_entry
@@ -432,13 +440,7 @@ impl GitPanel {
fn entry_count(&self) -> usize {
self.visible_entries
.iter()
- .map(|worktree_entries| {
- worktree_entries
- .visible_entries
- .iter()
- .filter(|entry| entry.git_status.is_some())
- .count()
- })
+ .map(|worktree_entries| worktree_entries.visible_entries.len())
.sum()
}
@@ -446,7 +448,7 @@ impl GitPanel {
&self,
range: Range<usize>,
cx: &mut ViewContext<Self>,
- mut callback: impl FnMut(ProjectEntryId, EntryDetails, &mut ViewContext<Self>),
+ mut callback: impl FnMut(usize, EntryDetails, &mut ViewContext<Self>),
) {
let mut ix = 0;
for worktree_entries in &self.visible_entries {
@@ -468,11 +470,11 @@ impl GitPanel {
{
let snapshot = worktree.read(cx).snapshot();
let root_name = OsStr::new(snapshot.root_name());
- let expanded_entry_ids = self
- .expanded_dir_ids
- .get(&snapshot.id())
- .map(Vec::as_slice)
- .unwrap_or(&[]);
+ // let expanded_entry_ids = self
+ // .expanded_dir_ids
+ // .get(&snapshot.id())
+ // .map(Vec::as_slice)
+ // .unwrap_or(&[]);
let entry_range = range.start.saturating_sub(ix)..end_ix - ix;
let entries = worktree_entries.paths();
@@ -483,22 +485,22 @@ impl GitPanel {
.enumerate()
{
let index = index_start + i;
- let status = entry.git_status;
- let is_expanded = expanded_entry_ids.binary_search(&entry.id).is_ok();
+ let status = entry.status;
+ let is_expanded = true; //expanded_entry_ids.binary_search(&entry.id).is_ok();
let (depth, difference) = Self::calculate_depth_and_difference(entry, entries);
let filename = match difference {
diff if diff > 1 => entry
- .path
+ .repo_path
.iter()
- .skip(entry.path.components().count() - diff)
+ .skip(entry.repo_path.components().count() - diff)
.collect::<PathBuf>()
.to_str()
.unwrap_or_default()
.to_string(),
_ => entry
- .path
+ .repo_path
.file_name()
.map(|name| name.to_string_lossy().into_owned())
.unwrap_or_else(|| root_name.to_string_lossy().to_string()),
@@ -506,16 +508,17 @@ impl GitPanel {
let details = EntryDetails {
filename,
- display_name: entry.path.to_string_lossy().into_owned(),
- kind: entry.kind,
+ display_name: entry.repo_path.to_string_lossy().into_owned(),
+ // TODO get it from StatusEntry?
+ kind: EntryKind::File,
is_expanded,
- path: entry.path.clone(),
- status,
+ path: entry.repo_path.clone(),
+ status: Some(status),
hunks: entry.hunks.clone(),
depth,
index,
};
- callback(entry.id, details, cx);
+ callback(ix, details, cx);
}
}
ix = end_ix;
@@ -527,7 +530,7 @@ impl GitPanel {
fn update_visible_entries(
&mut self,
for_worktree: Option<WorktreeId>,
- new_selected_entry: Option<(WorktreeId, ProjectEntryId)>,
+ _new_selected_entry: Option<(WorktreeId, ProjectEntryId)>,
cx: &mut ViewContext<Self>,
) {
let project = self.project.read(cx);
@@ -549,24 +552,36 @@ impl GitPanel {
None => false,
});
for worktree in project.visible_worktrees(cx) {
- let worktree_id = worktree.read(cx).id();
+ let snapshot = worktree.read(cx).snapshot();
+ let worktree_id = snapshot.id();
+
if for_worktree.is_some() && for_worktree != Some(worktree_id) {
continue;
}
- let snapshot = worktree.read(cx).snapshot();
- let mut visible_worktree_entries = snapshot
- .entries(false, 0)
- .filter(|entry| !entry.is_external)
- .filter(|entry| entry.git_status.is_some())
- .cloned()
- .collect::<Vec<_>>();
- snapshot.propagate_git_statuses(&mut visible_worktree_entries);
- project::sort_worktree_entries(&mut visible_worktree_entries);
+ let mut visible_worktree_entries = Vec::new();
+ // Only use the first repository for now
+ let repositories = snapshot.repositories().take(1);
+ let mut work_directory = None;
+ for repository in repositories {
+ visible_worktree_entries.extend(repository.status());
+ work_directory = Some(worktree::WorkDirectory::clone(repository));
+ }
+
+ // TODO use the GitTraversal
+ // let mut visible_worktree_entries = snapshot
+ // .entries(false, 0)
+ // .filter(|entry| !entry.is_external)
+ // .filter(|entry| entry.git_status.is_some())
+ // .cloned()
+ // .collect::<Vec<_>>();
+ // snapshot.propagate_git_statuses(&mut visible_worktree_entries);
+ // project::sort_worktree_entries(&mut visible_worktree_entries);
if !visible_worktree_entries.is_empty() {
self.visible_entries.push(WorktreeEntries {
worktree_id,
+ work_directory: work_directory.unwrap(),
visible_entries: visible_worktree_entries
.into_iter()
.map(|entry| GitPanelEntry {
@@ -580,24 +595,25 @@ impl GitPanel {
}
self.visible_entries.extend(after_update);
- if let Some((worktree_id, entry_id)) = new_selected_entry {
- self.selected_item = self.visible_entries.iter().enumerate().find_map(
- |(worktree_index, worktree_entries)| {
- if worktree_entries.worktree_id == worktree_id {
- worktree_entries
- .visible_entries
- .iter()
- .position(|entry| entry.id == entry_id)
- .map(|entry_index| {
- worktree_index * worktree_entries.visible_entries.len()
- + entry_index
- })
- } else {
- None
- }
- },
- );
- }
+ // TODO re-implement this
+ // if let Some((worktree_id, entry_id)) = new_selected_entry {
+ // self.selected_item = self.visible_entries.iter().enumerate().find_map(
+ // |(worktree_index, worktree_entries)| {
+ // if worktree_entries.worktree_id == worktree_id {
+ // worktree_entries
+ // .visible_entries
+ // .iter()
+ // .position(|entry| entry.id == entry_id)
+ // .map(|entry_index| {
+ // worktree_index * worktree_entries.visible_entries.len()
+ // + entry_index
+ // })
+ // } else {
+ // None
+ // }
+ // },
+ // );
+ // }
let project = self.project.downgrade();
self.git_diff_editor_updates = cx.spawn(|git_panel, mut cx| async move {
@@ -612,12 +628,14 @@ impl GitPanel {
.visible_entries
.iter()
.filter_map(|entry| {
- let git_status = entry.git_status()?;
+ let git_status = entry.status;
let entry_hunks = entry.hunks.clone();
let (entry_path, unstaged_changes_task) =
project.update(cx, |project, cx| {
- let entry_path =
- project.path_for_entry(entry.id, cx)?;
+ let entry_path = ProjectPath {
+ worktree_id: worktree_entries.worktree_id,
+ path: worktree_entries.work_directory.unrelativize(&entry.repo_path)?,
+ };
let open_task =
project.open_path(entry_path.clone(), cx);
let unstaged_changes_task =
@@ -682,8 +700,8 @@ impl GitPanel {
)
.collect()
}
- // TODO support conflicts display
- GitFileStatus::Conflict => Vec::new(),
+ // TODO support displaying Conflict, Deleted, and Untracked statuses
+ GitFileStatus::Conflict | GitFileStatus::Deleted | GitFileStatus::Untracked => Vec::new(),
}
}).clone()
})?;
@@ -992,18 +1010,17 @@ impl GitPanel {
fn render_entry(
&self,
- id: ProjectEntryId,
+ ix: usize,
selected: bool,
details: EntryDetails,
cx: &ViewContext<Self>,
) -> impl IntoElement {
- let id = id.to_proto() as usize;
- let checkbox_id = ElementId::Name(format!("checkbox_{}", id).into());
+ let checkbox_id = ElementId::Name(format!("checkbox_{}", ix).into());
let is_staged = ToggleState::Selected;
let handle = cx.view().downgrade();
h_flex()
- .id(id)
+ .id(("git-panel-entry", ix))
.h(px(28.))
.w_full()
.pl(px(12. + 12. * details.depth as f32))
@@ -1019,7 +1036,7 @@ impl GitPanel {
this.child(git_status_icon(status))
})
.child(
- ListItem::new(("label", id))
+ ListItem::new(details.path.0.clone())
.toggle_state(selected)
.child(h_flex().gap_1p5().child(details.display_name.clone()))
.on_click(move |e, cx| {
@@ -44,10 +44,13 @@ const REMOVED_COLOR: Hsla = Hsla {
// TODO: Add updated status colors to theme
pub fn git_status_icon(status: GitFileStatus) -> impl IntoElement {
match status {
- GitFileStatus::Added => Icon::new(IconName::SquarePlus).color(Color::Custom(ADDED_COLOR)),
+ GitFileStatus::Added | GitFileStatus::Untracked => {
+ Icon::new(IconName::SquarePlus).color(Color::Custom(ADDED_COLOR))
+ }
GitFileStatus::Modified => {
Icon::new(IconName::SquareDot).color(Color::Custom(MODIFIED_COLOR))
}
GitFileStatus::Conflict => Icon::new(IconName::Warning).color(Color::Custom(REMOVED_COLOR)),
+ GitFileStatus::Deleted => Icon::new(IconName::Warning).color(Color::Custom(REMOVED_COLOR)),
}
}
@@ -1580,7 +1580,7 @@ impl LinuxClient for X11Client {
}
}
-// Adatpted from:
+// Adapted from:
// https://docs.rs/winit/0.29.11/src/winit/platform_impl/linux/x11/monitor.rs.html#103-111
pub fn mode_refresh_rate(mode: &randr::ModeInfo) -> Duration {
if mode.dot_clock == 0 || mode.htotal == 0 || mode.vtotal == 0 {
@@ -322,7 +322,7 @@ unsafe fn parse_keystroke(native_event: id) -> Keystroke {
let mut chars_with_shift = chars_for_modified_key(native_event.keyCode(), SHIFT_MOD);
let always_use_cmd_layout = always_use_command_layout();
- // Handle Dvorak+QWERTY / Russian / Armeniam
+ // Handle Dvorak+QWERTY / Russian / Armenian
if command || always_use_cmd_layout {
let chars_with_cmd = chars_for_modified_key(native_event.keyCode(), CMD_MOD);
let chars_with_both =
@@ -4880,6 +4880,8 @@ pub enum ElementId {
FocusHandle(FocusId),
/// A combination of a name and an integer.
NamedInteger(SharedString, usize),
+ /// A file-system path, used to identify an element by its path.
+ Path(Arc<std::path::Path>),
}
impl Display for ElementId {
@@ -4891,6 +4893,7 @@ impl Display for ElementId {
ElementId::FocusHandle(_) => write!(f, "FocusHandle")?,
ElementId::NamedInteger(s, i) => write!(f, "{}-{}", s, i)?,
ElementId::Uuid(uuid) => write!(f, "{}", uuid)?,
+ ElementId::Path(path) => write!(f, "{}", path.display())?,
}
Ok(())
@@ -4927,6 +4930,12 @@ impl From<SharedString> for ElementId {
}
}
+impl From<Arc<std::path::Path>> for ElementId {
+ fn from(path: Arc<std::path::Path>) -> Self {
+ ElementId::Path(path)
+ }
+}
+
impl From<&'static str> for ElementId {
fn from(name: &'static str) -> Self {
ElementId::Name(name.into())
@@ -96,12 +96,18 @@ impl Item for ImageView {
fn tab_content(&self, params: TabContentParams, cx: &WindowContext) -> AnyElement {
let project_path = self.image_item.read(cx).project_path(cx);
+
let label_color = if ItemSettings::get_global(cx).git_status {
+ let git_status = self
+ .project
+ .read(cx)
+ .project_path_git_status(&project_path, cx);
+
self.project
.read(cx)
.entry_for_path(&project_path, cx)
.map(|entry| {
- entry_git_aware_label_color(entry.git_status, entry.is_ignored, params.selected)
+ entry_git_aware_label_color(git_status, entry.is_ignored, params.selected)
})
.unwrap_or_else(|| params.text_color())
} else {
@@ -19,8 +19,8 @@ db.workspace = true
editor.workspace = true
file_icons.workspace = true
fuzzy.workspace = true
-itertools.workspace = true
gpui.workspace = true
+itertools.workspace = true
language.workspace = true
log.workspace = true
menu.workspace = true
@@ -36,8 +36,8 @@ smol.workspace = true
theme.workspace = true
ui.workspace = true
util.workspace = true
-worktree.workspace = true
workspace.workspace = true
+worktree.workspace = true
[dev-dependencies]
search = { workspace = true, features = ["test-support"] }
@@ -56,7 +56,7 @@ use workspace::{
},
OpenInTerminal, WeakItemHandle, Workspace,
};
-use worktree::{Entry, ProjectEntryId, WorktreeId};
+use worktree::{Entry, GitEntry, ProjectEntryId, WorktreeId};
actions!(
outline_panel,
@@ -348,10 +348,17 @@ enum ExcerptOutlines {
NotFetched,
}
+#[derive(Clone, Debug, PartialEq, Eq)]
+struct FoldedDirsEntry {
+ worktree_id: WorktreeId,
+ entries: Vec<GitEntry>,
+}
+
+// TODO: collapse the inner enums into panel entry
#[derive(Clone, Debug)]
enum PanelEntry {
Fs(FsEntry),
- FoldedDirs(WorktreeId, Vec<Entry>),
+ FoldedDirs(FoldedDirsEntry),
Outline(OutlineEntry),
Search(SearchEntry),
}
@@ -383,7 +390,18 @@ impl PartialEq for PanelEntry {
fn eq(&self, other: &Self) -> bool {
match (self, other) {
(Self::Fs(a), Self::Fs(b)) => a == b,
- (Self::FoldedDirs(a1, a2), Self::FoldedDirs(b1, b2)) => a1 == b1 && a2 == b2,
+ (
+ Self::FoldedDirs(FoldedDirsEntry {
+ worktree_id: worktree_id_a,
+ entries: entries_a,
+ ..
+ }),
+ Self::FoldedDirs(FoldedDirsEntry {
+ worktree_id: worktree_id_b,
+ entries: entries_b,
+ ..
+ }),
+ ) => worktree_id_a == worktree_id_b && entries_a == entries_b,
(Self::Outline(a), Self::Outline(b)) => a == b,
(
Self::Search(SearchEntry {
@@ -505,54 +523,124 @@ impl SearchData {
}
}
-#[derive(Clone, Debug, PartialEq, Eq)]
-enum OutlineEntry {
- Excerpt(BufferId, ExcerptId, ExcerptRange<language::Anchor>),
- Outline(BufferId, ExcerptId, Outline),
+#[derive(Clone, Debug, Eq)]
+struct OutlineEntryExcerpt {
+ id: ExcerptId,
+ buffer_id: BufferId,
+ range: ExcerptRange<language::Anchor>,
+}
+
+impl PartialEq for OutlineEntryExcerpt {
+ fn eq(&self, other: &Self) -> bool {
+ self.buffer_id == other.buffer_id && self.id == other.id
+ }
+}
+
+impl Hash for OutlineEntryExcerpt {
+ fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
+ (self.buffer_id, self.id).hash(state)
+ }
}
#[derive(Clone, Debug, Eq)]
-enum FsEntry {
- ExternalFile(BufferId, Vec<ExcerptId>),
- Directory(WorktreeId, Entry),
- File(WorktreeId, Entry, BufferId, Vec<ExcerptId>),
+struct OutlineEntryOutline {
+ buffer_id: BufferId,
+ excerpt_id: ExcerptId,
+ outline: Outline,
}
-impl PartialEq for FsEntry {
+impl PartialEq for OutlineEntryOutline {
fn eq(&self, other: &Self) -> bool {
- match (self, other) {
- (Self::ExternalFile(id_a, _), Self::ExternalFile(id_b, _)) => id_a == id_b,
- (Self::Directory(id_a, entry_a), Self::Directory(id_b, entry_b)) => {
- id_a == id_b && entry_a.id == entry_b.id
- }
- (
- Self::File(worktree_a, entry_a, id_a, ..),
- Self::File(worktree_b, entry_b, id_b, ..),
- ) => worktree_a == worktree_b && entry_a.id == entry_b.id && id_a == id_b,
- _ => false,
- }
+ self.buffer_id == other.buffer_id && self.excerpt_id == other.excerpt_id
}
}
-impl Hash for FsEntry {
+impl Hash for OutlineEntryOutline {
fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
+ (self.buffer_id, self.excerpt_id).hash(state);
+ }
+}
+
+#[derive(Clone, Debug, PartialEq, Eq)]
+enum OutlineEntry {
+ Excerpt(OutlineEntryExcerpt),
+ Outline(OutlineEntryOutline),
+}
+
+impl OutlineEntry {
+ fn ids(&self) -> (BufferId, ExcerptId) {
match self {
- Self::ExternalFile(buffer_id, _) => {
- buffer_id.hash(state);
- }
- Self::Directory(worktree_id, entry) => {
- worktree_id.hash(state);
- entry.id.hash(state);
- }
- Self::File(worktree_id, entry, buffer_id, _) => {
- worktree_id.hash(state);
- entry.id.hash(state);
- buffer_id.hash(state);
- }
+ OutlineEntry::Excerpt(excerpt) => (excerpt.buffer_id, excerpt.id),
+ OutlineEntry::Outline(outline) => (outline.buffer_id, outline.excerpt_id),
}
}
}
+#[derive(Debug, Clone, Eq)]
+struct FsEntryFile {
+ worktree_id: WorktreeId,
+ entry: GitEntry,
+ buffer_id: BufferId,
+ excerpts: Vec<ExcerptId>,
+}
+
+impl PartialEq for FsEntryFile {
+ fn eq(&self, other: &Self) -> bool {
+ self.worktree_id == other.worktree_id
+ && self.entry.id == other.entry.id
+ && self.buffer_id == other.buffer_id
+ }
+}
+
+impl Hash for FsEntryFile {
+ fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
+ (self.buffer_id, self.entry.id, self.worktree_id).hash(state);
+ }
+}
+
+#[derive(Debug, Clone, Eq)]
+struct FsEntryDirectory {
+ worktree_id: WorktreeId,
+ entry: GitEntry,
+}
+
+impl PartialEq for FsEntryDirectory {
+ fn eq(&self, other: &Self) -> bool {
+ self.worktree_id == other.worktree_id && self.entry.id == other.entry.id
+ }
+}
+
+impl Hash for FsEntryDirectory {
+ fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
+ (self.worktree_id, self.entry.id).hash(state);
+ }
+}
+
+#[derive(Debug, Clone, Eq)]
+struct FsEntryExternalFile {
+ buffer_id: BufferId,
+ excerpts: Vec<ExcerptId>,
+}
+
+impl PartialEq for FsEntryExternalFile {
+ fn eq(&self, other: &Self) -> bool {
+ self.buffer_id == other.buffer_id
+ }
+}
+
+impl Hash for FsEntryExternalFile {
+ fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
+ self.buffer_id.hash(state);
+ }
+}
+
+#[derive(Clone, Debug, Eq, PartialEq)]
+enum FsEntry {
+ ExternalFile(FsEntryExternalFile),
+ Directory(FsEntryDirectory),
+ File(FsEntryFile),
+}
+
struct ActiveItem {
item_handle: Box<dyn WeakItemHandle>,
active_editor: WeakView<Editor>,
@@ -775,7 +863,12 @@ impl OutlinePanel {
}
fn unfold_directory(&mut self, _: &UnfoldDirectory, cx: &mut ViewContext<Self>) {
- if let Some(PanelEntry::FoldedDirs(worktree_id, entries)) = self.selected_entry().cloned() {
+ if let Some(PanelEntry::FoldedDirs(FoldedDirsEntry {
+ worktree_id,
+ entries,
+ ..
+ })) = self.selected_entry().cloned()
+ {
self.unfolded_dirs
.entry(worktree_id)
.or_default()
@@ -786,11 +879,11 @@ impl OutlinePanel {
fn fold_directory(&mut self, _: &FoldDirectory, cx: &mut ViewContext<Self>) {
let (worktree_id, entry) = match self.selected_entry().cloned() {
- Some(PanelEntry::Fs(FsEntry::Directory(worktree_id, entry))) => {
- (worktree_id, Some(entry))
+ Some(PanelEntry::Fs(FsEntry::Directory(directory))) => {
+ (directory.worktree_id, Some(directory.entry))
}
- Some(PanelEntry::FoldedDirs(worktree_id, entries)) => {
- (worktree_id, entries.last().cloned())
+ Some(PanelEntry::FoldedDirs(folded_dirs)) => {
+ (folded_dirs.worktree_id, folded_dirs.entries.last().cloned())
}
_ => return,
};
@@ -875,12 +968,12 @@ impl OutlinePanel {
let mut scroll_to_buffer = None;
let scroll_target = match entry {
PanelEntry::FoldedDirs(..) | PanelEntry::Fs(FsEntry::Directory(..)) => None,
- PanelEntry::Fs(FsEntry::ExternalFile(buffer_id, _)) => {
+ PanelEntry::Fs(FsEntry::ExternalFile(file)) => {
change_selection = false;
- scroll_to_buffer = Some(*buffer_id);
+ scroll_to_buffer = Some(file.buffer_id);
multi_buffer_snapshot.excerpts().find_map(
|(excerpt_id, buffer_snapshot, excerpt_range)| {
- if &buffer_snapshot.remote_id() == buffer_id {
+ if buffer_snapshot.remote_id() == file.buffer_id {
multi_buffer_snapshot
.anchor_in_excerpt(excerpt_id, excerpt_range.context.start)
} else {
@@ -889,13 +982,14 @@ impl OutlinePanel {
},
)
}
- PanelEntry::Fs(FsEntry::File(_, file_entry, buffer_id, _)) => {
+
+ PanelEntry::Fs(FsEntry::File(file)) => {
change_selection = false;
- scroll_to_buffer = Some(*buffer_id);
+ scroll_to_buffer = Some(file.buffer_id);
self.project
.update(cx, |project, cx| {
project
- .path_for_entry(file_entry.id, cx)
+ .path_for_entry(file.entry.id, cx)
.and_then(|path| project.get_open_buffer(&path, cx))
})
.map(|buffer| {
@@ -909,18 +1003,17 @@ impl OutlinePanel {
.anchor_in_excerpt(*excerpt_id, excerpt_range.context.start)
})
}
- PanelEntry::Outline(OutlineEntry::Outline(_, excerpt_id, outline)) => {
- multi_buffer_snapshot
- .anchor_in_excerpt(*excerpt_id, outline.range.start)
- .or_else(|| {
- multi_buffer_snapshot.anchor_in_excerpt(*excerpt_id, outline.range.end)
- })
- }
- PanelEntry::Outline(OutlineEntry::Excerpt(_, excerpt_id, excerpt_range)) => {
+ PanelEntry::Outline(OutlineEntry::Outline(outline)) => multi_buffer_snapshot
+ .anchor_in_excerpt(outline.excerpt_id, outline.outline.range.start)
+ .or_else(|| {
+ multi_buffer_snapshot
+ .anchor_in_excerpt(outline.excerpt_id, outline.outline.range.end)
+ }),
+ PanelEntry::Outline(OutlineEntry::Excerpt(excerpt)) => {
change_selection = false;
- multi_buffer_snapshot.anchor_in_excerpt(*excerpt_id, excerpt_range.context.start)
+ multi_buffer_snapshot.anchor_in_excerpt(excerpt.id, excerpt.range.context.start)
}
- PanelEntry::Search(SearchEntry { match_range, .. }) => Some(match_range.start),
+ PanelEntry::Search(search_entry) => Some(search_entry.match_range.start),
};
if let Some(anchor) = scroll_target {
@@ -960,8 +1053,10 @@ impl OutlinePanel {
.iter()
.rev()
.filter_map(|entry| match entry {
- FsEntry::File(_, _, buffer_id, _)
- | FsEntry::ExternalFile(buffer_id, _) => Some(*buffer_id),
+ FsEntry::File(file) => Some(file.buffer_id),
+ FsEntry::ExternalFile(external_file) => {
+ Some(external_file.buffer_id)
+ }
FsEntry::Directory(..) => None,
})
.skip_while(|id| *id != buffer_id)
@@ -1044,69 +1139,68 @@ impl OutlinePanel {
match &selected_entry {
PanelEntry::Fs(fs_entry) => match fs_entry {
FsEntry::ExternalFile(..) => None,
- FsEntry::File(worktree_id, entry, ..)
- | FsEntry::Directory(worktree_id, entry) => {
- entry.path.parent().and_then(|parent_path| {
- previous_entries.find(|entry| match entry {
- PanelEntry::Fs(FsEntry::Directory(dir_worktree_id, dir_entry)) => {
- dir_worktree_id == worktree_id
- && dir_entry.path.as_ref() == parent_path
- }
- PanelEntry::FoldedDirs(dirs_worktree_id, dirs) => {
- dirs_worktree_id == worktree_id
- && dirs
- .last()
- .map_or(false, |dir| dir.path.as_ref() == parent_path)
- }
- _ => false,
- })
+ FsEntry::File(FsEntryFile {
+ worktree_id, entry, ..
+ })
+ | FsEntry::Directory(FsEntryDirectory {
+ worktree_id, entry, ..
+ }) => entry.path.parent().and_then(|parent_path| {
+ previous_entries.find(|entry| match entry {
+ PanelEntry::Fs(FsEntry::Directory(directory)) => {
+ directory.worktree_id == *worktree_id
+ && directory.entry.path.as_ref() == parent_path
+ }
+ PanelEntry::FoldedDirs(FoldedDirsEntry {
+ worktree_id: dirs_worktree_id,
+ entries: dirs,
+ ..
+ }) => {
+ dirs_worktree_id == worktree_id
+ && dirs
+ .last()
+ .map_or(false, |dir| dir.path.as_ref() == parent_path)
+ }
+ _ => false,
})
- }
+ }),
},
- PanelEntry::FoldedDirs(worktree_id, entries) => entries
+ PanelEntry::FoldedDirs(folded_dirs) => folded_dirs
+ .entries
.first()
.and_then(|entry| entry.path.parent())
.and_then(|parent_path| {
previous_entries.find(|entry| {
- if let PanelEntry::Fs(FsEntry::Directory(dir_worktree_id, dir_entry)) =
- entry
- {
- dir_worktree_id == worktree_id
- && dir_entry.path.as_ref() == parent_path
+ if let PanelEntry::Fs(FsEntry::Directory(directory)) = entry {
+ directory.worktree_id == folded_dirs.worktree_id
+ && directory.entry.path.as_ref() == parent_path
} else {
false
}
})
}),
- PanelEntry::Outline(OutlineEntry::Excerpt(excerpt_buffer_id, excerpt_id, _)) => {
+ PanelEntry::Outline(OutlineEntry::Excerpt(excerpt)) => {
previous_entries.find(|entry| match entry {
- PanelEntry::Fs(FsEntry::File(_, _, file_buffer_id, file_excerpts)) => {
- file_buffer_id == excerpt_buffer_id
- && file_excerpts.contains(excerpt_id)
+ PanelEntry::Fs(FsEntry::File(file)) => {
+ file.buffer_id == excerpt.buffer_id
+ && file.excerpts.contains(&excerpt.id)
}
- PanelEntry::Fs(FsEntry::ExternalFile(file_buffer_id, file_excerpts)) => {
- file_buffer_id == excerpt_buffer_id
- && file_excerpts.contains(excerpt_id)
+ PanelEntry::Fs(FsEntry::ExternalFile(external_file)) => {
+ external_file.buffer_id == excerpt.buffer_id
+ && external_file.excerpts.contains(&excerpt.id)
}
_ => false,
})
}
- PanelEntry::Outline(OutlineEntry::Outline(
- outline_buffer_id,
- outline_excerpt_id,
- _,
- )) => previous_entries.find(|entry| {
- if let PanelEntry::Outline(OutlineEntry::Excerpt(
- excerpt_buffer_id,
- excerpt_id,
- _,
- )) = entry
- {
- outline_buffer_id == excerpt_buffer_id && outline_excerpt_id == excerpt_id
- } else {
- false
- }
- }),
+ PanelEntry::Outline(OutlineEntry::Outline(outline)) => {
+ previous_entries.find(|entry| {
+ if let PanelEntry::Outline(OutlineEntry::Excerpt(excerpt)) = entry {
+ outline.buffer_id == excerpt.buffer_id
+ && outline.excerpt_id == excerpt.id
+ } else {
+ false
+ }
+ })
+ }
PanelEntry::Search(_) => {
previous_entries.find(|entry| !matches!(entry, PanelEntry::Search(_)))
}
@@ -1164,8 +1258,12 @@ impl OutlinePanel {
) {
self.select_entry(entry.clone(), true, cx);
let is_root = match &entry {
- PanelEntry::Fs(FsEntry::File(worktree_id, entry, ..))
- | PanelEntry::Fs(FsEntry::Directory(worktree_id, entry)) => self
+ PanelEntry::Fs(FsEntry::File(FsEntryFile {
+ worktree_id, entry, ..
+ }))
+ | PanelEntry::Fs(FsEntry::Directory(FsEntryDirectory {
+ worktree_id, entry, ..
+ })) => self
.project
.read(cx)
.worktree_for_id(*worktree_id, cx)
@@ -1173,7 +1271,11 @@ impl OutlinePanel {
worktree.read(cx).root_entry().map(|entry| entry.id) == Some(entry.id)
})
.unwrap_or(false),
- PanelEntry::FoldedDirs(worktree_id, entries) => entries
+ PanelEntry::FoldedDirs(FoldedDirsEntry {
+ worktree_id,
+ entries,
+ ..
+ }) => entries
.first()
.and_then(|entry| {
self.project
@@ -1232,9 +1334,11 @@ impl OutlinePanel {
fn is_foldable(&self, entry: &PanelEntry) -> bool {
let (directory_worktree, directory_entry) = match entry {
- PanelEntry::Fs(FsEntry::Directory(directory_worktree, directory_entry)) => {
- (*directory_worktree, Some(directory_entry))
- }
+ PanelEntry::Fs(FsEntry::Directory(FsEntryDirectory {
+ worktree_id,
+ entry: directory_entry,
+ ..
+ })) => (*worktree_id, Some(directory_entry)),
_ => return false,
};
let Some(directory_entry) = directory_entry else {
@@ -1270,24 +1374,34 @@ impl OutlinePanel {
};
let mut buffers_to_unfold = HashSet::default();
let entry_to_expand = match &selected_entry {
- PanelEntry::FoldedDirs(worktree_id, dir_entries) => dir_entries.last().map(|entry| {
+ PanelEntry::FoldedDirs(FoldedDirsEntry {
+ entries: dir_entries,
+ worktree_id,
+ ..
+ }) => dir_entries.last().map(|entry| {
buffers_to_unfold.extend(self.buffers_inside_directory(*worktree_id, entry));
CollapsedEntry::Dir(*worktree_id, entry.id)
}),
- PanelEntry::Fs(FsEntry::Directory(worktree_id, dir_entry)) => {
- buffers_to_unfold.extend(self.buffers_inside_directory(*worktree_id, dir_entry));
- Some(CollapsedEntry::Dir(*worktree_id, dir_entry.id))
+ PanelEntry::Fs(FsEntry::Directory(FsEntryDirectory {
+ worktree_id, entry, ..
+ })) => {
+ buffers_to_unfold.extend(self.buffers_inside_directory(*worktree_id, entry));
+ Some(CollapsedEntry::Dir(*worktree_id, entry.id))
}
- PanelEntry::Fs(FsEntry::File(worktree_id, _, buffer_id, _)) => {
+ PanelEntry::Fs(FsEntry::File(FsEntryFile {
+ worktree_id,
+ buffer_id,
+ ..
+ })) => {
buffers_to_unfold.insert(*buffer_id);
Some(CollapsedEntry::File(*worktree_id, *buffer_id))
}
- PanelEntry::Fs(FsEntry::ExternalFile(buffer_id, _)) => {
- buffers_to_unfold.insert(*buffer_id);
- Some(CollapsedEntry::ExternalFile(*buffer_id))
+ PanelEntry::Fs(FsEntry::ExternalFile(external_file)) => {
+ buffers_to_unfold.insert(external_file.buffer_id);
+ Some(CollapsedEntry::ExternalFile(external_file.buffer_id))
}
- PanelEntry::Outline(OutlineEntry::Excerpt(buffer_id, excerpt_id, _)) => {
- Some(CollapsedEntry::Excerpt(*buffer_id, *excerpt_id))
+ PanelEntry::Outline(OutlineEntry::Excerpt(excerpt)) => {
+ Some(CollapsedEntry::Excerpt(excerpt.buffer_id, excerpt.id))
}
PanelEntry::Search(_) | PanelEntry::Outline(..) => return,
};
@@ -1330,19 +1444,24 @@ impl OutlinePanel {
let mut buffers_to_fold = HashSet::default();
let collapsed = match &selected_entry {
- PanelEntry::Fs(FsEntry::Directory(worktree_id, selected_dir_entry)) => {
+ PanelEntry::Fs(FsEntry::Directory(FsEntryDirectory {
+ worktree_id, entry, ..
+ })) => {
if self
.collapsed_entries
- .insert(CollapsedEntry::Dir(*worktree_id, selected_dir_entry.id))
+ .insert(CollapsedEntry::Dir(*worktree_id, entry.id))
{
- buffers_to_fold
- .extend(self.buffers_inside_directory(*worktree_id, selected_dir_entry));
+ buffers_to_fold.extend(self.buffers_inside_directory(*worktree_id, entry));
true
} else {
false
}
}
- PanelEntry::Fs(FsEntry::File(worktree_id, _, buffer_id, _)) => {
+ PanelEntry::Fs(FsEntry::File(FsEntryFile {
+ worktree_id,
+ buffer_id,
+ ..
+ })) => {
if self
.collapsed_entries
.insert(CollapsedEntry::File(*worktree_id, *buffer_id))
@@ -1353,34 +1472,35 @@ impl OutlinePanel {
false
}
}
- PanelEntry::Fs(FsEntry::ExternalFile(buffer_id, _)) => {
+ PanelEntry::Fs(FsEntry::ExternalFile(external_file)) => {
if self
.collapsed_entries
- .insert(CollapsedEntry::ExternalFile(*buffer_id))
+ .insert(CollapsedEntry::ExternalFile(external_file.buffer_id))
{
- buffers_to_fold.insert(*buffer_id);
+ buffers_to_fold.insert(external_file.buffer_id);
true
} else {
false
}
}
- PanelEntry::FoldedDirs(worktree_id, dir_entries) => {
+ PanelEntry::FoldedDirs(folded_dirs) => {
let mut folded = false;
- if let Some(dir_entry) = dir_entries.last() {
+ if let Some(dir_entry) = folded_dirs.entries.last() {
if self
.collapsed_entries
- .insert(CollapsedEntry::Dir(*worktree_id, dir_entry.id))
+ .insert(CollapsedEntry::Dir(folded_dirs.worktree_id, dir_entry.id))
{
folded = true;
- buffers_to_fold
- .extend(self.buffers_inside_directory(*worktree_id, dir_entry));
+ buffers_to_fold.extend(
+ self.buffers_inside_directory(folded_dirs.worktree_id, dir_entry),
+ );
}
}
folded
}
- PanelEntry::Outline(OutlineEntry::Excerpt(buffer_id, excerpt_id, _)) => self
+ PanelEntry::Outline(OutlineEntry::Excerpt(excerpt)) => self
.collapsed_entries
- .insert(CollapsedEntry::Excerpt(*buffer_id, *excerpt_id)),
+ .insert(CollapsedEntry::Excerpt(excerpt.buffer_id, excerpt.id)),
PanelEntry::Search(_) | PanelEntry::Outline(..) => false,
};
@@ -1409,31 +1529,42 @@ impl OutlinePanel {
.iter()
.fold(HashSet::default(), |mut entries, fs_entry| {
match fs_entry {
- FsEntry::ExternalFile(buffer_id, _) => {
- buffers_to_unfold.insert(*buffer_id);
- entries.insert(CollapsedEntry::ExternalFile(*buffer_id));
- entries.extend(self.excerpts.get(buffer_id).into_iter().flat_map(
- |excerpts| {
- excerpts.iter().map(|(excerpt_id, _)| {
- CollapsedEntry::Excerpt(*buffer_id, *excerpt_id)
- })
- },
- ));
- }
- FsEntry::Directory(worktree_id, entry) => {
- entries.insert(CollapsedEntry::Dir(*worktree_id, entry.id));
+ FsEntry::ExternalFile(external_file) => {
+ buffers_to_unfold.insert(external_file.buffer_id);
+ entries.insert(CollapsedEntry::ExternalFile(external_file.buffer_id));
+ entries.extend(
+ self.excerpts
+ .get(&external_file.buffer_id)
+ .into_iter()
+ .flat_map(|excerpts| {
+ excerpts.iter().map(|(excerpt_id, _)| {
+ CollapsedEntry::Excerpt(
+ external_file.buffer_id,
+ *excerpt_id,
+ )
+ })
+ }),
+ );
}
- FsEntry::File(worktree_id, _, buffer_id, _) => {
- buffers_to_unfold.insert(*buffer_id);
- entries.insert(CollapsedEntry::File(*worktree_id, *buffer_id));
- entries.extend(self.excerpts.get(buffer_id).into_iter().flat_map(
- |excerpts| {
- excerpts.iter().map(|(excerpt_id, _)| {
- CollapsedEntry::Excerpt(*buffer_id, *excerpt_id)
- })
- },
+ FsEntry::Directory(directory) => {
+ entries.insert(CollapsedEntry::Dir(
+ directory.worktree_id,
+ directory.entry.id,
));
}
+ FsEntry::File(file) => {
+ buffers_to_unfold.insert(file.buffer_id);
+ entries.insert(CollapsedEntry::File(file.worktree_id, file.buffer_id));
+ entries.extend(
+ self.excerpts.get(&file.buffer_id).into_iter().flat_map(
+ |excerpts| {
+ excerpts.iter().map(|(excerpt_id, _)| {
+ CollapsedEntry::Excerpt(file.buffer_id, *excerpt_id)
+ })
+ },
+ ),
+ );
+ }
};
entries
});
@@ -1459,22 +1590,28 @@ impl OutlinePanel {
.cached_entries
.iter()
.flat_map(|cached_entry| match &cached_entry.entry {
- PanelEntry::Fs(FsEntry::Directory(worktree_id, entry)) => {
- Some(CollapsedEntry::Dir(*worktree_id, entry.id))
- }
- PanelEntry::Fs(FsEntry::File(worktree_id, _, buffer_id, _)) => {
+ PanelEntry::Fs(FsEntry::Directory(FsEntryDirectory {
+ worktree_id, entry, ..
+ })) => Some(CollapsedEntry::Dir(*worktree_id, entry.id)),
+ PanelEntry::Fs(FsEntry::File(FsEntryFile {
+ worktree_id,
+ buffer_id,
+ ..
+ })) => {
buffers_to_fold.insert(*buffer_id);
Some(CollapsedEntry::File(*worktree_id, *buffer_id))
}
- PanelEntry::Fs(FsEntry::ExternalFile(buffer_id, _)) => {
- buffers_to_fold.insert(*buffer_id);
- Some(CollapsedEntry::ExternalFile(*buffer_id))
- }
- PanelEntry::FoldedDirs(worktree_id, entries) => {
- Some(CollapsedEntry::Dir(*worktree_id, entries.last()?.id))
+ PanelEntry::Fs(FsEntry::ExternalFile(external_file)) => {
+ buffers_to_fold.insert(external_file.buffer_id);
+ Some(CollapsedEntry::ExternalFile(external_file.buffer_id))
}
- PanelEntry::Outline(OutlineEntry::Excerpt(buffer_id, excerpt_id, _)) => {
- Some(CollapsedEntry::Excerpt(*buffer_id, *excerpt_id))
+ PanelEntry::FoldedDirs(FoldedDirsEntry {
+ worktree_id,
+ entries,
+ ..
+ }) => Some(CollapsedEntry::Dir(*worktree_id, entries.last()?.id)),
+ PanelEntry::Outline(OutlineEntry::Excerpt(excerpt)) => {
+ Some(CollapsedEntry::Excerpt(excerpt.buffer_id, excerpt.id))
}
PanelEntry::Search(_) | PanelEntry::Outline(..) => None,
})
@@ -1498,7 +1635,11 @@ impl OutlinePanel {
let mut fold = false;
let mut buffers_to_toggle = HashSet::default();
match entry {
- PanelEntry::Fs(FsEntry::Directory(worktree_id, dir_entry)) => {
+ PanelEntry::Fs(FsEntry::Directory(FsEntryDirectory {
+ worktree_id,
+ entry: dir_entry,
+ ..
+ })) => {
let entry_id = dir_entry.id;
let collapsed_entry = CollapsedEntry::Dir(*worktree_id, entry_id);
buffers_to_toggle.extend(self.buffers_inside_directory(*worktree_id, dir_entry));
@@ -1514,7 +1655,11 @@ impl OutlinePanel {
fold = true;
}
}
- PanelEntry::Fs(FsEntry::File(worktree_id, _, buffer_id, _)) => {
+ PanelEntry::Fs(FsEntry::File(FsEntryFile {
+ worktree_id,
+ buffer_id,
+ ..
+ })) => {
let collapsed_entry = CollapsedEntry::File(*worktree_id, *buffer_id);
buffers_to_toggle.insert(*buffer_id);
if !self.collapsed_entries.remove(&collapsed_entry) {
@@ -1522,15 +1667,19 @@ impl OutlinePanel {
fold = true;
}
}
- PanelEntry::Fs(FsEntry::ExternalFile(buffer_id, _)) => {
- let collapsed_entry = CollapsedEntry::ExternalFile(*buffer_id);
- buffers_to_toggle.insert(*buffer_id);
+ PanelEntry::Fs(FsEntry::ExternalFile(external_file)) => {
+ let collapsed_entry = CollapsedEntry::ExternalFile(external_file.buffer_id);
+ buffers_to_toggle.insert(external_file.buffer_id);
if !self.collapsed_entries.remove(&collapsed_entry) {
self.collapsed_entries.insert(collapsed_entry);
fold = true;
}
}
- PanelEntry::FoldedDirs(worktree_id, dir_entries) => {
+ PanelEntry::FoldedDirs(FoldedDirsEntry {
+ worktree_id,
+ entries: dir_entries,
+ ..
+ }) => {
if let Some(dir_entry) = dir_entries.first() {
let entry_id = dir_entry.id;
let collapsed_entry = CollapsedEntry::Dir(*worktree_id, entry_id);
@@ -1549,8 +1698,8 @@ impl OutlinePanel {
}
}
}
- PanelEntry::Outline(OutlineEntry::Excerpt(buffer_id, excerpt_id, _)) => {
- let collapsed_entry = CollapsedEntry::Excerpt(*buffer_id, *excerpt_id);
+ PanelEntry::Outline(OutlineEntry::Excerpt(excerpt)) => {
+ let collapsed_entry = CollapsedEntry::Excerpt(excerpt.buffer_id, excerpt.id);
if !self.collapsed_entries.remove(&collapsed_entry) {
self.collapsed_entries.insert(collapsed_entry);
}
@@ -1625,7 +1774,9 @@ impl OutlinePanel {
.selected_entry()
.and_then(|entry| match entry {
PanelEntry::Fs(entry) => self.relative_path(entry, cx),
- PanelEntry::FoldedDirs(_, dirs) => dirs.last().map(|entry| entry.path.clone()),
+ PanelEntry::FoldedDirs(folded_dirs) => {
+ folded_dirs.entries.last().map(|entry| entry.path.clone())
+ }
PanelEntry::Search(_) | PanelEntry::Outline(..) => None,
})
.map(|p| p.to_string_lossy().to_string())
@@ -1679,23 +1830,24 @@ impl OutlinePanel {
return Ok(());
};
let related_buffer_entry = match &entry_with_selection {
- PanelEntry::Fs(FsEntry::File(worktree_id, _, buffer_id, _)) => {
- project.update(&mut cx, |project, cx| {
- let entry_id = project
- .buffer_for_id(*buffer_id, cx)
- .and_then(|buffer| buffer.read(cx).entry_id(cx));
- project
- .worktree_for_id(*worktree_id, cx)
- .zip(entry_id)
- .and_then(|(worktree, entry_id)| {
- let entry = worktree.read(cx).entry_for_id(entry_id)?.clone();
- Some((worktree, entry))
- })
- })?
- }
+ PanelEntry::Fs(FsEntry::File(FsEntryFile {
+ worktree_id,
+ buffer_id,
+ ..
+ })) => project.update(&mut cx, |project, cx| {
+ let entry_id = project
+ .buffer_for_id(*buffer_id, cx)
+ .and_then(|buffer| buffer.read(cx).entry_id(cx));
+ project
+ .worktree_for_id(*worktree_id, cx)
+ .zip(entry_id)
+ .and_then(|(worktree, entry_id)| {
+ let entry = worktree.read(cx).entry_for_id(entry_id)?.clone();
+ Some((worktree, entry))
+ })
+ })?,
PanelEntry::Outline(outline_entry) => {
- let &(OutlineEntry::Outline(buffer_id, excerpt_id, _)
- | OutlineEntry::Excerpt(buffer_id, excerpt_id, _)) = outline_entry;
+ let (buffer_id, excerpt_id) = outline_entry.ids();
outline_panel.update(&mut cx, |outline_panel, cx| {
outline_panel
.collapsed_entries
@@ -1808,25 +1960,21 @@ impl OutlinePanel {
fn render_excerpt(
&self,
- buffer_id: BufferId,
- excerpt_id: ExcerptId,
- range: &ExcerptRange<language::Anchor>,
+ excerpt: &OutlineEntryExcerpt,
depth: usize,
cx: &mut ViewContext<OutlinePanel>,
) -> Option<Stateful<Div>> {
- let item_id = ElementId::from(excerpt_id.to_proto() as usize);
+ let item_id = ElementId::from(excerpt.id.to_proto() as usize);
let is_active = match self.selected_entry() {
- Some(PanelEntry::Outline(OutlineEntry::Excerpt(
- selected_buffer_id,
- selected_excerpt_id,
- _,
- ))) => selected_buffer_id == &buffer_id && selected_excerpt_id == &excerpt_id,
+ Some(PanelEntry::Outline(OutlineEntry::Excerpt(selected_excerpt))) => {
+ selected_excerpt.buffer_id == excerpt.buffer_id && selected_excerpt.id == excerpt.id
+ }
_ => false,
};
let has_outlines = self
.excerpts
- .get(&buffer_id)
- .and_then(|excerpts| match &excerpts.get(&excerpt_id)?.outlines {
+ .get(&excerpt.buffer_id)
+ .and_then(|excerpts| match &excerpts.get(&excerpt.id)?.outlines {
ExcerptOutlines::Outlines(outlines) => Some(outlines),
ExcerptOutlines::Invalidated(outlines) => Some(outlines),
ExcerptOutlines::NotFetched => None,
@@ -1834,7 +1982,7 @@ impl OutlinePanel {
.map_or(false, |outlines| !outlines.is_empty());
let is_expanded = !self
.collapsed_entries
- .contains(&CollapsedEntry::Excerpt(buffer_id, excerpt_id));
+ .contains(&CollapsedEntry::Excerpt(excerpt.buffer_id, excerpt.id));
let color = entry_git_aware_label_color(None, false, is_active);
let icon = if has_outlines {
FileIcons::get_chevron_icon(is_expanded, cx)
@@ -1844,14 +1992,14 @@ impl OutlinePanel {
}
.unwrap_or_else(empty_icon);
- let label = self.excerpt_label(buffer_id, range, cx)?;
+ let label = self.excerpt_label(excerpt.buffer_id, &excerpt.range, cx)?;
let label_element = Label::new(label)
.single_line()
.color(color)
.into_any_element();
Some(self.entry_element(
- PanelEntry::Outline(OutlineEntry::Excerpt(buffer_id, excerpt_id, range.clone())),
+ PanelEntry::Outline(OutlineEntry::Excerpt(excerpt.clone())),
item_id,
depth,
Some(icon),
@@ -1878,50 +2026,40 @@ impl OutlinePanel {
fn render_outline(
&self,
- buffer_id: BufferId,
- excerpt_id: ExcerptId,
- rendered_outline: &Outline,
+ outline: &OutlineEntryOutline,
depth: usize,
string_match: Option<&StringMatch>,
cx: &mut ViewContext<Self>,
) -> Stateful<Div> {
- let (item_id, label_element) = (
- ElementId::from(SharedString::from(format!(
- "{buffer_id:?}|{excerpt_id:?}{:?}|{:?}",
- rendered_outline.range, &rendered_outline.text,
- ))),
- outline::render_item(
- rendered_outline,
- string_match
- .map(|string_match| string_match.ranges().collect::<Vec<_>>())
- .unwrap_or_default(),
- cx,
- )
- .into_any_element(),
- );
+ let item_id = ElementId::from(SharedString::from(format!(
+ "{:?}|{:?}{:?}|{:?}",
+ outline.buffer_id, outline.excerpt_id, outline.outline.range, &outline.outline.text,
+ )));
+
+ let label_element = outline::render_item(
+ &outline.outline,
+ string_match
+ .map(|string_match| string_match.ranges().collect::<Vec<_>>())
+ .unwrap_or_default(),
+ cx,
+ )
+ .into_any_element();
+
let is_active = match self.selected_entry() {
- Some(PanelEntry::Outline(OutlineEntry::Outline(
- selected_buffer_id,
- selected_excerpt_id,
- selected_entry,
- ))) => {
- selected_buffer_id == &buffer_id
- && selected_excerpt_id == &excerpt_id
- && selected_entry == rendered_outline
+ Some(PanelEntry::Outline(OutlineEntry::Outline(selected))) => {
+ outline == selected && outline.outline == selected.outline
}
_ => false,
};
+
let icon = if self.is_singleton_active(cx) {
None
} else {
Some(empty_icon())
};
+
self.entry_element(
- PanelEntry::Outline(OutlineEntry::Outline(
- buffer_id,
- excerpt_id,
- rendered_outline.clone(),
- )),
+ PanelEntry::Outline(OutlineEntry::Outline(outline.clone())),
item_id,
depth,
icon,
@@ -1944,7 +2082,9 @@ impl OutlinePanel {
_ => false,
};
let (item_id, label_element, icon) = match rendered_entry {
- FsEntry::File(worktree_id, entry, ..) => {
+ FsEntry::File(FsEntryFile {
+ worktree_id, entry, ..
+ }) => {
let name = self.entry_name(worktree_id, entry, cx);
let color =
entry_git_aware_label_color(entry.git_status, entry.is_ignored, is_active);
@@ -1967,14 +2107,18 @@ impl OutlinePanel {
icon.unwrap_or_else(empty_icon),
)
}
- FsEntry::Directory(worktree_id, entry) => {
- let name = self.entry_name(worktree_id, entry, cx);
-
- let is_expanded = !self
- .collapsed_entries
- .contains(&CollapsedEntry::Dir(*worktree_id, entry.id));
- let color =
- entry_git_aware_label_color(entry.git_status, entry.is_ignored, is_active);
+ FsEntry::Directory(directory) => {
+ let name = self.entry_name(&directory.worktree_id, &directory.entry, cx);
+
+ let is_expanded = !self.collapsed_entries.contains(&CollapsedEntry::Dir(
+ directory.worktree_id,
+ directory.entry.id,
+ ));
+ let color = entry_git_aware_label_color(
+ directory.entry.git_status,
+ directory.entry.is_ignored,
+ is_active,
+ );
let icon = if settings.folder_icons {
FileIcons::get_folder_icon(is_expanded, cx)
} else {
@@ -1983,7 +2127,7 @@ impl OutlinePanel {
.map(Icon::from_path)
.map(|icon| icon.color(color).into_any_element());
(
- ElementId::from(entry.id.to_proto() as usize),
+ ElementId::from(directory.entry.id.to_proto() as usize),
HighlightedLabel::new(
name,
string_match
@@ -1995,9 +2139,9 @@ impl OutlinePanel {
icon.unwrap_or_else(empty_icon),
)
}
- FsEntry::ExternalFile(buffer_id, _) => {
+ FsEntry::ExternalFile(external_file) => {
let color = entry_label_color(is_active);
- let (icon, name) = match self.buffer_snapshot_for_id(*buffer_id, cx) {
+ let (icon, name) = match self.buffer_snapshot_for_id(external_file.buffer_id, cx) {
Some(buffer_snapshot) => match buffer_snapshot.file() {
Some(file) => {
let path = file.path();
@@ -2015,7 +2159,7 @@ impl OutlinePanel {
None => (None, "Unknown buffer".to_string()),
};
(
- ElementId::from(buffer_id.to_proto() as usize),
+ ElementId::from(external_file.buffer_id.to_proto() as usize),
HighlightedLabel::new(
name,
string_match
@@ -569,9 +569,9 @@ impl LocalBufferStore {
buffer_change_sets
.into_iter()
.filter_map(|(change_set, buffer_snapshot, path)| {
- let (repo_entry, local_repo_entry) = snapshot.repo_for_path(&path)?;
- let relative_path = repo_entry.relativize(&snapshot, &path).ok()?;
- let base_text = local_repo_entry.repo().load_index_text(&relative_path);
+ let local_repo = snapshot.local_repo_for_path(&path)?;
+ let relative_path = local_repo.relativize(&path).ok()?;
+ let base_text = local_repo.repo().load_index_text(&relative_path);
Some((change_set, buffer_snapshot, base_text))
})
.collect::<Vec<_>>()
@@ -1161,16 +1161,16 @@ impl BufferStore {
Worktree::Local(worktree) => {
let worktree = worktree.snapshot();
let blame_params = maybe!({
- let (repo_entry, local_repo_entry) = match worktree.repo_for_path(&file.path) {
+ let local_repo = match worktree.local_repo_for_path(&file.path) {
Some(repo_for_path) => repo_for_path,
None => return Ok(None),
};
- let relative_path = repo_entry
- .relativize(&worktree, &file.path)
+ let relative_path = local_repo
+ .relativize(&file.path)
.context("failed to relativize buffer path")?;
- let repo = local_repo_entry.repo().clone();
+ let repo = local_repo.repo().clone();
let content = match version {
Some(version) => buffer.rope_for_version(&version).clone(),
@@ -1247,7 +1247,7 @@ impl BufferStore {
});
};
- let path = match repo_entry.relativize(worktree, file.path()) {
+ let path = match repo_entry.relativize(file.path()) {
Ok(RepoPath(path)) => path,
Err(e) => return Task::ready(Err(e)),
};
@@ -87,9 +87,8 @@ pub use language::Location;
#[cfg(any(test, feature = "test-support"))]
pub use prettier::FORMAT_SUFFIX as TEST_PRETTIER_FORMAT_SUFFIX;
pub use worktree::{
- Entry, EntryKind, File, LocalWorktree, PathChange, ProjectEntryId, RepositoryEntry,
- UpdatedEntriesSet, UpdatedGitRepositoriesSet, Worktree, WorktreeId, WorktreeSettings,
- FS_WATCH_LATENCY,
+ Entry, EntryKind, File, LocalWorktree, PathChange, ProjectEntryId, UpdatedEntriesSet,
+ UpdatedGitRepositoriesSet, Worktree, WorktreeId, WorktreeSettings, FS_WATCH_LATENCY,
};
const SERVER_LAUNCHING_BEFORE_SHUTDOWN_TIMEOUT: Duration = Duration::from_secs(5);
@@ -3109,6 +3108,7 @@ impl LspStore {
WorktreeStoreEvent::WorktreeUpdateSent(worktree) => {
worktree.update(cx, |worktree, _cx| self.send_diagnostic_summaries(worktree));
}
+ WorktreeStoreEvent::GitRepositoryUpdated => {}
}
}
@@ -39,7 +39,10 @@ use futures::{
pub use image_store::{ImageItem, ImageStore};
use image_store::{ImageItemEvent, ImageStoreEvent};
-use git::{blame::Blame, repository::GitRepository};
+use git::{
+ blame::Blame,
+ repository::{GitFileStatus, GitRepository},
+};
use gpui::{
AnyModel, AppContext, AsyncAppContext, BorrowAppContext, Context as _, EventEmitter, Hsla,
Model, ModelContext, SharedString, Task, WeakModel, WindowContext,
@@ -95,9 +98,8 @@ pub use task_inventory::{
BasicContextProvider, ContextProviderWithTasks, Inventory, TaskSourceKind,
};
pub use worktree::{
- Entry, EntryKind, File, LocalWorktree, PathChange, ProjectEntryId, RepositoryEntry,
- UpdatedEntriesSet, UpdatedGitRepositoriesSet, Worktree, WorktreeId, WorktreeSettings,
- FS_WATCH_LATENCY,
+ Entry, EntryKind, File, LocalWorktree, PathChange, ProjectEntryId, UpdatedEntriesSet,
+ UpdatedGitRepositoriesSet, Worktree, WorktreeId, WorktreeSettings, FS_WATCH_LATENCY,
};
pub use buffer_store::ProjectTransaction;
@@ -242,6 +244,7 @@ pub enum Event {
ActivateProjectPanel,
WorktreeAdded(WorktreeId),
WorktreeOrderChanged,
+ GitRepositoryUpdated,
WorktreeRemoved(WorktreeId),
WorktreeUpdatedEntries(WorktreeId, UpdatedEntriesSet),
WorktreeUpdatedGitRepositories(WorktreeId),
@@ -1433,6 +1436,15 @@ impl Project {
.unwrap_or(false)
}
+ pub fn project_path_git_status(
+ &self,
+ project_path: &ProjectPath,
+ cx: &AppContext,
+ ) -> Option<GitFileStatus> {
+ self.worktree_for_id(project_path.worktree_id, cx)
+ .and_then(|worktree| worktree.read(cx).status_for_file(&project_path.path))
+ }
+
pub fn visibility_for_paths(&self, paths: &[PathBuf], cx: &AppContext) -> Option<bool> {
paths
.iter()
@@ -2295,6 +2307,7 @@ impl Project {
}
WorktreeStoreEvent::WorktreeOrderChanged => cx.emit(Event::WorktreeOrderChanged),
WorktreeStoreEvent::WorktreeUpdateSent(_) => {}
+ WorktreeStoreEvent::GitRepositoryUpdated => cx.emit(Event::GitRepositoryUpdated),
}
}
@@ -3516,17 +3529,6 @@ impl Project {
)
}
- pub fn get_repo(
- &self,
- project_path: &ProjectPath,
- cx: &AppContext,
- ) -> Option<Arc<dyn GitRepository>> {
- self.worktree_for_id(project_path.worktree_id, cx)?
- .read(cx)
- .as_local()?
- .local_git_repo(&project_path.path)
- }
-
pub fn get_first_worktree_root_repo(&self, cx: &AppContext) -> Option<Arc<dyn GitRepository>> {
let worktree = self.visible_worktrees(cx).next()?.read(cx).as_local()?;
let root_entry = worktree.root_git_entry()?;
@@ -4426,8 +4428,10 @@ impl Completion {
}
}
-pub fn sort_worktree_entries(entries: &mut [Entry]) {
+pub fn sort_worktree_entries(entries: &mut [impl AsRef<Entry>]) {
entries.sort_by(|entry_a, entry_b| {
+ let entry_a = entry_a.as_ref();
+ let entry_b = entry_b.as_ref();
compare_paths(
(&entry_a.path, entry_a.is_file()),
(&entry_b.path, entry_b.is_file()),
@@ -109,7 +109,7 @@ impl Inventory {
/// Pulls its task sources relevant to the worktree and the language given and resolves them with the [`TaskContext`] given.
/// Joins the new resolutions with the resolved tasks that were used (spawned) before,
/// orders them so that the most recently used come first, all equally used ones are ordered so that the most specific tasks come first.
- /// Deduplicates the tasks by their labels and contenxt and splits the ordered list into two: used tasks and the rest, newly resolved tasks.
+ /// Deduplicates the tasks by their labels and context and splits the ordered list into two: used tasks and the rest, newly resolved tasks.
pub fn used_and_current_resolved_tasks(
&self,
worktree: Option<WorktreeId>,
@@ -62,6 +62,7 @@ pub enum WorktreeStoreEvent {
WorktreeReleased(EntityId, WorktreeId),
WorktreeOrderChanged,
WorktreeUpdateSent(Model<Worktree>),
+ GitRepositoryUpdated,
}
impl EventEmitter<WorktreeStoreEvent> for WorktreeStore {}
@@ -322,6 +323,7 @@ impl WorktreeStore {
let worktree = Worktree::local(path.clone(), visible, fs, next_entry_id, &mut cx).await;
let worktree = worktree?;
+
this.update(&mut cx, |this, cx| this.add(&worktree, cx))?;
if visible {
@@ -374,6 +376,17 @@ impl WorktreeStore {
this.send_project_updates(cx);
})
.detach();
+
+ cx.subscribe(
+ worktree,
+ |_this, _, event: &worktree::Event, cx| match event {
+ worktree::Event::UpdatedGitRepositories(_) => {
+ cx.emit(WorktreeStoreEvent::GitRepositoryUpdated);
+ }
+ worktree::Event::DeletedEntry(_) | worktree::Event::UpdatedEntries(_) => {}
+ },
+ )
+ .detach();
}
pub fn remove_worktree(&mut self, id_to_remove: WorktreeId, cx: &mut ModelContext<Self>) {
@@ -583,11 +596,11 @@ impl WorktreeStore {
pub fn shared(
&mut self,
remote_id: u64,
- downsteam_client: AnyProtoClient,
+ downstream_client: AnyProtoClient,
cx: &mut ModelContext<Self>,
) {
self.retain_worktrees = true;
- self.downstream_client = Some((downsteam_client, remote_id));
+ self.downstream_client = Some((downstream_client, remote_id));
// When shared, retain all worktrees
for worktree_handle in self.worktrees.iter_mut() {
@@ -63,7 +63,7 @@ use workspace::{
notifications::{DetachAndPromptErr, NotifyTaskExt},
DraggedSelection, OpenInTerminal, PreviewTabsSettings, SelectedEntry, Workspace,
};
-use worktree::CreatedEntry;
+use worktree::{CreatedEntry, GitEntry, GitEntryRef};
const PROJECT_PANEL_KEY: &str = "ProjectPanel";
const NEW_ENTRY_ID: ProjectEntryId = ProjectEntryId::MAX;
@@ -76,7 +76,7 @@ pub struct ProjectPanel {
// An update loop that keeps incrementing/decrementing scroll offset while there is a dragged entry that's
// hovered over the start/end of a list.
hover_scroll_task: Option<Task<()>>,
- visible_entries: Vec<(WorktreeId, Vec<Entry>, OnceCell<HashSet<Arc<Path>>>)>,
+ visible_entries: Vec<(WorktreeId, Vec<GitEntry>, OnceCell<HashSet<Arc<Path>>>)>,
/// Maps from leaf project entry ID to the currently selected ancestor.
/// Relevant only for auto-fold dirs, where a single project panel entry may actually consist of several
/// project entries (and all non-leaf nodes are guaranteed to be directories).
@@ -311,7 +311,8 @@ impl ProjectPanel {
this.update_visible_entries(None, cx);
cx.notify();
}
- project::Event::WorktreeUpdatedEntries(_, _)
+ project::Event::GitRepositoryUpdated
+ | project::Event::WorktreeUpdatedEntries(_, _)
| project::Event::WorktreeAdded(_)
| project::Event::WorktreeOrderChanged => {
this.update_visible_entries(None, cx);
@@ -1366,9 +1367,10 @@ impl ProjectPanel {
let parent_entry = worktree.entry_for_path(parent_path)?;
// Remove all siblings that are being deleted except the last marked entry
- let mut siblings: Vec<Entry> = worktree
+ let mut siblings: Vec<_> = worktree
.snapshot()
.child_entries(parent_path)
+ .with_git_statuses()
.filter(|sibling| {
sibling.id == latest_entry.id
|| !marked_entries_in_worktree.contains(&&SelectedEntry {
@@ -1376,7 +1378,7 @@ impl ProjectPanel {
entry_id: sibling.id,
})
})
- .cloned()
+ .map(|entry| entry.to_owned())
.collect();
project::sort_worktree_entries(&mut siblings);
@@ -2334,7 +2336,7 @@ impl ProjectPanel {
}
let mut visible_worktree_entries = Vec::new();
- let mut entry_iter = snapshot.entries(true, 0);
+ let mut entry_iter = snapshot.entries(true, 0).with_git_statuses();
let mut auto_folded_ancestors = vec![];
while let Some(entry) = entry_iter.entry() {
if auto_collapse_dirs && entry.kind.is_dir() {
@@ -2376,7 +2378,7 @@ impl ProjectPanel {
}
}
auto_folded_ancestors.clear();
- visible_worktree_entries.push(entry.clone());
+ visible_worktree_entries.push(entry.to_owned());
let precedes_new_entry = if let Some(new_entry_id) = new_entry_parent_id {
entry.id == new_entry_id || {
self.ancestors.get(&entry.id).map_or(false, |entries| {
@@ -2390,25 +2392,27 @@ impl ProjectPanel {
false
};
if precedes_new_entry {
- visible_worktree_entries.push(Entry {
- id: NEW_ENTRY_ID,
- kind: new_entry_kind,
- path: entry.path.join("\0").into(),
- inode: 0,
- mtime: entry.mtime,
- size: entry.size,
- is_ignored: entry.is_ignored,
- is_external: false,
- is_private: false,
- is_always_included: entry.is_always_included,
+ visible_worktree_entries.push(GitEntry {
+ entry: Entry {
+ id: NEW_ENTRY_ID,
+ kind: new_entry_kind,
+ path: entry.path.join("\0").into(),
+ inode: 0,
+ mtime: entry.mtime,
+ size: entry.size,
+ is_ignored: entry.is_ignored,
+ is_external: false,
+ is_private: false,
+ is_always_included: entry.is_always_included,
+ canonical_path: entry.canonical_path.clone(),
+ char_bag: entry.char_bag,
+ is_fifo: entry.is_fifo,
+ },
git_status: entry.git_status,
- canonical_path: entry.canonical_path.clone(),
- char_bag: entry.char_bag,
- is_fifo: entry.is_fifo,
});
}
let worktree_abs_path = worktree.read(cx).abs_path();
- let (depth, path) = if Some(entry) == worktree.read(cx).root_entry() {
+ let (depth, path) = if Some(entry.entry) == worktree.read(cx).root_entry() {
let Some(path_name) = worktree_abs_path
.file_name()
.with_context(|| {
@@ -2485,8 +2489,8 @@ impl ProjectPanel {
entry_iter.advance();
}
- snapshot.propagate_git_statuses(&mut visible_worktree_entries);
project::sort_worktree_entries(&mut visible_worktree_entries);
+
self.visible_entries
.push((worktree_id, visible_worktree_entries, OnceCell::new()));
}
@@ -2714,13 +2718,13 @@ impl ProjectPanel {
None
}
- fn entry_at_index(&self, index: usize) -> Option<(WorktreeId, &Entry)> {
+ fn entry_at_index(&self, index: usize) -> Option<(WorktreeId, GitEntryRef)> {
let mut offset = 0;
for (worktree_id, visible_worktree_entries, _) in &self.visible_entries {
if visible_worktree_entries.len() > offset + index {
return visible_worktree_entries
.get(index)
- .map(|entry| (*worktree_id, entry));
+ .map(|entry| (*worktree_id, entry.to_ref()));
}
offset += visible_worktree_entries.len();
}
@@ -2753,7 +2757,7 @@ impl ProjectPanel {
.collect()
});
for entry in visible_worktree_entries[entry_range].iter() {
- callback(entry, entries, cx);
+ callback(&entry, entries, cx);
}
ix = end_ix;
}
@@ -2822,7 +2826,7 @@ impl ProjectPanel {
};
let (depth, difference) =
- ProjectPanel::calculate_depth_and_difference(entry, entries);
+ ProjectPanel::calculate_depth_and_difference(&entry, entries);
let filename = match difference {
diff if diff > 1 => entry
@@ -2951,9 +2955,9 @@ impl ProjectPanel {
worktree_id: WorktreeId,
reverse_search: bool,
only_visible_entries: bool,
- predicate: impl Fn(&Entry, WorktreeId) -> bool,
+ predicate: impl Fn(GitEntryRef, WorktreeId) -> bool,
cx: &mut ViewContext<Self>,
- ) -> Option<Entry> {
+ ) -> Option<GitEntry> {
if only_visible_entries {
let entries = self
.visible_entries
@@ -2968,15 +2972,18 @@ impl ProjectPanel {
.clone();
return utils::ReversibleIterable::new(entries.iter(), reverse_search)
- .find(|ele| predicate(ele, worktree_id))
+ .find(|ele| predicate(ele.to_ref(), worktree_id))
.cloned();
}
let worktree = self.project.read(cx).worktree_for_id(worktree_id, cx)?;
worktree.update(cx, |tree, _| {
- utils::ReversibleIterable::new(tree.entries(true, 0usize), reverse_search)
- .find_single_ended(|ele| predicate(ele, worktree_id))
- .cloned()
+ utils::ReversibleIterable::new(
+ tree.entries(true, 0usize).with_git_statuses(),
+ reverse_search,
+ )
+ .find_single_ended(|ele| predicate(*ele, worktree_id))
+ .map(|ele| ele.to_owned())
})
}
@@ -2984,7 +2991,7 @@ impl ProjectPanel {
&self,
start: Option<&SelectedEntry>,
reverse_search: bool,
- predicate: impl Fn(&Entry, WorktreeId) -> bool,
+ predicate: impl Fn(GitEntryRef, WorktreeId) -> bool,
cx: &mut ViewContext<Self>,
) -> Option<SelectedEntry> {
let mut worktree_ids: Vec<_> = self
@@ -3006,7 +3013,9 @@ impl ProjectPanel {
let root_entry = tree.root_entry()?;
let tree_id = tree.id();
- let mut first_iter = tree.traverse_from_path(true, true, true, entry.path.as_ref());
+ let mut first_iter = tree
+ .traverse_from_path(true, true, true, entry.path.as_ref())
+ .with_git_statuses();
if reverse_search {
first_iter.next();
@@ -3014,25 +3023,25 @@ impl ProjectPanel {
let first = first_iter
.enumerate()
- .take_until(|(count, ele)| *ele == root_entry && *count != 0usize)
- .map(|(_, ele)| ele)
- .find(|ele| predicate(ele, tree_id))
- .cloned();
+ .take_until(|(count, entry)| entry.entry == root_entry && *count != 0usize)
+ .map(|(_, entry)| entry)
+ .find(|ele| predicate(*ele, tree_id))
+ .map(|ele| ele.to_owned());
- let second_iter = tree.entries(true, 0usize);
+ let second_iter = tree.entries(true, 0usize).with_git_statuses();
let second = if reverse_search {
second_iter
.take_until(|ele| ele.id == start.entry_id)
- .filter(|ele| predicate(ele, tree_id))
+ .filter(|ele| predicate(*ele, tree_id))
.last()
- .cloned()
+ .map(|ele| ele.to_owned())
} else {
second_iter
.take_while(|ele| ele.id != start.entry_id)
- .filter(|ele| predicate(ele, tree_id))
+ .filter(|ele| predicate(*ele, tree_id))
.last()
- .cloned()
+ .map(|ele| ele.to_owned())
};
if reverse_search {
@@ -3089,7 +3098,7 @@ impl ProjectPanel {
&self,
start: Option<&SelectedEntry>,
reverse_search: bool,
- predicate: impl Fn(&Entry, WorktreeId) -> bool,
+ predicate: impl Fn(GitEntryRef, WorktreeId) -> bool,
cx: &mut ViewContext<Self>,
) -> Option<SelectedEntry> {
let mut worktree_ids: Vec<_> = self
@@ -3131,8 +3140,8 @@ impl ProjectPanel {
)
};
- let first_search = first_iter.find(|ele| predicate(ele, start.worktree_id));
- let second_search = second_iter.find(|ele| predicate(ele, start.worktree_id));
+ let first_search = first_iter.find(|ele| predicate(ele.to_ref(), start.worktree_id));
+ let second_search = second_iter.find(|ele| predicate(ele.to_ref(), start.worktree_id));
if first_search.is_some() {
return first_search.map(|entry| SelectedEntry {
@@ -1768,7 +1768,7 @@ message Entry {
bool is_ignored = 7;
bool is_external = 8;
reserved 6;
- optional GitStatus git_status = 9;
+ reserved 9;
bool is_fifo = 10;
optional uint64 size = 11;
optional string canonical_path = 12;
@@ -1777,6 +1777,8 @@ message Entry {
message RepositoryEntry {
uint64 work_directory_id = 1;
optional string branch = 2;
+ repeated StatusEntry updated_statuses = 3;
+ repeated string removed_statuses = 4;
}
message StatusEntry {
@@ -1788,6 +1790,7 @@ enum GitStatus {
Added = 0;
Modified = 1;
Conflict = 2;
+ Deleted = 3;
}
message BufferState {
@@ -20,6 +20,7 @@ use serde_json::json;
use settings::{initial_server_settings_content, Settings, SettingsLocation, SettingsStore};
use smol::stream::StreamExt;
use std::{
+ collections::HashSet,
path::{Path, PathBuf},
sync::Arc,
};
@@ -1150,6 +1151,10 @@ async fn test_remote_git_branches(cx: &mut TestAppContext, server_cx: &mut TestA
let (project, headless_project) = init_test(&fs, cx, server_cx).await;
let branches = ["main", "dev", "feature-1"];
+ let branches_set = branches
+ .iter()
+ .map(ToString::to_string)
+ .collect::<HashSet<_>>();
fs.insert_branches(Path::new("/code/project1/.git"), &branches);
let (worktree, _) = project
@@ -1173,10 +1178,10 @@ async fn test_remote_git_branches(cx: &mut TestAppContext, server_cx: &mut TestA
let remote_branches = remote_branches
.into_iter()
- .map(|branch| branch.name)
- .collect::<Vec<_>>();
+ .map(|branch| branch.name.to_string())
+ .collect::<HashSet<_>>();
- assert_eq!(&remote_branches, &branches);
+ assert_eq!(&remote_branches, &branches_set);
cx.update(|cx| {
project.update(cx, |project, cx| {
@@ -39,7 +39,7 @@ pub async fn launch_remote_kernel(
let kernel_launch_request = KernelLaunchRequest {
name: kernel_name.to_string(),
// Note: since the path we have locally may not be the same as the one on the remote server,
- // we don't send it. We'll have to evaluate this decisiion along the way.
+ // we don't send it. We'll have to evaluate this decision along the way.
path: None,
};
@@ -5,7 +5,7 @@ use collections::HashMap;
// for those users.
//
// The way macOS solves this problem is to move shortcuts around so that they are all reachable,
-// even if the mnemoic changes. https://developer.apple.com/documentation/swiftui/keyboardshortcut/localization-swift.struct
+// even if the mnemonic changes. https://developer.apple.com/documentation/swiftui/keyboardshortcut/localization-swift.struct
//
// For example, cmd-> is the "switch window" shortcut because the > key is right above tab.
// To ensure this doesn't cause problems for shortcuts defined for a QWERTY layout, apple moves
@@ -9,6 +9,15 @@ struct StackEntry<'a, T: Item, D> {
position: D,
}
+impl<'a, T: Item + fmt::Debug, D: fmt::Debug> fmt::Debug for StackEntry<'a, T, D> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_struct("StackEntry")
+ .field("index", &self.index)
+ .field("position", &self.position)
+ .finish()
+ }
+}
+
#[derive(Clone)]
pub struct Cursor<'a, T: Item, D> {
tree: &'a SumTree<T>,
@@ -18,6 +27,21 @@ pub struct Cursor<'a, T: Item, D> {
at_end: bool,
}
+impl<'a, T: Item + fmt::Debug, D: fmt::Debug> fmt::Debug for Cursor<'a, T, D>
+where
+ T::Summary: fmt::Debug,
+{
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_struct("Cursor")
+ .field("tree", &self.tree)
+ .field("stack", &self.stack)
+ .field("position", &self.position)
+ .field("did_seek", &self.did_seek)
+ .field("at_end", &self.at_end)
+ .finish()
+ }
+}
+
pub struct Iter<'a, T: Item> {
tree: &'a SumTree<T>,
stack: ArrayVec<StackEntry<'a, T, ()>, 16>,
@@ -60,6 +84,7 @@ where
}
}
+ /// Item is None, when the list is empty, or this cursor is at the end of the list.
#[track_caller]
pub fn item(&self) -> Option<&'a T> {
self.assert_did_seek();
@@ -42,6 +42,21 @@ pub trait Summary: Clone {
fn add_summary(&mut self, summary: &Self, cx: &Self::Context);
}
+/// This type exists because we can't implement Summary for () without causing
+/// type resolution errors
+#[derive(Copy, Clone, PartialEq, Eq, Debug)]
+pub struct Unit;
+
+impl Summary for Unit {
+ type Context = ();
+
+ fn zero(_: &()) -> Self {
+ Unit
+ }
+
+ fn add_summary(&mut self, _: &Self, _: &()) {}
+}
+
/// Each [`Summary`] type can have more than one [`Dimension`] type that it measures.
///
/// You can use dimensions to seek to a specific location in the [`SumTree`]
@@ -761,6 +776,55 @@ impl<T: KeyedItem> SumTree<T> {
None
}
}
+
+ #[inline]
+ pub fn contains(&self, key: &T::Key, cx: &<T::Summary as Summary>::Context) -> bool {
+ self.get(key, cx).is_some()
+ }
+
+ pub fn update<F, R>(
+ &mut self,
+ key: &T::Key,
+ cx: &<T::Summary as Summary>::Context,
+ f: F,
+ ) -> Option<R>
+ where
+ F: FnOnce(&mut T) -> R,
+ {
+ let mut cursor = self.cursor::<T::Key>(cx);
+ let mut new_tree = cursor.slice(key, Bias::Left, cx);
+ let mut result = None;
+ if Ord::cmp(key, &cursor.end(cx)) == Ordering::Equal {
+ let mut updated = cursor.item().unwrap().clone();
+ result = Some(f(&mut updated));
+ new_tree.push(updated, cx);
+ cursor.next(cx);
+ }
+ new_tree.append(cursor.suffix(cx), cx);
+ drop(cursor);
+ *self = new_tree;
+ result
+ }
+
+ pub fn retain<F: FnMut(&T) -> bool>(
+ &mut self,
+ cx: &<T::Summary as Summary>::Context,
+ mut predicate: F,
+ ) {
+ let mut new_map = SumTree::new(cx);
+
+ let mut cursor = self.cursor::<T::Key>(cx);
+ cursor.next(cx);
+ while let Some(item) = cursor.item() {
+ if predicate(&item) {
+ new_map.push(item.clone(), cx);
+ }
+ cursor.next(cx);
+ }
+ drop(cursor);
+
+ *self = new_map;
+ }
}
impl<T, S> Default for SumTree<T>
@@ -358,13 +358,14 @@ impl PickerDelegate for TabSwitcherDelegate {
.item
.project_path(cx)
.as_ref()
- .and_then(|path| self.project.read(cx).entry_for_path(path, cx))
- .map(|entry| {
- entry_git_aware_label_color(
- entry.git_status,
- entry.is_ignored,
- selected,
- )
+ .and_then(|path| {
+ let project = self.project.read(cx);
+ let entry = project.entry_for_path(path, cx)?;
+ let git_status = project.project_path_git_status(path, cx);
+ Some((entry, git_status))
+ })
+ .map(|(entry, git_status)| {
+ entry_git_aware_label_color(git_status, entry.is_ignored, selected)
})
})
.flatten();
@@ -60,7 +60,7 @@ pub struct SpawnInTerminal {
pub show_command: bool,
}
-/// A final form of the [`TaskTemplate`], that got resolved with a particualar [`TaskContext`] and now is ready to spawn the actual task.
+/// A final form of the [`TaskTemplate`], that got resolved with a particular [`TaskContext`] and now is ready to spawn the actual task.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct ResolvedTask {
/// A way to distinguish tasks produced by the same template, but different contexts.
@@ -143,13 +143,13 @@ impl TaskTemplate {
let truncated_variables = truncate_variables(&task_variables);
let cwd = match self.cwd.as_deref() {
Some(cwd) => {
- let substitured_cwd = substitute_all_template_variables_in_str(
+ let substituted_cwd = substitute_all_template_variables_in_str(
cwd,
&task_variables,
&variable_names,
&mut substituted_variables,
)?;
- Some(PathBuf::from(substitured_cwd))
+ Some(PathBuf::from(substituted_cwd))
}
None => None,
}
@@ -100,7 +100,7 @@ impl TerminalPanel {
terminal_panel
}
- pub fn asssistant_enabled(&mut self, enabled: bool, cx: &mut ViewContext<Self>) {
+ pub fn set_assistant_enabled(&mut self, enabled: bool, cx: &mut ViewContext<Self>) {
self.assistant_enabled = enabled;
if enabled {
let focus_handle = self
@@ -270,7 +270,7 @@ pub struct ThemeColorsContent {
/// Fill Color. Used for the muted or deemphasized fill color of an icon.
///
- /// This might be used to show an icon in an inactive pane, or to demphasize a series of icons to give them less visual weight.
+ /// This might be used to show an icon in an inactive pane, or to deemphasize a series of icons to give them less visual weight.
#[serde(rename = "icon.muted")]
pub icon_muted: Option<String>,
@@ -97,7 +97,7 @@ pub struct ThemeColors {
pub icon: Hsla,
/// Fill Color. Used for the muted or deemphasized fill color of an icon.
///
- /// This might be used to show an icon in an inactive pane, or to demphasize a series of icons to give them less visual weight.
+ /// This might be used to show an icon in an inactive pane, or to deemphasize a series of icons to give them less visual weight.
pub icon_muted: Hsla,
/// Fill Color. Used for the disabled fill color of an icon.
///
@@ -71,7 +71,7 @@ impl From<WindowAppearance> for Appearance {
}
}
-/// Which themes should be loaded. This is used primarlily for testing.
+/// Which themes should be loaded. This is used primarily for testing.
pub enum LoadThemes {
/// Only load the base theme.
///
@@ -21,7 +21,7 @@ use gpui::{
Interactivity, IntoElement, Model, MouseButton, ParentElement, Render, Stateful,
StatefulInteractiveElement, Styled, Subscription, View, ViewContext, VisualContext, WeakView,
};
-use project::{Project, RepositoryEntry};
+use project::Project;
use rpc::proto;
use settings::Settings as _;
use smallvec::SmallVec;
@@ -487,7 +487,7 @@ impl TitleBar {
let workspace = self.workspace.upgrade()?;
let branch_name = entry
.as_ref()
- .and_then(RepositoryEntry::branch)
+ .and_then(|entry| entry.branch())
.map(|branch| util::truncate_and_trailoff(&branch, MAX_BRANCH_NAME_LENGTH))?;
Some(
Button::new("project_branch_trigger", branch_name)
@@ -179,7 +179,7 @@ define_connection! {
// group_id: usize, // Primary key for pane_groups
// workspace_id: usize, // References workspaces table
// parent_group_id: Option<usize>, // None indicates that this is the root node
- // position: Optiopn<usize>, // None indicates that this is the root node
+ // position: Option<usize>, // None indicates that this is the root node
// axis: Option<Axis>, // 'Vertical', 'Horizontal'
// flexes: Option<Vec<f32>>, // A JSON array of floats
// )
@@ -21,7 +21,6 @@ use fuzzy::CharBag;
use git::GitHostingProviderRegistry;
use git::{
repository::{GitFileStatus, GitRepository, RepoPath},
- status::GitStatus,
COOKIES, DOT_GIT, FSMONITOR_DAEMON, GITIGNORE,
};
use gpui::{
@@ -30,6 +29,7 @@ use gpui::{
};
use ignore::IgnoreStack;
use language::DiskState;
+
use parking_lot::Mutex;
use paths::local_settings_folder_relative_path;
use postage::{
@@ -53,8 +53,8 @@ use std::{
ffi::OsStr,
fmt,
future::Future,
- mem,
- ops::{AddAssign, Deref, DerefMut, Sub},
+ mem::{self},
+ ops::{Deref, DerefMut},
path::{Path, PathBuf},
pin::Pin,
sync::{
@@ -63,7 +63,9 @@ use std::{
},
time::{Duration, Instant},
};
-use sum_tree::{Bias, Edit, SeekTarget, SumTree, TreeMap, TreeSet};
+use sum_tree::{
+ Bias, Cursor, Edit, KeyedItem, SeekTarget, SumTree, Summary, TreeMap, TreeSet, Unit,
+};
use text::{LineEnding, Rope};
use util::{
paths::{home_dir, PathMatcher, SanitizedPath},
@@ -154,7 +156,7 @@ pub struct Snapshot {
entries_by_path: SumTree<Entry>,
entries_by_id: SumTree<PathEntry>,
always_included_entries: Vec<Arc<Path>>,
- repository_entries: TreeMap<RepositoryWorkDirectory, RepositoryEntry>,
+ repositories: SumTree<RepositoryEntry>,
/// A number that increases every time the worktree begins scanning
/// a set of paths from the filesystem. This scanning could be caused
@@ -171,8 +173,133 @@ pub struct Snapshot {
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct RepositoryEntry {
- pub(crate) work_directory: WorkDirectoryEntry,
+ /// The git status entries for this repository.
+ /// Note that the paths on this repository are relative to the git work directory.
+ /// If the .git folder is external to Zed, these paths will be relative to that folder,
+ /// and this data structure might reference files external to this worktree.
+ ///
+ /// For example:
+ ///
+ /// my_root_folder/ <-- repository root
+ /// .git
+ /// my_sub_folder_1/
+ /// project_root/ <-- Project root, Zed opened here
+ /// changed_file_1 <-- File with changes, in worktree
+ /// my_sub_folder_2/
+ /// changed_file_2 <-- File with changes, out of worktree
+ /// ...
+ ///
+ /// With this setup, this field would contain 2 entries, like so:
+ /// - my_sub_folder_1/project_root/changed_file_1
+ /// - my_sub_folder_2/changed_file_2
+ pub(crate) statuses_by_path: SumTree<StatusEntry>,
+ pub(crate) work_directory_id: ProjectEntryId,
+ pub(crate) work_directory: WorkDirectory,
pub(crate) branch: Option<Arc<str>>,
+}
+
+impl Deref for RepositoryEntry {
+ type Target = WorkDirectory;
+
+ fn deref(&self) -> &Self::Target {
+ &self.work_directory
+ }
+}
+
+impl AsRef<Path> for RepositoryEntry {
+ fn as_ref(&self) -> &Path {
+ &self.path
+ }
+}
+
+impl RepositoryEntry {
+ pub fn branch(&self) -> Option<Arc<str>> {
+ self.branch.clone()
+ }
+
+ pub fn work_directory_id(&self) -> ProjectEntryId {
+ self.work_directory_id
+ }
+
+ pub fn status(&self) -> impl Iterator<Item = StatusEntry> + '_ {
+ self.statuses_by_path.iter().cloned()
+ }
+
+ pub fn initial_update(&self) -> proto::RepositoryEntry {
+ proto::RepositoryEntry {
+ work_directory_id: self.work_directory_id.to_proto(),
+ branch: self.branch.as_ref().map(|branch| branch.to_string()),
+ updated_statuses: self
+ .statuses_by_path
+ .iter()
+ .map(|entry| proto::StatusEntry {
+ repo_path: entry.repo_path.to_string_lossy().to_string(),
+ status: git_status_to_proto(entry.status),
+ })
+ .collect(),
+ removed_statuses: Default::default(),
+ }
+ }
+
+ pub fn build_update(&self, old: &Self) -> proto::RepositoryEntry {
+ let mut updated_statuses: Vec<proto::StatusEntry> = Vec::new();
+ let mut removed_statuses: Vec<String> = Vec::new();
+
+ let mut new_statuses = self.statuses_by_path.iter().peekable();
+ let mut old_statuses = old.statuses_by_path.iter().peekable();
+
+ let mut current_new_entry = new_statuses.next();
+ let mut current_old_entry = old_statuses.next();
+ loop {
+ match (current_new_entry, current_old_entry) {
+ (Some(new_entry), Some(old_entry)) => {
+ match new_entry.repo_path.cmp(&old_entry.repo_path) {
+ Ordering::Less => {
+ updated_statuses.push(new_entry.to_proto());
+ current_new_entry = new_statuses.next();
+ }
+ Ordering::Equal => {
+ if new_entry.status != old_entry.status {
+ updated_statuses.push(new_entry.to_proto());
+ }
+ current_old_entry = old_statuses.next();
+ current_new_entry = new_statuses.next();
+ }
+ Ordering::Greater => {
+ removed_statuses.push(old_entry.repo_path.to_proto());
+ current_old_entry = old_statuses.next();
+ }
+ }
+ }
+ (None, Some(old_entry)) => {
+ removed_statuses.push(old_entry.repo_path.to_proto());
+ current_old_entry = old_statuses.next();
+ }
+ (Some(new_entry), None) => {
+ updated_statuses.push(new_entry.to_proto());
+ current_new_entry = new_statuses.next();
+ }
+ (None, None) => break,
+ }
+ }
+
+ proto::RepositoryEntry {
+ work_directory_id: self.work_directory_id.to_proto(),
+ branch: self.branch.as_ref().map(|branch| branch.to_string()),
+ updated_statuses,
+ removed_statuses,
+ }
+ }
+}
+
+/// This path corresponds to the 'content path' of a repository in relation
+/// to Zed's project root.
+/// In the majority of the cases, this is the folder that contains the .git folder.
+/// But if a sub-folder of a git repository is opened, this corresponds to the
+/// project root and the .git folder is located in a parent directory.
+#[derive(Clone, Debug, Ord, PartialOrd, Eq, PartialEq, Hash)]
+pub struct WorkDirectory {
+ path: Arc<Path>,
/// If location_in_repo is set, it means the .git folder is external
/// and in a parent folder of the project root.
@@ -195,23 +322,19 @@ pub struct RepositoryEntry {
pub(crate) location_in_repo: Option<Arc<Path>>,
}
-impl RepositoryEntry {
- pub fn branch(&self) -> Option<Arc<str>> {
- self.branch.clone()
- }
-
- pub fn work_directory_id(&self) -> ProjectEntryId {
- *self.work_directory
- }
-
- pub fn work_directory(&self, snapshot: &Snapshot) -> Option<RepositoryWorkDirectory> {
- snapshot
- .entry_for_id(self.work_directory_id())
- .map(|entry| RepositoryWorkDirectory(entry.path.clone()))
+impl WorkDirectory {
+ pub fn path_key(&self) -> PathKey {
+ PathKey(self.path.clone())
}
- pub fn build_update(&self, _: &Self) -> proto::RepositoryEntry {
- self.into()
+ /// Returns true if the given path is a child of the work directory.
+ ///
+    /// Note that the path may not actually be a member of this repository:
+    /// another repository may live in a directory between these two paths, or
+    /// the .git folder may sit in a parent folder outside the project root.
+ pub fn directory_contains(&self, path: impl AsRef<Path>) -> bool {
+ let path = path.as_ref();
+ path.starts_with(&self.path)
}
/// relativize returns the given project path relative to the root folder of the
@@ -219,53 +342,50 @@ impl RepositoryEntry {
/// If the root of the repository (and its .git folder) are located in a parent folder
/// of the project root folder, then the returned RepoPath is relative to the root
/// of the repository and not a valid path inside the project.
- pub fn relativize(&self, worktree: &Snapshot, path: &Path) -> Result<RepoPath> {
- let relativize_path = |path: &Path| {
- let entry = worktree
- .entry_for_id(self.work_directory.0)
- .ok_or_else(|| anyhow!("entry not found"))?;
-
+ pub fn relativize(&self, path: &Path) -> Result<RepoPath> {
+ if let Some(location_in_repo) = &self.location_in_repo {
+ Ok(location_in_repo.join(path).into())
+ } else {
let relativized_path = path
- .strip_prefix(&entry.path)
- .map_err(|_| anyhow!("could not relativize {:?} against {:?}", path, entry.path))?;
+ .strip_prefix(&self.path)
+ .map_err(|_| anyhow!("could not relativize {:?} against {:?}", path, self.path))?;
Ok(relativized_path.into())
- };
+ }
+ }
- if let Some(location_in_repo) = &self.location_in_repo {
- relativize_path(&location_in_repo.join(path))
+ /// This is the opposite operation to `relativize` above
+ pub fn unrelativize(&self, path: &RepoPath) -> Option<Arc<Path>> {
+ if let Some(location) = &self.location_in_repo {
+ // If we fail to strip the prefix, that means this status entry is
+ // external to this worktree, and we definitely won't have an entry_id
+ path.strip_prefix(location).ok().map(Into::into)
} else {
- relativize_path(path)
+ Some(self.path.join(path).into())
}
}
}
-impl From<&RepositoryEntry> for proto::RepositoryEntry {
- fn from(value: &RepositoryEntry) -> Self {
- proto::RepositoryEntry {
- work_directory_id: value.work_directory.to_proto(),
- branch: value.branch.as_ref().map(|str| str.to_string()),
+impl Default for WorkDirectory {
+ fn default() -> Self {
+ Self {
+ path: Arc::from(Path::new("")),
+ location_in_repo: None,
}
}
}
-/// This path corresponds to the 'content path' of a repository in relation
-/// to Zed's project root.
-/// In the majority of the cases, this is the folder that contains the .git folder.
-/// But if a sub-folder of a git repository is opened, this corresponds to the
-/// project root and the .git folder is located in a parent directory.
-#[derive(Clone, Debug, Ord, PartialOrd, Eq, PartialEq)]
-pub struct RepositoryWorkDirectory(pub(crate) Arc<Path>);
+impl Deref for WorkDirectory {
+ type Target = Path;
-impl Default for RepositoryWorkDirectory {
- fn default() -> Self {
- RepositoryWorkDirectory(Arc::from(Path::new("")))
+ fn deref(&self) -> &Self::Target {
+ self.as_ref()
}
}
-impl AsRef<Path> for RepositoryWorkDirectory {
+impl AsRef<Path> for WorkDirectory {
fn as_ref(&self) -> &Path {
- self.0.as_ref()
+ self.path.as_ref()
}
}
@@ -317,7 +437,9 @@ struct BackgroundScannerState {
#[derive(Debug, Clone)]
pub struct LocalRepositoryEntry {
+ pub(crate) work_directory: WorkDirectory,
pub(crate) git_dir_scan_id: usize,
+ pub(crate) status_scan_id: usize,
pub(crate) repo_ptr: Arc<dyn GitRepository>,
/// Absolute path to the actual .git folder.
/// Note: if .git is a file, this points to the folder indicated by the .git file
@@ -326,12 +448,39 @@ pub struct LocalRepositoryEntry {
pub(crate) dot_git_worktree_abs_path: Option<Arc<Path>>,
}
+impl sum_tree::Item for LocalRepositoryEntry {
+ type Summary = PathSummary<Unit>;
+
+ fn summary(&self, _: &<Self::Summary as Summary>::Context) -> Self::Summary {
+ PathSummary {
+ max_path: self.work_directory.path.clone(),
+ item_summary: Unit,
+ }
+ }
+}
+
+impl KeyedItem for LocalRepositoryEntry {
+ type Key = PathKey;
+
+ fn key(&self) -> Self::Key {
+ PathKey(self.work_directory.path.clone())
+ }
+}
+
impl LocalRepositoryEntry {
pub fn repo(&self) -> &Arc<dyn GitRepository> {
&self.repo_ptr
}
}
+impl Deref for LocalRepositoryEntry {
+ type Target = WorkDirectory;
+
+ fn deref(&self) -> &Self::Target {
+ &self.work_directory
+ }
+}
+
impl Deref for LocalSnapshot {
type Target = Snapshot;
@@ -716,9 +865,9 @@ impl Worktree {
let snapshot = this.snapshot();
cx.background_executor().spawn(async move {
if let Some(repo) = snapshot.repository_for_path(&path) {
- if let Some(repo_path) = repo.relativize(&snapshot, &path).log_err() {
+ if let Some(repo_path) = repo.relativize(&path).log_err() {
if let Some(git_repo) =
- snapshot.git_repositories.get(&*repo.work_directory)
+ snapshot.git_repositories.get(&repo.work_directory_id)
{
return Ok(git_repo.repo_ptr.load_index_text(&repo_path));
}
@@ -1215,6 +1364,7 @@ impl LocalWorktree {
let mut changes = Vec::new();
let mut old_repos = old_snapshot.git_repositories.iter().peekable();
let mut new_repos = new_snapshot.git_repositories.iter().peekable();
+
loop {
match (new_repos.peek().map(clone), old_repos.peek().map(clone)) {
(Some((new_entry_id, new_repo)), Some((old_entry_id, old_repo))) => {
@@ -1231,11 +1381,13 @@ impl LocalWorktree {
new_repos.next();
}
Ordering::Equal => {
- if new_repo.git_dir_scan_id != old_repo.git_dir_scan_id {
+ if new_repo.git_dir_scan_id != old_repo.git_dir_scan_id
+ || new_repo.status_scan_id != old_repo.status_scan_id
+ {
if let Some(entry) = new_snapshot.entry_for_id(new_entry_id) {
let old_repo = old_snapshot
- .repository_entries
- .get(&RepositoryWorkDirectory(entry.path.clone()))
+ .repositories
+ .get(&PathKey(entry.path.clone()), &())
.cloned();
changes.push((
entry.path.clone(),
@@ -1251,8 +1403,8 @@ impl LocalWorktree {
Ordering::Greater => {
if let Some(entry) = old_snapshot.entry_for_id(old_entry_id) {
let old_repo = old_snapshot
- .repository_entries
- .get(&RepositoryWorkDirectory(entry.path.clone()))
+ .repositories
+ .get(&PathKey(entry.path.clone()), &())
.cloned();
changes.push((
entry.path.clone(),
@@ -1279,8 +1431,8 @@ impl LocalWorktree {
(None, Some((entry_id, _))) => {
if let Some(entry) = old_snapshot.entry_for_id(entry_id) {
let old_repo = old_snapshot
- .repository_entries
- .get(&RepositoryWorkDirectory(entry.path.clone()))
+ .repositories
+ .get(&PathKey(entry.path.clone()), &())
.cloned();
changes.push((
entry.path.clone(),
@@ -1325,12 +1477,12 @@ impl LocalWorktree {
}
pub fn local_git_repo(&self, path: &Path) -> Option<Arc<dyn GitRepository>> {
- self.repo_for_path(path)
- .map(|(_, entry)| entry.repo_ptr.clone())
+ self.local_repo_for_path(path)
+ .map(|local_repo| local_repo.repo_ptr.clone())
}
pub fn get_local_repo(&self, repo: &RepositoryEntry) -> Option<&LocalRepositoryEntry> {
- self.git_repositories.get(&repo.work_directory.0)
+ self.git_repositories.get(&repo.work_directory_id)
}
fn load_binary_file(
@@ -2086,7 +2238,7 @@ impl Snapshot {
always_included_entries: Default::default(),
entries_by_path: Default::default(),
entries_by_id: Default::default(),
- repository_entries: Default::default(),
+ repositories: Default::default(),
scan_id: 1,
completed_scan_id: 0,
}
@@ -2121,9 +2273,9 @@ impl Snapshot {
updated_entries.sort_unstable_by_key(|e| e.id);
let mut updated_repositories = self
- .repository_entries
- .values()
- .map(proto::RepositoryEntry::from)
+ .repositories
+ .iter()
+ .map(|repository| repository.initial_update())
.collect::<Vec<_>>();
updated_repositories.sort_unstable_by_key(|e| e.work_directory_id);
@@ -2186,7 +2338,7 @@ impl Snapshot {
self.entries_by_path = {
let mut cursor = self.entries_by_path.cursor::<TraversalProgress>(&());
let mut new_entries_by_path =
- cursor.slice(&TraversalTarget::Path(&removed_entry.path), Bias::Left, &());
+ cursor.slice(&TraversalTarget::path(&removed_entry.path), Bias::Left, &());
while let Some(entry) = cursor.item() {
if entry.path.starts_with(&removed_entry.path) {
self.entries_by_id.remove(&entry.id, &());
@@ -2202,12 +2354,14 @@ impl Snapshot {
Some(removed_entry.path)
}
- #[cfg(any(test, feature = "test-support"))]
- pub fn status_for_file(&self, path: impl Into<PathBuf>) -> Option<GitFileStatus> {
- let path = path.into();
- self.entries_by_path
- .get(&PathKey(Arc::from(path)), &())
- .and_then(|entry| entry.git_status)
+ pub fn status_for_file(&self, path: impl AsRef<Path>) -> Option<GitFileStatus> {
+ let path = path.as_ref();
+ self.repository_for_path(path).and_then(|repo| {
+ let repo_path = repo.relativize(path).unwrap();
+ repo.statuses_by_path
+ .get(&PathKey(repo_path.0), &())
+ .map(|entry| entry.status)
+ })
}
fn update_abs_path(&mut self, abs_path: SanitizedPath, root_name: String) {
@@ -2267,38 +2421,66 @@ impl Snapshot {
self.entries_by_id.edit(entries_by_id_edits, &());
update.removed_repositories.sort_unstable();
- self.repository_entries.retain(|_, entry| {
+ self.repositories.retain(&(), |entry: &RepositoryEntry| {
update
.removed_repositories
- .binary_search(&entry.work_directory.to_proto())
+ .binary_search(&entry.work_directory_id.to_proto())
.is_err()
});
for repository in update.updated_repositories {
- let work_directory_entry: WorkDirectoryEntry =
- ProjectEntryId::from_proto(repository.work_directory_id).into();
-
- if let Some(entry) = self.entry_for_id(*work_directory_entry) {
- let work_directory = RepositoryWorkDirectory(entry.path.clone());
- if self.repository_entries.get(&work_directory).is_some() {
- self.repository_entries.update(&work_directory, |repo| {
- repo.branch = repository.branch.map(Into::into);
- });
+ let work_directory_id = ProjectEntryId::from_proto(repository.work_directory_id);
+ if let Some(work_dir_entry) = self.entry_for_id(work_directory_id) {
+ if self
+ .repositories
+ .contains(&PathKey(work_dir_entry.path.clone()), &())
+ {
+ let edits = repository
+ .removed_statuses
+ .into_iter()
+ .map(|path| Edit::Remove(PathKey(Path::new(&path).into())))
+ .chain(repository.updated_statuses.into_iter().filter_map(
+ |updated_status| {
+ Some(Edit::Insert(updated_status.try_into().log_err()?))
+ },
+ ))
+ .collect::<Vec<_>>();
+
+ self.repositories
+ .update(&PathKey(work_dir_entry.path.clone()), &(), |repo| {
+ repo.branch = repository.branch.map(Into::into);
+ repo.statuses_by_path.edit(edits, &());
+ });
} else {
- self.repository_entries.insert(
- work_directory,
+ let statuses = SumTree::from_iter(
+ repository
+ .updated_statuses
+ .into_iter()
+ .filter_map(|updated_status| updated_status.try_into().log_err()),
+ &(),
+ );
+
+ self.repositories.insert_or_replace(
RepositoryEntry {
- work_directory: work_directory_entry,
+ work_directory_id,
+ work_directory: WorkDirectory {
+ path: work_dir_entry.path.clone(),
+ // When syncing repository entries from a peer, we don't need
+ // the location_in_repo field, since git operations don't happen locally
+ // anyway.
+ location_in_repo: None,
+ },
branch: repository.branch.map(Into::into),
- // When syncing repository entries from a peer, we don't need
- // the location_in_repo field, since git operations don't happen locally
- // anyway.
- location_in_repo: None,
+ statuses_by_path: statuses,
},
- )
+ &(),
+ );
}
} else {
- log::error!("no work directory entry for repository {:?}", repository)
+ log::error!(
+ "no work directory entry for repository {:?}",
+ repository.work_directory_id
+ )
}
}
@@ -2355,6 +2537,7 @@ impl Snapshot {
&(),
);
Traversal {
+ snapshot: self,
cursor,
include_files,
include_dirs,
@@ -2369,13 +2552,7 @@ impl Snapshot {
include_ignored: bool,
path: &Path,
) -> Traversal {
- Traversal::new(
- &self.entries_by_path,
- include_files,
- include_dirs,
- include_ignored,
- path,
- )
+ Traversal::new(self, include_files, include_dirs, include_ignored, path)
}
pub fn files(&self, include_ignored: bool, start: usize) -> Traversal {
@@ -2390,33 +2567,39 @@ impl Snapshot {
self.traverse_from_offset(true, true, include_ignored, start)
}
- pub fn repositories(&self) -> impl Iterator<Item = (&Arc<Path>, &RepositoryEntry)> {
- self.repository_entries
- .iter()
- .map(|(path, entry)| (&path.0, entry))
+ #[cfg(any(feature = "test-support", test))]
+ pub fn git_status(&self, work_dir: &Path) -> Option<Vec<StatusEntry>> {
+ self.repositories
+ .get(&PathKey(work_dir.into()), &())
+ .map(|repo| repo.status().collect())
}
- pub fn repository_for_work_directory(&self, path: &Path) -> Option<RepositoryEntry> {
- self.repository_entries
- .get(&RepositoryWorkDirectory(path.into()))
- .cloned()
+ pub fn repositories(&self) -> impl Iterator<Item = &RepositoryEntry> {
+ self.repositories.iter()
}
- /// Get the repository whose work directory contains the given path.
- pub fn repository_for_path(&self, path: &Path) -> Option<RepositoryEntry> {
- self.repository_and_work_directory_for_path(path)
- .map(|e| e.1)
+ /// Get the repository whose work directory corresponds to the given path.
+ pub(crate) fn repository(&self, work_directory: PathKey) -> Option<RepositoryEntry> {
+ self.repositories.get(&work_directory, &()).cloned()
}
- pub fn repository_and_work_directory_for_path(
- &self,
- path: &Path,
- ) -> Option<(RepositoryWorkDirectory, RepositoryEntry)> {
- self.repository_entries
- .iter()
- .filter(|(workdir_path, _)| path.starts_with(workdir_path))
- .last()
- .map(|(path, repo)| (path.clone(), repo.clone()))
+ /// Get the repository whose work directory contains the given path.
+ pub fn repository_for_path(&self, path: &Path) -> Option<&RepositoryEntry> {
+ let mut cursor = self.repositories.cursor::<PathProgress>(&());
+ let mut repository = None;
+
+        // Git repositories may be nested inside one another. Because entries are
+        // sorted lexicographically by path, a deeper repository always sorts after
+        // the repositories that contain it. So keep seeking forward through every
+        // matching repository to find the deepest one that could contain this path.
+ while cursor.seek_forward(&PathTarget::Contains(path), Bias::Left, &())
+ && cursor.item().is_some()
+ {
+ repository = cursor.item();
+ cursor.next(&());
+ }
+
+ repository
}
/// Given an ordered iterator of entries, returns an iterator of those entries,
@@ -2425,86 +2608,28 @@ impl Snapshot {
&'a self,
entries: impl 'a + Iterator<Item = &'a Entry>,
) -> impl 'a + Iterator<Item = (&'a Entry, Option<&'a RepositoryEntry>)> {
- let mut containing_repos = Vec::<(&Arc<Path>, &RepositoryEntry)>::new();
+ let mut containing_repos = Vec::<&RepositoryEntry>::new();
let mut repositories = self.repositories().peekable();
entries.map(move |entry| {
- while let Some((repo_path, _)) = containing_repos.last() {
- if entry.path.starts_with(repo_path) {
+ while let Some(repository) = containing_repos.last() {
+ if repository.directory_contains(&entry.path) {
break;
} else {
containing_repos.pop();
}
}
- while let Some((repo_path, _)) = repositories.peek() {
- if entry.path.starts_with(repo_path) {
+ while let Some(repository) = repositories.peek() {
+ if repository.directory_contains(&entry.path) {
containing_repos.push(repositories.next().unwrap());
} else {
break;
}
}
- let repo = containing_repos.last().map(|(_, repo)| *repo);
+ let repo = containing_repos.last().copied();
(entry, repo)
})
}
- /// Updates the `git_status` of the given entries such that files'
- /// statuses bubble up to their ancestor directories.
- pub fn propagate_git_statuses(&self, result: &mut [Entry]) {
- let mut cursor = self
- .entries_by_path
- .cursor::<(TraversalProgress, GitStatuses)>(&());
- let mut entry_stack = Vec::<(usize, GitStatuses)>::new();
-
- let mut result_ix = 0;
- loop {
- let next_entry = result.get(result_ix);
- let containing_entry = entry_stack.last().map(|(ix, _)| &result[*ix]);
-
- let entry_to_finish = match (containing_entry, next_entry) {
- (Some(_), None) => entry_stack.pop(),
- (Some(containing_entry), Some(next_path)) => {
- if next_path.path.starts_with(&containing_entry.path) {
- None
- } else {
- entry_stack.pop()
- }
- }
- (None, Some(_)) => None,
- (None, None) => break,
- };
-
- if let Some((entry_ix, prev_statuses)) = entry_to_finish {
- cursor.seek_forward(
- &TraversalTarget::PathSuccessor(&result[entry_ix].path),
- Bias::Left,
- &(),
- );
-
- let statuses = cursor.start().1 - prev_statuses;
-
- result[entry_ix].git_status = if statuses.conflict > 0 {
- Some(GitFileStatus::Conflict)
- } else if statuses.modified > 0 {
- Some(GitFileStatus::Modified)
- } else if statuses.added > 0 {
- Some(GitFileStatus::Added)
- } else {
- None
- };
- } else {
- if result[result_ix].is_dir() {
- cursor.seek_forward(
- &TraversalTarget::Path(&result[result_ix].path),
- Bias::Left,
- &(),
- );
- entry_stack.push((result_ix, cursor.start().1));
- }
- result_ix += 1;
- }
- }
- }
-
pub fn paths(&self) -> impl Iterator<Item = &Arc<Path>> {
let empty_path = Path::new("");
self.entries_by_path
@@ -2515,8 +2640,9 @@ impl Snapshot {
pub fn child_entries<'a>(&'a self, parent_path: &'a Path) -> ChildEntriesIter<'a> {
let mut cursor = self.entries_by_path.cursor(&());
- cursor.seek(&TraversalTarget::Path(parent_path), Bias::Right, &());
+ cursor.seek(&TraversalTarget::path(parent_path), Bias::Right, &());
let traversal = Traversal {
+ snapshot: self,
cursor,
include_files: true,
include_dirs: true,
@@ -2543,19 +2669,19 @@ impl Snapshot {
}
pub fn root_git_entry(&self) -> Option<RepositoryEntry> {
- self.repository_entries
- .get(&RepositoryWorkDirectory(Path::new("").into()))
+ self.repositories
+ .get(&PathKey(Path::new("").into()), &())
.map(|entry| entry.to_owned())
}
pub fn git_entry(&self, work_directory_path: Arc<Path>) -> Option<RepositoryEntry> {
- self.repository_entries
- .get(&RepositoryWorkDirectory(work_directory_path))
+ self.repositories
+ .get(&PathKey(work_directory_path), &())
.map(|entry| entry.to_owned())
}
pub fn git_entries(&self) -> impl Iterator<Item = &RepositoryEntry> {
- self.repository_entries.values()
+ self.repositories.iter()
}
pub fn scan_id(&self) -> usize {
@@ -2586,10 +2712,10 @@ impl Snapshot {
}
impl LocalSnapshot {
- pub fn repo_for_path(&self, path: &Path) -> Option<(RepositoryEntry, &LocalRepositoryEntry)> {
- let (_, repo_entry) = self.repository_and_work_directory_for_path(path)?;
- let work_directory_id = repo_entry.work_directory_id();
- Some((repo_entry, self.git_repositories.get(&work_directory_id)?))
+ pub fn local_repo_for_path(&self, path: &Path) -> Option<&LocalRepositoryEntry> {
+ let repository_entry = self.repository_for_path(path)?;
+ let work_directory_id = repository_entry.work_directory_id();
+ self.git_repositories.get(&work_directory_id)
}
fn build_update(
@@ -2613,18 +2739,16 @@ impl LocalSnapshot {
}
for (work_dir_path, change) in repo_changes.iter() {
- let new_repo = self
- .repository_entries
- .get(&RepositoryWorkDirectory(work_dir_path.clone()));
+ let new_repo = self.repositories.get(&PathKey(work_dir_path.clone()), &());
match (&change.old_repository, new_repo) {
(Some(old_repo), Some(new_repo)) => {
updated_repositories.push(new_repo.build_update(old_repo));
}
(None, Some(new_repo)) => {
- updated_repositories.push(proto::RepositoryEntry::from(new_repo));
+ updated_repositories.push(new_repo.initial_update());
}
(Some(old_repo), None) => {
- removed_repositories.push(old_repo.work_directory.0.to_proto());
+ removed_repositories.push(old_repo.work_directory_id.to_proto());
}
_ => {}
}
@@ -2827,15 +2951,15 @@ impl LocalSnapshot {
.map(|repo| repo.1.dot_git_dir_abs_path.clone())
.collect::<HashSet<_>>();
let work_dir_paths = self
- .repository_entries
+ .repositories
.iter()
- .map(|repo| repo.0.clone().0)
+ .map(|repo| repo.work_directory.path.clone())
.collect::<HashSet<_>>();
assert_eq!(dotgit_paths.len(), work_dir_paths.len());
- assert_eq!(self.repository_entries.iter().count(), work_dir_paths.len());
+ assert_eq!(self.repositories.iter().count(), work_dir_paths.len());
assert_eq!(self.git_repositories.iter().count(), work_dir_paths.len());
- for (_, entry) in self.repository_entries.iter() {
- self.git_repositories.get(&entry.work_directory).unwrap();
+ for entry in self.repositories.iter() {
+ self.git_repositories.get(&entry.work_directory_id).unwrap();
}
}
@@ -2872,23 +2996,7 @@ impl BackgroundScannerState {
let path = entry.path.clone();
let ignore_stack = self.snapshot.ignore_stack_for_abs_path(&abs_path, true);
let mut ancestor_inodes = self.snapshot.ancestor_inodes_for_path(&path);
- let mut containing_repository = None;
- if !ignore_stack.is_abs_path_ignored(&abs_path, true) {
- if let Some((repo_entry, repo)) = self.snapshot.repo_for_path(&path) {
- if let Some(workdir_path) = repo_entry.work_directory(&self.snapshot) {
- if let Ok(repo_path) = repo_entry.relativize(&self.snapshot, &path) {
- containing_repository = Some(ScanJobContainingRepository {
- work_directory: workdir_path,
- statuses: repo
- .repo_ptr
- .status(&[repo_path.0])
- .log_err()
- .unwrap_or_default(),
- });
- }
- }
- }
- }
+
if !ancestor_inodes.contains(&entry.inode) {
ancestor_inodes.insert(entry.inode);
scan_job_tx
@@ -2899,7 +3007,6 @@ impl BackgroundScannerState {
scan_queue: scan_job_tx.clone(),
ancestor_inodes,
is_external: entry.is_external,
- containing_repository,
})
.unwrap();
}
@@ -3006,8 +3113,8 @@ impl BackgroundScannerState {
.snapshot
.entries_by_path
.cursor::<TraversalProgress>(&());
- new_entries = cursor.slice(&TraversalTarget::Path(path), Bias::Left, &());
- removed_entries = cursor.slice(&TraversalTarget::PathSuccessor(path), Bias::Left, &());
+ new_entries = cursor.slice(&TraversalTarget::path(path), Bias::Left, &());
+ removed_entries = cursor.slice(&TraversalTarget::successor(path), Bias::Left, &());
new_entries.append(cursor.suffix(&()), &());
}
self.snapshot.entries_by_path = new_entries;
@@ -3053,9 +3160,9 @@ impl BackgroundScannerState {
self.snapshot
.git_repositories
.retain(|id, _| removed_ids.binary_search(id).is_err());
- self.snapshot
- .repository_entries
- .retain(|repo_path, _| !repo_path.0.starts_with(path));
+ self.snapshot.repositories.retain(&(), |repository| {
+ !repository.work_directory.starts_with(path)
+ });
#[cfg(test)]
self.snapshot.check_invariants(false);
@@ -3066,7 +3173,7 @@ impl BackgroundScannerState {
dot_git_path: Arc<Path>,
fs: &dyn Fs,
watcher: &dyn Watcher,
- ) -> Option<(RepositoryWorkDirectory, Arc<dyn GitRepository>)> {
+ ) -> Option<LocalRepositoryEntry> {
let work_dir_path: Arc<Path> = match dot_git_path.parent() {
Some(parent_dir) => {
// Guard against repositories inside the repository metadata
@@ -3102,7 +3209,7 @@ impl BackgroundScannerState {
location_in_repo: Option<Arc<Path>>,
fs: &dyn Fs,
watcher: &dyn Watcher,
- ) -> Option<(RepositoryWorkDirectory, Arc<dyn GitRepository>)> {
+ ) -> Option<LocalRepositoryEntry> {
let work_dir_id = self
.snapshot
.entry_for_path(work_dir_path.clone())
@@ -3134,7 +3241,10 @@ impl BackgroundScannerState {
};
log::trace!("constructed libgit2 repo in {:?}", t0.elapsed());
- let work_directory = RepositoryWorkDirectory(work_dir_path.clone());
+ let work_directory = WorkDirectory {
+ path: work_dir_path.clone(),
+ location_in_repo,
+ };
if let Some(git_hosting_provider_registry) = self.git_hosting_provider_registry.clone() {
git_hosting_providers::register_additional_providers(
@@ -3143,25 +3253,30 @@ impl BackgroundScannerState {
);
}
- self.snapshot.repository_entries.insert(
- work_directory.clone(),
+ self.snapshot.repositories.insert_or_replace(
RepositoryEntry {
- work_directory: work_dir_id.into(),
+ work_directory_id: work_dir_id,
+ work_directory: work_directory.clone(),
branch: repository.branch_name().map(Into::into),
- location_in_repo,
- },
- );
- self.snapshot.git_repositories.insert(
- work_dir_id,
- LocalRepositoryEntry {
- git_dir_scan_id: 0,
- repo_ptr: repository.clone(),
- dot_git_dir_abs_path: actual_dot_git_dir_abs_path,
- dot_git_worktree_abs_path,
+ statuses_by_path: Default::default(),
},
+ &(),
);
- Some((work_directory, repository))
+ let local_repository = LocalRepositoryEntry {
+ work_directory: work_directory.clone(),
+ git_dir_scan_id: 0,
+ status_scan_id: 0,
+ repo_ptr: repository.clone(),
+ dot_git_dir_abs_path: actual_dot_git_dir_abs_path,
+ dot_git_worktree_abs_path,
+ };
+
+ self.snapshot
+ .git_repositories
+ .insert(work_dir_id, local_repository.clone());
+
+ Some(local_repository)
}
}
@@ -3466,7 +3581,7 @@ pub struct Entry {
/// directory is expanded. External entries are treated like gitignored
/// entries in that they are not included in searches.
pub is_external: bool,
- pub git_status: Option<GitFileStatus>,
+
/// Whether this entry is considered to be a `.env` file.
pub is_private: bool,
/// The entry's size on disk, in bytes.
@@ -3499,6 +3614,7 @@ pub enum PathChange {
Loaded,
}
+#[derive(Debug)]
pub struct GitRepositoryChange {
/// The previous state of the repository, if it already existed.
pub old_repository: Option<RepositoryEntry>,
@@ -3507,63 +3623,282 @@ pub struct GitRepositoryChange {
pub type UpdatedEntriesSet = Arc<[(Arc<Path>, ProjectEntryId, PathChange)]>;
pub type UpdatedGitRepositoriesSet = Arc<[(Arc<Path>, GitRepositoryChange)]>;
-impl Entry {
- fn new(
- path: Arc<Path>,
- metadata: &fs::Metadata,
- next_entry_id: &AtomicUsize,
- root_char_bag: CharBag,
- canonical_path: Option<Box<Path>>,
- ) -> Self {
- let char_bag = char_bag_for_path(root_char_bag, &path);
- Self {
- id: ProjectEntryId::new(next_entry_id),
- kind: if metadata.is_dir {
- EntryKind::PendingDir
- } else {
- EntryKind::File
- },
- path,
- inode: metadata.inode,
- mtime: Some(metadata.mtime),
- size: metadata.len,
- canonical_path,
- is_ignored: false,
- is_always_included: false,
- is_external: false,
- is_private: false,
- git_status: None,
- char_bag,
- is_fifo: metadata.is_fifo,
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub struct StatusEntry {
+ pub repo_path: RepoPath,
+ pub status: GitFileStatus,
+}
+
+impl StatusEntry {
+ fn to_proto(&self) -> proto::StatusEntry {
+ proto::StatusEntry {
+ repo_path: self.repo_path.to_proto(),
+ status: git_status_to_proto(self.status),
}
}
+}
- pub fn is_created(&self) -> bool {
- self.mtime.is_some()
+impl TryFrom<proto::StatusEntry> for StatusEntry {
+ type Error = anyhow::Error;
+ fn try_from(value: proto::StatusEntry) -> Result<Self, Self::Error> {
+ Ok(Self {
+ repo_path: RepoPath(Path::new(&value.repo_path).into()),
+ status: git_status_from_proto(Some(value.status))
+ .ok_or_else(|| anyhow!("Unable to parse status value {}", value.status))?,
+ })
}
+}
- pub fn is_dir(&self) -> bool {
- self.kind.is_dir()
- }
+#[derive(Clone, Debug)]
+struct PathProgress<'a> {
+ max_path: &'a Path,
+}
- pub fn is_file(&self) -> bool {
- self.kind.is_file()
+#[derive(Clone, Debug)]
+pub struct PathSummary<S> {
+ max_path: Arc<Path>,
+ item_summary: S,
+}
+
+impl<S: Summary> Summary for PathSummary<S> {
+ type Context = S::Context;
+
+ fn zero(cx: &Self::Context) -> Self {
+ Self {
+ max_path: Path::new("").into(),
+ item_summary: S::zero(cx),
+ }
}
- pub fn git_status(&self) -> Option<GitFileStatus> {
- self.git_status
+ fn add_summary(&mut self, rhs: &Self, cx: &Self::Context) {
+ self.max_path = rhs.max_path.clone();
+ self.item_summary.add_summary(&rhs.item_summary, cx);
}
}
-impl EntryKind {
- pub fn is_dir(&self) -> bool {
- matches!(
- self,
- EntryKind::Dir | EntryKind::PendingDir | EntryKind::UnloadedDir
- )
+impl<'a, S: Summary> sum_tree::Dimension<'a, PathSummary<S>> for PathProgress<'a> {
+ fn zero(_: &<PathSummary<S> as Summary>::Context) -> Self {
+ Self {
+ max_path: Path::new(""),
+ }
}
- pub fn is_unloaded(&self) -> bool {
+ fn add_summary(
+ &mut self,
+ summary: &'a PathSummary<S>,
+ _: &<PathSummary<S> as Summary>::Context,
+ ) {
+ self.max_path = summary.max_path.as_ref()
+ }
+}
+
+impl sum_tree::Item for RepositoryEntry {
+ type Summary = PathSummary<Unit>;
+
+ fn summary(&self, _: &<Self::Summary as Summary>::Context) -> Self::Summary {
+ PathSummary {
+ max_path: self.work_directory.path.clone(),
+ item_summary: Unit,
+ }
+ }
+}
+
+impl sum_tree::KeyedItem for RepositoryEntry {
+ type Key = PathKey;
+
+ fn key(&self) -> Self::Key {
+ PathKey(self.work_directory.path.clone())
+ }
+}
+
+impl sum_tree::Summary for GitStatuses {
+ type Context = ();
+
+ fn zero(_: &Self::Context) -> Self {
+ Default::default()
+ }
+
+ fn add_summary(&mut self, rhs: &Self, _: &Self::Context) {
+ *self += *rhs;
+ }
+}
+
+impl sum_tree::Item for StatusEntry {
+ type Summary = PathSummary<GitStatuses>;
+
+ fn summary(&self, _: &<Self::Summary as Summary>::Context) -> Self::Summary {
+ PathSummary {
+ max_path: self.repo_path.0.clone(),
+ item_summary: match self.status {
+ GitFileStatus::Added => GitStatuses {
+ added: 1,
+ ..Default::default()
+ },
+ GitFileStatus::Modified => GitStatuses {
+ modified: 1,
+ ..Default::default()
+ },
+ GitFileStatus::Conflict => GitStatuses {
+ conflict: 1,
+ ..Default::default()
+ },
+ GitFileStatus::Deleted => Default::default(),
+ GitFileStatus::Untracked => GitStatuses {
+ untracked: 1,
+ ..Default::default()
+ },
+ },
+ }
+ }
+}
+
+impl sum_tree::KeyedItem for StatusEntry {
+ type Key = PathKey;
+
+ fn key(&self) -> Self::Key {
+ PathKey(self.repo_path.0.clone())
+ }
+}
+
+#[derive(Clone, Debug, Default, Copy, PartialEq, Eq)]
+pub struct GitStatuses {
+ added: usize,
+ modified: usize,
+ conflict: usize,
+ untracked: usize,
+}
+
+impl GitStatuses {
+ pub fn to_status(&self) -> Option<GitFileStatus> {
+ if self.conflict > 0 {
+ Some(GitFileStatus::Conflict)
+ } else if self.modified > 0 {
+ Some(GitFileStatus::Modified)
+ } else if self.added > 0 || self.untracked > 0 {
+ Some(GitFileStatus::Added)
+ } else {
+ None
+ }
+ }
+}
+
+impl std::ops::Add<Self> for GitStatuses {
+ type Output = Self;
+
+ fn add(self, rhs: Self) -> Self {
+ GitStatuses {
+ added: self.added + rhs.added,
+ modified: self.modified + rhs.modified,
+ conflict: self.conflict + rhs.conflict,
+ untracked: self.untracked + rhs.untracked,
+ }
+ }
+}
+
+impl std::ops::AddAssign for GitStatuses {
+ fn add_assign(&mut self, rhs: Self) {
+ self.added += rhs.added;
+ self.modified += rhs.modified;
+ self.conflict += rhs.conflict;
+ self.untracked += rhs.untracked;
+ }
+}
+
+impl std::ops::Sub for GitStatuses {
+ type Output = GitStatuses;
+
+ fn sub(self, rhs: Self) -> Self::Output {
+ GitStatuses {
+ added: self.added - rhs.added,
+ modified: self.modified - rhs.modified,
+ conflict: self.conflict - rhs.conflict,
+ untracked: self.untracked - rhs.untracked,
+ }
+ }
+}
+
+impl<'a> sum_tree::Dimension<'a, PathSummary<GitStatuses>> for GitStatuses {
+ fn zero(_cx: &()) -> Self {
+ Default::default()
+ }
+
+ fn add_summary(&mut self, summary: &'a PathSummary<GitStatuses>, _: &()) {
+ *self += summary.item_summary
+ }
+}
+
+impl<'a, S: Summary> sum_tree::Dimension<'a, PathSummary<S>> for PathKey {
+ fn zero(_: &S::Context) -> Self {
+ Default::default()
+ }
+
+ fn add_summary(&mut self, summary: &'a PathSummary<S>, _: &S::Context) {
+ self.0 = summary.max_path.clone();
+ }
+}
+
+impl<'a, S: Summary> sum_tree::Dimension<'a, PathSummary<S>> for TraversalProgress<'a> {
+ fn zero(_cx: &S::Context) -> Self {
+ Default::default()
+ }
+
+ fn add_summary(&mut self, summary: &'a PathSummary<S>, _: &S::Context) {
+ self.max_path = summary.max_path.as_ref();
+ }
+}
+
+impl Entry {
+ fn new(
+ path: Arc<Path>,
+ metadata: &fs::Metadata,
+ next_entry_id: &AtomicUsize,
+ root_char_bag: CharBag,
+ canonical_path: Option<Box<Path>>,
+ ) -> Self {
+ let char_bag = char_bag_for_path(root_char_bag, &path);
+ Self {
+ id: ProjectEntryId::new(next_entry_id),
+ kind: if metadata.is_dir {
+ EntryKind::PendingDir
+ } else {
+ EntryKind::File
+ },
+ path,
+ inode: metadata.inode,
+ mtime: Some(metadata.mtime),
+ size: metadata.len,
+ canonical_path,
+ is_ignored: false,
+ is_always_included: false,
+ is_external: false,
+ is_private: false,
+ char_bag,
+ is_fifo: metadata.is_fifo,
+ }
+ }
+
+ pub fn is_created(&self) -> bool {
+ self.mtime.is_some()
+ }
+
+ pub fn is_dir(&self) -> bool {
+ self.kind.is_dir()
+ }
+
+ pub fn is_file(&self) -> bool {
+ self.kind.is_file()
+ }
+}
+
+impl EntryKind {
+ pub fn is_dir(&self) -> bool {
+ matches!(
+ self,
+ EntryKind::Dir | EntryKind::PendingDir | EntryKind::UnloadedDir
+ )
+ }
+
+ pub fn is_unloaded(&self) -> bool {
matches!(self, EntryKind::UnloadedDir)
}
@@ -1497,7 +1497,8 @@ async fn test_bump_mtime_of_git_repo_workdir(cx: &mut TestAppContext) {
cx.executor().run_until_parked();
let snapshot = tree.read_with(cx, |tree, _| tree.snapshot());
- check_propagated_statuses(
+
+ check_git_statuses(
&snapshot,
&[
(Path::new(""), Some(GitFileStatus::Modified)),
@@ -2178,15 +2179,15 @@ async fn test_rename_work_directory(cx: &mut TestAppContext) {
cx.read(|cx| {
let tree = tree.read(cx);
- let (work_dir, _) = tree.repositories().next().unwrap();
- assert_eq!(work_dir.as_ref(), Path::new("projects/project1"));
+ let repo = tree.repositories().next().unwrap();
+ assert_eq!(repo.path.as_ref(), Path::new("projects/project1"));
assert_eq!(
tree.status_for_file(Path::new("projects/project1/a")),
Some(GitFileStatus::Modified)
);
assert_eq!(
tree.status_for_file(Path::new("projects/project1/b")),
- Some(GitFileStatus::Added)
+ Some(GitFileStatus::Untracked)
);
});
@@ -2199,15 +2200,15 @@ async fn test_rename_work_directory(cx: &mut TestAppContext) {
cx.read(|cx| {
let tree = tree.read(cx);
- let (work_dir, _) = tree.repositories().next().unwrap();
- assert_eq!(work_dir.as_ref(), Path::new("projects/project2"));
+ let repo = tree.repositories().next().unwrap();
+ assert_eq!(repo.path.as_ref(), Path::new("projects/project2"));
assert_eq!(
tree.status_for_file(Path::new("projects/project2/a")),
Some(GitFileStatus::Modified)
);
assert_eq!(
tree.status_for_file(Path::new("projects/project2/b")),
- Some(GitFileStatus::Added)
+ Some(GitFileStatus::Untracked)
);
});
}
@@ -2253,23 +2254,13 @@ async fn test_git_repository_for_path(cx: &mut TestAppContext) {
assert!(tree.repository_for_path("c.txt".as_ref()).is_none());
- let entry = tree.repository_for_path("dir1/src/b.txt".as_ref()).unwrap();
- assert_eq!(
- entry
- .work_directory(tree)
- .map(|directory| directory.as_ref().to_owned()),
- Some(Path::new("dir1").to_owned())
- );
+ let repo = tree.repository_for_path("dir1/src/b.txt".as_ref()).unwrap();
+ assert_eq!(repo.path.as_ref(), Path::new("dir1"));
- let entry = tree
+ let repo = tree
.repository_for_path("dir1/deps/dep1/src/a.txt".as_ref())
.unwrap();
- assert_eq!(
- entry
- .work_directory(tree)
- .map(|directory| directory.as_ref().to_owned()),
- Some(Path::new("dir1/deps/dep1").to_owned())
- );
+ assert_eq!(repo.path.as_ref(), Path::new("dir1/deps/dep1"));
let entries = tree.files(false, 0);
@@ -2278,10 +2269,7 @@ async fn test_git_repository_for_path(cx: &mut TestAppContext) {
.map(|(entry, repo)| {
(
entry.path.as_ref(),
- repo.and_then(|repo| {
- repo.work_directory(tree)
- .map(|work_directory| work_directory.0.to_path_buf())
- }),
+ repo.map(|repo| repo.path.to_path_buf()),
)
})
.collect::<Vec<_>>();
@@ -2334,7 +2322,7 @@ async fn test_git_repository_for_path(cx: &mut TestAppContext) {
}
#[gpui::test]
-async fn test_git_status(cx: &mut TestAppContext) {
+async fn test_file_status(cx: &mut TestAppContext) {
init_test(cx);
cx.executor().allow_parking();
const IGNORE_RULE: &str = "**/target";
@@ -2393,17 +2381,17 @@ async fn test_git_status(cx: &mut TestAppContext) {
tree.read_with(cx, |tree, _cx| {
let snapshot = tree.snapshot();
assert_eq!(snapshot.repositories().count(), 1);
- let (dir, repo_entry) = snapshot.repositories().next().unwrap();
- assert_eq!(dir.as_ref(), Path::new("project"));
+ let repo_entry = snapshot.repositories().next().unwrap();
+ assert_eq!(repo_entry.path.as_ref(), Path::new("project"));
assert!(repo_entry.location_in_repo.is_none());
assert_eq!(
snapshot.status_for_file(project_path.join(B_TXT)),
- Some(GitFileStatus::Added)
+ Some(GitFileStatus::Untracked)
);
assert_eq!(
snapshot.status_for_file(project_path.join(F_TXT)),
- Some(GitFileStatus::Added)
+ Some(GitFileStatus::Untracked)
);
});
@@ -2433,7 +2421,7 @@ async fn test_git_status(cx: &mut TestAppContext) {
let snapshot = tree.snapshot();
assert_eq!(
snapshot.status_for_file(project_path.join(F_TXT)),
- Some(GitFileStatus::Added)
+ Some(GitFileStatus::Untracked)
);
assert_eq!(snapshot.status_for_file(project_path.join(B_TXT)), None);
assert_eq!(snapshot.status_for_file(project_path.join(A_TXT)), None);
@@ -2455,7 +2443,7 @@ async fn test_git_status(cx: &mut TestAppContext) {
assert_eq!(snapshot.status_for_file(project_path.join(A_TXT)), None);
assert_eq!(
snapshot.status_for_file(project_path.join(B_TXT)),
- Some(GitFileStatus::Added)
+ Some(GitFileStatus::Untracked)
);
assert_eq!(
snapshot.status_for_file(project_path.join(E_TXT)),
@@ -2494,7 +2482,7 @@ async fn test_git_status(cx: &mut TestAppContext) {
let snapshot = tree.snapshot();
assert_eq!(
snapshot.status_for_file(project_path.join(renamed_dir_name).join(RENAMED_FILE)),
- Some(GitFileStatus::Added)
+ Some(GitFileStatus::Untracked)
);
});
@@ -2518,8 +2506,122 @@ async fn test_git_status(cx: &mut TestAppContext) {
.join(Path::new(renamed_dir_name))
.join(RENAMED_FILE)
),
- Some(GitFileStatus::Added)
+ Some(GitFileStatus::Untracked)
+ );
+ });
+}
+
+#[gpui::test]
+async fn test_git_repository_status(cx: &mut TestAppContext) {
+ init_test(cx);
+ cx.executor().allow_parking();
+
+ let root = temp_tree(json!({
+ "project": {
+ "a.txt": "a", // Modified
+            "b.txt": "bb",   // Untracked
+ "c.txt": "ccc", // Unchanged
+ "d.txt": "dddd", // Deleted
+ },
+
+ }));
+
+ // Set up git repository before creating the worktree.
+ let work_dir = root.path().join("project");
+ let repo = git_init(work_dir.as_path());
+ git_add("a.txt", &repo);
+ git_add("c.txt", &repo);
+ git_add("d.txt", &repo);
+ git_commit("Initial commit", &repo);
+ std::fs::remove_file(work_dir.join("d.txt")).unwrap();
+ std::fs::write(work_dir.join("a.txt"), "aa").unwrap();
+
+ let tree = Worktree::local(
+ root.path(),
+ true,
+ Arc::new(RealFs::default()),
+ Default::default(),
+ &mut cx.to_async(),
+ )
+ .await
+ .unwrap();
+
+ tree.flush_fs_events(cx).await;
+ cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
+ .await;
+ cx.executor().run_until_parked();
+
+ // Check that the right git state is observed on startup
+ tree.read_with(cx, |tree, _cx| {
+ let snapshot = tree.snapshot();
+ let repo = snapshot.repositories().next().unwrap();
+ let entries = repo.status().collect::<Vec<_>>();
+
+ assert_eq!(entries.len(), 3);
+ assert_eq!(entries[0].repo_path.as_ref(), Path::new("a.txt"));
+ assert_eq!(entries[0].status, GitFileStatus::Modified);
+ assert_eq!(entries[1].repo_path.as_ref(), Path::new("b.txt"));
+ assert_eq!(entries[1].status, GitFileStatus::Untracked);
+ assert_eq!(entries[2].repo_path.as_ref(), Path::new("d.txt"));
+ assert_eq!(entries[2].status, GitFileStatus::Deleted);
+ });
+
+ std::fs::write(work_dir.join("c.txt"), "some changes").unwrap();
+ eprintln!("File c.txt has been modified");
+
+ tree.flush_fs_events(cx).await;
+ cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
+ .await;
+ cx.executor().run_until_parked();
+
+ tree.read_with(cx, |tree, _cx| {
+ let snapshot = tree.snapshot();
+ let repository = snapshot.repositories().next().unwrap();
+ let entries = repository.status().collect::<Vec<_>>();
+
+ std::assert_eq!(entries.len(), 4, "entries: {entries:?}");
+ assert_eq!(entries[0].repo_path.as_ref(), Path::new("a.txt"));
+ assert_eq!(entries[0].status, GitFileStatus::Modified);
+ assert_eq!(entries[1].repo_path.as_ref(), Path::new("b.txt"));
+ assert_eq!(entries[1].status, GitFileStatus::Untracked);
+ // Status updated
+ assert_eq!(entries[2].repo_path.as_ref(), Path::new("c.txt"));
+ assert_eq!(entries[2].status, GitFileStatus::Modified);
+ assert_eq!(entries[3].repo_path.as_ref(), Path::new("d.txt"));
+ assert_eq!(entries[3].status, GitFileStatus::Deleted);
+ });
+
+ git_add("a.txt", &repo);
+ git_add("c.txt", &repo);
+ git_remove_index(Path::new("d.txt"), &repo);
+ git_commit("Another commit", &repo);
+ tree.flush_fs_events(cx).await;
+ cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
+ .await;
+ cx.executor().run_until_parked();
+
+ std::fs::remove_file(work_dir.join("a.txt")).unwrap();
+ std::fs::remove_file(work_dir.join("b.txt")).unwrap();
+ tree.flush_fs_events(cx).await;
+ cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
+ .await;
+ cx.executor().run_until_parked();
+
+ tree.read_with(cx, |tree, _cx| {
+ let snapshot = tree.snapshot();
+ let repo = snapshot.repositories().next().unwrap();
+ let entries = repo.status().collect::<Vec<_>>();
+
+ // Deleting an untracked entry, b.txt, should leave no status
+ // a.txt was tracked, and so should have a status
+ assert_eq!(
+ entries.len(),
+ 1,
+ "Entries length was incorrect\n{:#?}",
+ &entries
);
+ assert_eq!(entries[0].repo_path.as_ref(), Path::new("a.txt"));
+ assert_eq!(entries[0].status, GitFileStatus::Deleted);
});
}
@@ -2575,22 +2677,22 @@ async fn test_repository_subfolder_git_status(cx: &mut TestAppContext) {
tree.read_with(cx, |tree, _cx| {
let snapshot = tree.snapshot();
assert_eq!(snapshot.repositories().count(), 1);
- let (dir, repo_entry) = snapshot.repositories().next().unwrap();
+ let repo = snapshot.repositories().next().unwrap();
// Path is blank because the working directory of
// the git repository is located at the root of the project
- assert_eq!(dir.as_ref(), Path::new(""));
+ assert_eq!(repo.path.as_ref(), Path::new(""));
// This is the missing path between the root of the project (sub-folder-2) and its
// location relative to the root of the repository.
assert_eq!(
- repo_entry.location_in_repo,
+ repo.location_in_repo,
Some(Arc::from(Path::new("sub-folder-1/sub-folder-2")))
);
assert_eq!(snapshot.status_for_file("c.txt"), None);
assert_eq!(
snapshot.status_for_file("d/e.txt"),
- Some(GitFileStatus::Added)
+ Some(GitFileStatus::Untracked)
);
});
@@ -2612,6 +2714,93 @@ async fn test_repository_subfolder_git_status(cx: &mut TestAppContext) {
});
}
+#[gpui::test]
+async fn test_traverse_with_git_status(cx: &mut TestAppContext) {
+ init_test(cx);
+ let fs = FakeFs::new(cx.background_executor.clone());
+ fs.insert_tree(
+ "/root",
+ json!({
+ "x": {
+ ".git": {},
+ "x1.txt": "foo",
+ "x2.txt": "bar",
+ "y": {
+ ".git": {},
+ "y1.txt": "baz",
+ "y2.txt": "qux"
+ },
+ "z.txt": "sneaky..."
+ },
+ "z": {
+ ".git": {},
+ "z1.txt": "quux",
+ "z2.txt": "quuux"
+ }
+ }),
+ )
+ .await;
+
+ fs.set_status_for_repo_via_git_operation(
+ Path::new("/root/x/.git"),
+ &[
+ (Path::new("x2.txt"), GitFileStatus::Modified),
+ (Path::new("z.txt"), GitFileStatus::Added),
+ ],
+ );
+ fs.set_status_for_repo_via_git_operation(
+ Path::new("/root/x/y/.git"),
+ &[(Path::new("y1.txt"), GitFileStatus::Conflict)],
+ );
+ fs.set_status_for_repo_via_git_operation(
+ Path::new("/root/z/.git"),
+ &[(Path::new("z2.txt"), GitFileStatus::Added)],
+ );
+
+ let tree = Worktree::local(
+ Path::new("/root"),
+ true,
+ fs.clone(),
+ Default::default(),
+ &mut cx.to_async(),
+ )
+ .await
+ .unwrap();
+
+ tree.flush_fs_events(cx).await;
+ cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
+ .await;
+ cx.executor().run_until_parked();
+
+ let snapshot = tree.read_with(cx, |tree, _| tree.snapshot());
+
+ let mut traversal = snapshot
+ .traverse_from_path(true, false, true, Path::new("x"))
+ .with_git_statuses();
+
+ let entry = traversal.next().unwrap();
+ assert_eq!(entry.path.as_ref(), Path::new("x/x1.txt"));
+ assert_eq!(entry.git_status, None);
+ let entry = traversal.next().unwrap();
+ assert_eq!(entry.path.as_ref(), Path::new("x/x2.txt"));
+ assert_eq!(entry.git_status, Some(GitFileStatus::Modified));
+ let entry = traversal.next().unwrap();
+ assert_eq!(entry.path.as_ref(), Path::new("x/y/y1.txt"));
+ assert_eq!(entry.git_status, Some(GitFileStatus::Conflict));
+ let entry = traversal.next().unwrap();
+ assert_eq!(entry.path.as_ref(), Path::new("x/y/y2.txt"));
+ assert_eq!(entry.git_status, None);
+ let entry = traversal.next().unwrap();
+ assert_eq!(entry.path.as_ref(), Path::new("x/z.txt"));
+ assert_eq!(entry.git_status, Some(GitFileStatus::Added));
+ let entry = traversal.next().unwrap();
+ assert_eq!(entry.path.as_ref(), Path::new("z/z1.txt"));
+ assert_eq!(entry.git_status, None);
+ let entry = traversal.next().unwrap();
+ assert_eq!(entry.path.as_ref(), Path::new("z/z2.txt"));
+ assert_eq!(entry.git_status, Some(GitFileStatus::Added));
+}
+
#[gpui::test]
async fn test_propagate_git_statuses(cx: &mut TestAppContext) {
init_test(cx);
@@ -2638,7 +2827,6 @@ async fn test_propagate_git_statuses(cx: &mut TestAppContext) {
"h1.txt": "",
"h2.txt": ""
},
-
}),
)
.await;
@@ -2668,7 +2856,16 @@ async fn test_propagate_git_statuses(cx: &mut TestAppContext) {
cx.executor().run_until_parked();
let snapshot = tree.read_with(cx, |tree, _| tree.snapshot());
- check_propagated_statuses(
+ check_git_statuses(
+ &snapshot,
+ &[
+ (Path::new(""), Some(GitFileStatus::Conflict)),
+ (Path::new("g"), Some(GitFileStatus::Conflict)),
+ (Path::new("g/h2.txt"), Some(GitFileStatus::Conflict)),
+ ],
+ );
+
+ check_git_statuses(
&snapshot,
&[
(Path::new(""), Some(GitFileStatus::Conflict)),
@@ -2685,7 +2882,7 @@ async fn test_propagate_git_statuses(cx: &mut TestAppContext) {
],
);
- check_propagated_statuses(
+ check_git_statuses(
&snapshot,
&[
(Path::new("a/b"), Some(GitFileStatus::Added)),
@@ -2700,7 +2897,7 @@ async fn test_propagate_git_statuses(cx: &mut TestAppContext) {
],
);
- check_propagated_statuses(
+ check_git_statuses(
&snapshot,
&[
(Path::new("a/b/c1.txt"), Some(GitFileStatus::Added)),
@@ -2712,6 +2909,246 @@ async fn test_propagate_git_statuses(cx: &mut TestAppContext) {
);
}
+#[gpui::test]
+async fn test_propagate_statuses_for_repos_under_project(cx: &mut TestAppContext) {
+ init_test(cx);
+ let fs = FakeFs::new(cx.background_executor.clone());
+ fs.insert_tree(
+ "/root",
+ json!({
+ "x": {
+ ".git": {},
+ "x1.txt": "foo",
+ "x2.txt": "bar"
+ },
+ "y": {
+ ".git": {},
+ "y1.txt": "baz",
+ "y2.txt": "qux"
+ },
+ "z": {
+ ".git": {},
+ "z1.txt": "quux",
+ "z2.txt": "quuux"
+ }
+ }),
+ )
+ .await;
+
+ fs.set_status_for_repo_via_git_operation(
+ Path::new("/root/x/.git"),
+ &[(Path::new("x1.txt"), GitFileStatus::Added)],
+ );
+ fs.set_status_for_repo_via_git_operation(
+ Path::new("/root/y/.git"),
+ &[
+ (Path::new("y1.txt"), GitFileStatus::Conflict),
+ (Path::new("y2.txt"), GitFileStatus::Modified),
+ ],
+ );
+ fs.set_status_for_repo_via_git_operation(
+ Path::new("/root/z/.git"),
+ &[(Path::new("z2.txt"), GitFileStatus::Modified)],
+ );
+
+ let tree = Worktree::local(
+ Path::new("/root"),
+ true,
+ fs.clone(),
+ Default::default(),
+ &mut cx.to_async(),
+ )
+ .await
+ .unwrap();
+
+ tree.flush_fs_events(cx).await;
+ cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
+ .await;
+ cx.executor().run_until_parked();
+
+ let snapshot = tree.read_with(cx, |tree, _| tree.snapshot());
+
+ check_git_statuses(
+ &snapshot,
+ &[
+ (Path::new("x"), Some(GitFileStatus::Added)),
+ (Path::new("x/x1.txt"), Some(GitFileStatus::Added)),
+ ],
+ );
+
+ check_git_statuses(
+ &snapshot,
+ &[
+ (Path::new("y"), Some(GitFileStatus::Conflict)),
+ (Path::new("y/y1.txt"), Some(GitFileStatus::Conflict)),
+ (Path::new("y/y2.txt"), Some(GitFileStatus::Modified)),
+ ],
+ );
+
+ check_git_statuses(
+ &snapshot,
+ &[
+ (Path::new("z"), Some(GitFileStatus::Modified)),
+ (Path::new("z/z2.txt"), Some(GitFileStatus::Modified)),
+ ],
+ );
+
+ check_git_statuses(
+ &snapshot,
+ &[
+ (Path::new("x"), Some(GitFileStatus::Added)),
+ (Path::new("x/x1.txt"), Some(GitFileStatus::Added)),
+ ],
+ );
+
+ check_git_statuses(
+ &snapshot,
+ &[
+ (Path::new("x"), Some(GitFileStatus::Added)),
+ (Path::new("x/x1.txt"), Some(GitFileStatus::Added)),
+ (Path::new("x/x2.txt"), None),
+ (Path::new("y"), Some(GitFileStatus::Conflict)),
+ (Path::new("y/y1.txt"), Some(GitFileStatus::Conflict)),
+ (Path::new("y/y2.txt"), Some(GitFileStatus::Modified)),
+ (Path::new("z"), Some(GitFileStatus::Modified)),
+ (Path::new("z/z1.txt"), None),
+ (Path::new("z/z2.txt"), Some(GitFileStatus::Modified)),
+ ],
+ );
+}
+
+#[gpui::test]
+async fn test_propagate_statuses_for_nested_repos(cx: &mut TestAppContext) {
+ init_test(cx);
+ let fs = FakeFs::new(cx.background_executor.clone());
+ fs.insert_tree(
+ "/root",
+ json!({
+ "x": {
+ ".git": {},
+ "x1.txt": "foo",
+ "x2.txt": "bar",
+ "y": {
+ ".git": {},
+ "y1.txt": "baz",
+ "y2.txt": "qux"
+ },
+ "z.txt": "sneaky..."
+ },
+ "z": {
+ ".git": {},
+ "z1.txt": "quux",
+ "z2.txt": "quuux"
+ }
+ }),
+ )
+ .await;
+
+ fs.set_status_for_repo_via_git_operation(
+ Path::new("/root/x/.git"),
+ &[
+ (Path::new("x2.txt"), GitFileStatus::Modified),
+ (Path::new("z.txt"), GitFileStatus::Added),
+ ],
+ );
+ fs.set_status_for_repo_via_git_operation(
+ Path::new("/root/x/y/.git"),
+ &[(Path::new("y1.txt"), GitFileStatus::Conflict)],
+ );
+
+ fs.set_status_for_repo_via_git_operation(
+ Path::new("/root/z/.git"),
+ &[(Path::new("z2.txt"), GitFileStatus::Added)],
+ );
+
+ let tree = Worktree::local(
+ Path::new("/root"),
+ true,
+ fs.clone(),
+ Default::default(),
+ &mut cx.to_async(),
+ )
+ .await
+ .unwrap();
+
+ tree.flush_fs_events(cx).await;
+ cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
+ .await;
+ cx.executor().run_until_parked();
+
+ let snapshot = tree.read_with(cx, |tree, _| tree.snapshot());
+
+ // Sanity check the propagation for x/y and z
+ check_git_statuses(
+ &snapshot,
+ &[
+            (Path::new("x/y"), Some(GitFileStatus::Conflict)), // the y git repository has a conflict file in it, and so should have a conflict status
+ (Path::new("x/y/y1.txt"), Some(GitFileStatus::Conflict)),
+ (Path::new("x/y/y2.txt"), None),
+ ],
+ );
+ check_git_statuses(
+ &snapshot,
+ &[
+ (Path::new("z"), Some(GitFileStatus::Added)),
+ (Path::new("z/z1.txt"), None),
+ (Path::new("z/z2.txt"), Some(GitFileStatus::Added)),
+ ],
+ );
+
+ // Test one of the fundamental cases of propagation blocking, the transition from one git repository to another
+ check_git_statuses(
+ &snapshot,
+ &[
+ (Path::new("x"), Some(GitFileStatus::Modified)),
+ (Path::new("x/y"), Some(GitFileStatus::Conflict)),
+ (Path::new("x/y/y1.txt"), Some(GitFileStatus::Conflict)),
+ ],
+ );
+
+ // Sanity check everything around it
+ check_git_statuses(
+ &snapshot,
+ &[
+ (Path::new("x"), Some(GitFileStatus::Modified)),
+ (Path::new("x/x1.txt"), None),
+ (Path::new("x/x2.txt"), Some(GitFileStatus::Modified)),
+ (Path::new("x/y"), Some(GitFileStatus::Conflict)),
+ (Path::new("x/y/y1.txt"), Some(GitFileStatus::Conflict)),
+ (Path::new("x/y/y2.txt"), None),
+ (Path::new("x/z.txt"), Some(GitFileStatus::Added)),
+ ],
+ );
+
+ // Test the other fundamental case, transitioning from git repository to non-git repository
+ check_git_statuses(
+ &snapshot,
+ &[
+ (Path::new(""), None),
+ (Path::new("x"), Some(GitFileStatus::Modified)),
+ (Path::new("x/x1.txt"), None),
+ ],
+ );
+
+ // And all together now
+ check_git_statuses(
+ &snapshot,
+ &[
+ (Path::new(""), None),
+ (Path::new("x"), Some(GitFileStatus::Modified)),
+ (Path::new("x/x1.txt"), None),
+ (Path::new("x/x2.txt"), Some(GitFileStatus::Modified)),
+ (Path::new("x/y"), Some(GitFileStatus::Conflict)),
+ (Path::new("x/y/y1.txt"), Some(GitFileStatus::Conflict)),
+ (Path::new("x/y/y2.txt"), None),
+ (Path::new("x/z.txt"), Some(GitFileStatus::Added)),
+ (Path::new("z"), Some(GitFileStatus::Added)),
+ (Path::new("z/z1.txt"), None),
+ (Path::new("z/z2.txt"), Some(GitFileStatus::Added)),
+ ],
+ );
+}
+
#[gpui::test]
async fn test_private_single_file_worktree(cx: &mut TestAppContext) {
init_test(cx);
@@ -2736,22 +3173,20 @@ async fn test_private_single_file_worktree(cx: &mut TestAppContext) {
}
#[track_caller]
-fn check_propagated_statuses(
- snapshot: &Snapshot,
- expected_statuses: &[(&Path, Option<GitFileStatus>)],
-) {
- let mut entries = expected_statuses
+fn check_git_statuses(snapshot: &Snapshot, expected_statuses: &[(&Path, Option<GitFileStatus>)]) {
+ let mut traversal = snapshot
+ .traverse_from_path(true, true, false, "".as_ref())
+ .with_git_statuses();
+ let found_statuses = expected_statuses
.iter()
- .map(|(path, _)| snapshot.entry_for_path(path).unwrap().clone())
+ .map(|&(path, _)| {
+ let git_entry = traversal
+ .find(|git_entry| &*git_entry.path == path)
+ .expect("Traversal has no entry for {path:?}");
+ (path, git_entry.git_status)
+ })
.collect::<Vec<_>>();
- snapshot.propagate_git_statuses(&mut entries);
- assert_eq!(
- entries
- .iter()
- .map(|e| (e.path.as_ref(), e.git_status))
- .collect::<Vec<_>>(),
- expected_statuses
- );
+ assert_eq!(found_statuses, expected_statuses);
}
#[track_caller]
@@ -2763,14 +3198,14 @@ fn git_init(path: &Path) -> git2::Repository {
fn git_add<P: AsRef<Path>>(path: P, repo: &git2::Repository) {
let path = path.as_ref();
let mut index = repo.index().expect("Failed to get index");
- index.add_path(path).expect("Failed to add a.txt");
+ index.add_path(path).expect("Failed to add file");
index.write().expect("Failed to write index");
}
#[track_caller]
fn git_remove_index(path: &Path, repo: &git2::Repository) {
let mut index = repo.index().expect("Failed to get index");
- index.remove_path(path).expect("Failed to add a.txt");
+ index.remove_path(path).expect("Failed to add file");
index.write().expect("Failed to write index");
}
@@ -2900,7 +3335,8 @@ fn assert_entry_git_state(
) {
let entry = tree.entry_for_path(path).expect("entry {path} not found");
assert_eq!(
- entry.git_status, git_status,
+ tree.status_for_file(Path::new(path)),
+ git_status,
"expected {path} to have git status: {git_status:?}"
);
assert_eq!(
@@ -70,7 +70,7 @@ use util::load_shell_from_passwd;
#[global_allocator]
static GLOBAL: mimalloc::MiMalloc = mimalloc::MiMalloc;
-fn files_not_createad_on_launch(errors: HashMap<io::ErrorKind, Vec<&Path>>) {
+fn files_not_created_on_launch(errors: HashMap<io::ErrorKind, Vec<&Path>>) {
let message = "Zed failed to launch";
let error_details = errors
.into_iter()
@@ -179,7 +179,7 @@ fn main() {
let file_errors = init_paths();
if !file_errors.is_empty() {
- files_not_createad_on_launch(file_errors);
+ files_not_created_on_launch(file_errors);
return;
}
@@ -38,6 +38,6 @@ Open the AI Assistant panel (`cmd-r` or `ctrl-r`) and enter:
This extension requires a Perplexity API key to be available via the `PERPLEXITY_API_KEY` environment variable.
-To onbtain a Perplexity.ai API token, login to your Perplexity.ai account and go [Settings->API](https://www.perplexity.ai/settings/api) and under "API Keys" click "Generate". This will require you to have [Perplexity Pro](https://www.perplexity.ai/pro) or to buy API credits. By default the extension uses `llama-3.1-sonar-small-128k-online`, currently cheapest model available which is roughly half a penny per request + a penny per 50,000 tokens. So most requests will cost less than $0.01 USD.
+To obtain a Perplexity.ai API token, login to your Perplexity.ai account and go [Settings->API](https://www.perplexity.ai/settings/api) and under "API Keys" click "Generate". This will require you to have [Perplexity Pro](https://www.perplexity.ai/pro) or to buy API credits. By default the extension uses `llama-3.1-sonar-small-128k-online`, currently cheapest model available which is roughly half a penny per request + a penny per 50,000 tokens. So most requests will cost less than $0.01 USD.
Take your API key and add it to your environment by adding `export PERPLEXITY_API_KEY="pplx-0123456789abcdef..."` to your `~/.zshrc` or `~/.bashrc`. Reload close and reopen your terminal session. Check with `env |grep PERPLEXITY_API_KEY`.
@@ -14,7 +14,7 @@ can_code_sign=false
# This must match the team in the provisioning profile.
IDENTITY="Zed Industries, Inc."
-APPLE_NOTORIZATION_TEAM="MQ55VZLNZQ"
+APPLE_NOTARIZATION_TEAM="MQ55VZLNZQ"
# Function for displaying help info
help_info() {
@@ -317,7 +317,7 @@ function sign_app_binaries() {
/usr/bin/codesign --deep --force --timestamp --options runtime --sign "$IDENTITY" "$(pwd)/${dmg_file_path}" -v
echo "Notarizing DMG with Apple"
- "${xcode_bin_dir_path}/notarytool" submit --wait --apple-id "$APPLE_NOTARIZATION_USERNAME" --password "$APPLE_NOTARIZATION_PASSWORD" --team-id "$APPLE_NOTORIZATION_TEAM" "${dmg_file_path}"
+ "${xcode_bin_dir_path}/notarytool" submit --wait --apple-id "$APPLE_NOTARIZATION_USERNAME" --password "$APPLE_NOTARIZATION_PASSWORD" --team-id "$APPLE_NOTARIZATION_TEAM" "${dmg_file_path}"
echo "Removing temporary DMG (used only for notarization)"
rm "${dmg_file_path}"
@@ -344,7 +344,7 @@ function sign_app_binaries() {
if [[ $can_code_sign = true ]]; then
echo "Notarizing DMG with Apple"
/usr/bin/codesign --deep --force --timestamp --options runtime --sign "$IDENTITY" "$(pwd)/${dmg_file_path}" -v
- "${xcode_bin_dir_path}/notarytool" submit --wait --apple-id "$APPLE_NOTARIZATION_USERNAME" --password "$APPLE_NOTARIZATION_PASSWORD" --team-id "$APPLE_NOTORIZATION_TEAM" "${dmg_file_path}"
+ "${xcode_bin_dir_path}/notarytool" submit --wait --apple-id "$APPLE_NOTARIZATION_USERNAME" --password "$APPLE_NOTARIZATION_PASSWORD" --team-id "$APPLE_NOTARIZATION_TEAM" "${dmg_file_path}"
"${xcode_bin_dir_path}/stapler" staple "${dmg_file_path}"
fi