diff --git a/assets/icons/version_control_branch_12.svg b/assets/icons/version_control_branch_12.svg
new file mode 100644
index 0000000000000000000000000000000000000000..3571874a898e6f1bc9dbfb162c81f8708610d5d9
--- /dev/null
+++ b/assets/icons/version_control_branch_12.svg
@@ -0,0 +1,3 @@
+
diff --git a/crates/collab/migrations.sqlite/20221109000000_test_schema.sql b/crates/collab/migrations.sqlite/20221109000000_test_schema.sql
index 89b924087ef987c89ec58e65f2b165a7d11b4afa..684b6bffe0b938358bbb9f1803da162c5b19cda6 100644
--- a/crates/collab/migrations.sqlite/20221109000000_test_schema.sql
+++ b/crates/collab/migrations.sqlite/20221109000000_test_schema.sql
@@ -82,6 +82,20 @@ CREATE TABLE "worktree_entries" (
CREATE INDEX "index_worktree_entries_on_project_id" ON "worktree_entries" ("project_id");
CREATE INDEX "index_worktree_entries_on_project_id_and_worktree_id" ON "worktree_entries" ("project_id", "worktree_id");
+CREATE TABLE "worktree_repositories" (
+ "project_id" INTEGER NOT NULL,
+ "worktree_id" INTEGER NOT NULL,
+ "work_directory_id" INTEGER NOT NULL,
+ "scan_id" INTEGER NOT NULL,
+ "branch" VARCHAR,
+ "is_deleted" BOOL NOT NULL,
+ PRIMARY KEY(project_id, worktree_id, work_directory_id),
+ FOREIGN KEY(project_id, worktree_id) REFERENCES worktrees (project_id, id) ON DELETE CASCADE,
+ FOREIGN KEY(project_id, worktree_id, work_directory_id) REFERENCES worktree_entries (project_id, worktree_id, id) ON DELETE CASCADE
+);
+CREATE INDEX "index_worktree_repositories_on_project_id" ON "worktree_repositories" ("project_id");
+CREATE INDEX "index_worktree_repositories_on_project_id_and_worktree_id" ON "worktree_repositories" ("project_id", "worktree_id");
+
CREATE TABLE "worktree_diagnostic_summaries" (
"project_id" INTEGER NOT NULL,
"worktree_id" INTEGER NOT NULL,
@@ -153,7 +167,7 @@ CREATE TABLE "followers" (
"follower_connection_server_id" INTEGER NOT NULL REFERENCES servers (id) ON DELETE CASCADE,
"follower_connection_id" INTEGER NOT NULL
);
-CREATE UNIQUE INDEX
+CREATE UNIQUE INDEX
"index_followers_on_project_id_and_leader_connection_server_id_and_leader_connection_id_and_follower_connection_server_id_and_follower_connection_id"
ON "followers" ("project_id", "leader_connection_server_id", "leader_connection_id", "follower_connection_server_id", "follower_connection_id");
CREATE INDEX "index_followers_on_room_id" ON "followers" ("room_id");
diff --git a/crates/collab/migrations/20230508211523_add-repository-entries.sql b/crates/collab/migrations/20230508211523_add-repository-entries.sql
new file mode 100644
index 0000000000000000000000000000000000000000..1e593479394c8434f56f3519b41ce2fa2a9fc2a3
--- /dev/null
+++ b/crates/collab/migrations/20230508211523_add-repository-entries.sql
@@ -0,0 +1,13 @@
+CREATE TABLE "worktree_repositories" (
+ "project_id" INTEGER NOT NULL,
+ "worktree_id" INT8 NOT NULL,
+ "work_directory_id" INT8 NOT NULL,
+ "scan_id" INT8 NOT NULL,
+ "branch" VARCHAR,
+ "is_deleted" BOOL NOT NULL,
+ PRIMARY KEY(project_id, worktree_id, work_directory_id),
+ FOREIGN KEY(project_id, worktree_id) REFERENCES worktrees (project_id, id) ON DELETE CASCADE,
+ FOREIGN KEY(project_id, worktree_id, work_directory_id) REFERENCES worktree_entries (project_id, worktree_id, id) ON DELETE CASCADE
+);
+CREATE INDEX "index_worktree_repositories_on_project_id" ON "worktree_repositories" ("project_id");
+CREATE INDEX "index_worktree_repositories_on_project_id_and_worktree_id" ON "worktree_repositories" ("project_id", "worktree_id");
diff --git a/crates/collab/src/db.rs b/crates/collab/src/db.rs
index f441bbfb000504d959b9e54f333b5a33cc31273d..bc5b816abf2126f0880ac2f23932b020a86a2ee8 100644
--- a/crates/collab/src/db.rs
+++ b/crates/collab/src/db.rs
@@ -14,6 +14,7 @@ mod user;
mod worktree;
mod worktree_diagnostic_summary;
mod worktree_entry;
+mod worktree_repository;
use crate::executor::Executor;
use crate::{Error, Result};
@@ -1489,6 +1490,8 @@ impl Database {
visible: db_worktree.visible,
updated_entries: Default::default(),
removed_entries: Default::default(),
+ updated_repositories: Default::default(),
+ removed_repositories: Default::default(),
diagnostic_summaries: Default::default(),
scan_id: db_worktree.scan_id as u64,
completed_scan_id: db_worktree.completed_scan_id as u64,
@@ -1498,38 +1501,75 @@ impl Database {
.worktrees
.iter()
.find(|worktree| worktree.id == db_worktree.id as u64);
- let entry_filter = if let Some(rejoined_worktree) = rejoined_worktree {
- worktree_entry::Column::ScanId.gt(rejoined_worktree.scan_id)
- } else {
- worktree_entry::Column::IsDeleted.eq(false)
- };
-
- let mut db_entries = worktree_entry::Entity::find()
- .filter(
- Condition::all()
- .add(worktree_entry::Column::WorktreeId.eq(worktree.id))
- .add(entry_filter),
- )
- .stream(&*tx)
- .await?;
- while let Some(db_entry) = db_entries.next().await {
- let db_entry = db_entry?;
- if db_entry.is_deleted {
- worktree.removed_entries.push(db_entry.id as u64);
+ // File entries
+ {
+ let entry_filter = if let Some(rejoined_worktree) = rejoined_worktree {
+ worktree_entry::Column::ScanId.gt(rejoined_worktree.scan_id)
} else {
- worktree.updated_entries.push(proto::Entry {
- id: db_entry.id as u64,
- is_dir: db_entry.is_dir,
- path: db_entry.path,
- inode: db_entry.inode as u64,
- mtime: Some(proto::Timestamp {
- seconds: db_entry.mtime_seconds as u64,
- nanos: db_entry.mtime_nanos as u32,
- }),
- is_symlink: db_entry.is_symlink,
- is_ignored: db_entry.is_ignored,
- });
+ worktree_entry::Column::IsDeleted.eq(false)
+ };
+
+ let mut db_entries = worktree_entry::Entity::find()
+ .filter(
+ Condition::all()
+ .add(worktree_entry::Column::WorktreeId.eq(worktree.id))
+ .add(entry_filter),
+ )
+ .stream(&*tx)
+ .await?;
+
+ while let Some(db_entry) = db_entries.next().await {
+ let db_entry = db_entry?;
+ if db_entry.is_deleted {
+ worktree.removed_entries.push(db_entry.id as u64);
+ } else {
+ worktree.updated_entries.push(proto::Entry {
+ id: db_entry.id as u64,
+ is_dir: db_entry.is_dir,
+ path: db_entry.path,
+ inode: db_entry.inode as u64,
+ mtime: Some(proto::Timestamp {
+ seconds: db_entry.mtime_seconds as u64,
+ nanos: db_entry.mtime_nanos as u32,
+ }),
+ is_symlink: db_entry.is_symlink,
+ is_ignored: db_entry.is_ignored,
+ });
+ }
+ }
+ }
+
+ // Repository Entries
+ {
+ let repository_entry_filter =
+ if let Some(rejoined_worktree) = rejoined_worktree {
+ worktree_repository::Column::ScanId.gt(rejoined_worktree.scan_id)
+ } else {
+ worktree_repository::Column::IsDeleted.eq(false)
+ };
+
+ let mut db_repositories = worktree_repository::Entity::find()
+ .filter(
+ Condition::all()
+ .add(worktree_repository::Column::WorktreeId.eq(worktree.id))
+ .add(repository_entry_filter),
+ )
+ .stream(&*tx)
+ .await?;
+
+ while let Some(db_repository) = db_repositories.next().await {
+ let db_repository = db_repository?;
+ if db_repository.is_deleted {
+ worktree
+ .removed_repositories
+ .push(db_repository.work_directory_id as u64);
+ } else {
+ worktree.updated_repositories.push(proto::RepositoryEntry {
+ work_directory_id: db_repository.work_directory_id as u64,
+ branch: db_repository.branch,
+ });
+ }
}
}
@@ -2330,6 +2370,53 @@ impl Database {
.await?;
}
+ if !update.updated_repositories.is_empty() {
+ worktree_repository::Entity::insert_many(update.updated_repositories.iter().map(
+ |repository| worktree_repository::ActiveModel {
+ project_id: ActiveValue::set(project_id),
+ worktree_id: ActiveValue::set(worktree_id),
+ work_directory_id: ActiveValue::set(repository.work_directory_id as i64),
+ scan_id: ActiveValue::set(update.scan_id as i64),
+ branch: ActiveValue::set(repository.branch.clone()),
+ is_deleted: ActiveValue::set(false),
+ },
+ ))
+ .on_conflict(
+ OnConflict::columns([
+ worktree_repository::Column::ProjectId,
+ worktree_repository::Column::WorktreeId,
+ worktree_repository::Column::WorkDirectoryId,
+ ])
+ .update_columns([
+ worktree_repository::Column::ScanId,
+ worktree_repository::Column::Branch,
+ ])
+ .to_owned(),
+ )
+ .exec(&*tx)
+ .await?;
+ }
+
+ if !update.removed_repositories.is_empty() {
+ worktree_repository::Entity::update_many()
+ .filter(
+ worktree_repository::Column::ProjectId
+ .eq(project_id)
+ .and(worktree_repository::Column::WorktreeId.eq(worktree_id))
+ .and(
+ worktree_repository::Column::WorkDirectoryId
+ .is_in(update.removed_repositories.iter().map(|id| *id as i64)),
+ ),
+ )
+ .set(worktree_repository::ActiveModel {
+ is_deleted: ActiveValue::Set(true),
+ scan_id: ActiveValue::Set(update.scan_id as i64),
+ ..Default::default()
+ })
+ .exec(&*tx)
+ .await?;
+ }
+
let connection_ids = self.project_guest_connection_ids(project_id, &tx).await?;
Ok(connection_ids)
})
@@ -2505,6 +2592,7 @@ impl Database {
root_name: db_worktree.root_name,
visible: db_worktree.visible,
entries: Default::default(),
+ repository_entries: Default::default(),
diagnostic_summaries: Default::default(),
scan_id: db_worktree.scan_id as u64,
completed_scan_id: db_worktree.completed_scan_id as u64,
@@ -2542,6 +2630,29 @@ impl Database {
}
}
+ // Populate repository entries.
+ {
+ let mut db_repository_entries = worktree_repository::Entity::find()
+ .filter(
+ Condition::all()
+ .add(worktree_repository::Column::ProjectId.eq(project_id))
+ .add(worktree_repository::Column::IsDeleted.eq(false)),
+ )
+ .stream(&*tx)
+ .await?;
+ while let Some(db_repository_entry) = db_repository_entries.next().await {
+ let db_repository_entry = db_repository_entry?;
+ if let Some(worktree) =
+ worktrees.get_mut(&(db_repository_entry.worktree_id as u64))
+ {
+ worktree.repository_entries.push(proto::RepositoryEntry {
+ work_directory_id: db_repository_entry.work_directory_id as u64,
+ branch: db_repository_entry.branch,
+ });
+ }
+ }
+ }
+
// Populate worktree diagnostic summaries.
{
let mut db_summaries = worktree_diagnostic_summary::Entity::find()
@@ -3223,6 +3334,8 @@ pub struct RejoinedWorktree {
pub visible: bool,
pub updated_entries: Vec<proto::Entry>,
pub removed_entries: Vec<u64>,
+ pub updated_repositories: Vec<proto::RepositoryEntry>,
+ pub removed_repositories: Vec<u64>,
pub diagnostic_summaries: Vec<proto::DiagnosticSummary>,
pub scan_id: u64,
pub completed_scan_id: u64,
@@ -3277,6 +3390,7 @@ pub struct Worktree {
pub root_name: String,
pub visible: bool,
pub entries: Vec<proto::Entry>,
+ pub repository_entries: Vec<proto::RepositoryEntry>,
pub diagnostic_summaries: Vec<proto::DiagnosticSummary>,
pub scan_id: u64,
pub completed_scan_id: u64,
diff --git a/crates/collab/src/db/worktree_repository.rs b/crates/collab/src/db/worktree_repository.rs
new file mode 100644
index 0000000000000000000000000000000000000000..116d7b3ed9ed214ef8989171bfa610c3fcb08c37
--- /dev/null
+++ b/crates/collab/src/db/worktree_repository.rs
@@ -0,0 +1,21 @@
+use super::ProjectId;
+use sea_orm::entity::prelude::*;
+
+#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
+#[sea_orm(table_name = "worktree_repositories")]
+pub struct Model {
+ #[sea_orm(primary_key)]
+ pub project_id: ProjectId,
+ #[sea_orm(primary_key)]
+ pub worktree_id: i64,
+ #[sea_orm(primary_key)]
+ pub work_directory_id: i64,
+ pub scan_id: i64,
+ pub branch: Option<String>,
+ pub is_deleted: bool,
+}
+
+#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
+pub enum Relation {}
+
+impl ActiveModelBehavior for ActiveModel {}
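For orientation, a rough sketch of querying this entity with sea-orm. `live_repositories` is a hypothetical helper, not part of this change, and assumes it sits next to the other db code so `ProjectId` and the entity module are in scope; it uses the same filter combinators as the db.rs hunks above.

```rust
use sea_orm::{ColumnTrait, DatabaseConnection, DbErr, EntityTrait, QueryFilter};

// Hypothetical helper: fetch the non-deleted repository rows for one worktree.
async fn live_repositories(
    db: &DatabaseConnection,
    project_id: ProjectId,
    worktree_id: i64,
) -> Result<Vec<worktree_repository::Model>, DbErr> {
    worktree_repository::Entity::find()
        .filter(worktree_repository::Column::ProjectId.eq(project_id))
        .filter(worktree_repository::Column::WorktreeId.eq(worktree_id))
        .filter(worktree_repository::Column::IsDeleted.eq(false))
        .all(db)
        .await
}
```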
diff --git a/crates/collab/src/rpc.rs b/crates/collab/src/rpc.rs
index 16e7577d957e3ee993acceab9a52e90987ef7358..23935904d3a85221e1e3ac95df29b64dfd40cbb4 100644
--- a/crates/collab/src/rpc.rs
+++ b/crates/collab/src/rpc.rs
@@ -1063,6 +1063,8 @@ async fn rejoin_room(
removed_entries: worktree.removed_entries,
scan_id: worktree.scan_id,
is_last_update: worktree.completed_scan_id == worktree.scan_id,
+ updated_repositories: worktree.updated_repositories,
+ removed_repositories: worktree.removed_repositories,
};
for update in proto::split_worktree_update(message, MAX_CHUNK_SIZE) {
session.peer.send(session.connection_id, update.clone())?;
@@ -1383,6 +1385,8 @@ async fn join_project(
removed_entries: Default::default(),
scan_id: worktree.scan_id,
is_last_update: worktree.scan_id == worktree.completed_scan_id,
+ updated_repositories: worktree.repository_entries,
+ removed_repositories: Default::default(),
};
for update in proto::split_worktree_update(message, MAX_CHUNK_SIZE) {
session.peer.send(session.connection_id, update.clone())?;
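Both call sites reuse `proto::split_worktree_update`, so repository data is chunked along with file entries. As a rough illustration (field values invented, the other `UpdateWorktree` fields elided via `Default`), a fresh join sends every live repository as an update and leaves the removed list empty; only the rejoin path fills `removed_repositories`:

```rust
// Illustrative only: the repository portion of an UpdateWorktree message.
fn example_fresh_join_update() -> proto::UpdateWorktree {
    proto::UpdateWorktree {
        updated_repositories: vec![proto::RepositoryEntry {
            work_directory_id: 42,
            branch: Some("main".to_string()),
        }],
        // Populated with work_directory_id values on the rejoin path only.
        removed_repositories: Vec::new(),
        scan_id: 7,
        is_last_update: true,
        ..Default::default()
    }
}
```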
diff --git a/crates/collab/src/tests/integration_tests.rs b/crates/collab/src/tests/integration_tests.rs
index 40f11735795f920cb68ba9dc640b1b11d0a2ad7f..9f04642e30855a79aa6ac91e674c62e3c23c4adc 100644
--- a/crates/collab/src/tests/integration_tests.rs
+++ b/crates/collab/src/tests/integration_tests.rs
@@ -13,8 +13,8 @@ use editor::{
use fs::{FakeFs, Fs as _, LineEnding, RemoveOptions};
use futures::StreamExt as _;
use gpui::{
- executor::Deterministic, geometry::vector::vec2f, test::EmptyView, ModelHandle, TestAppContext,
- ViewHandle,
+ executor::Deterministic, geometry::vector::vec2f, test::EmptyView, AppContext, ModelHandle,
+ TestAppContext, ViewHandle,
};
use indoc::indoc;
use language::{
@@ -2604,6 +2604,92 @@ async fn test_git_diff_base_change(
});
}
+#[gpui::test]
+async fn test_git_branch_name(
+ deterministic: Arc,
+ cx_a: &mut TestAppContext,
+ cx_b: &mut TestAppContext,
+ cx_c: &mut TestAppContext,
+) {
+ deterministic.forbid_parking();
+ let mut server = TestServer::start(&deterministic).await;
+ let client_a = server.create_client(cx_a, "user_a").await;
+ let client_b = server.create_client(cx_b, "user_b").await;
+ let client_c = server.create_client(cx_c, "user_c").await;
+ server
+ .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b), (&client_c, cx_c)])
+ .await;
+ let active_call_a = cx_a.read(ActiveCall::global);
+
+ client_a
+ .fs
+ .insert_tree(
+ "/dir",
+ json!({
+ ".git": {},
+ }),
+ )
+ .await;
+
+ let (project_local, _worktree_id) = client_a.build_local_project("/dir", cx_a).await;
+ let project_id = active_call_a
+ .update(cx_a, |call, cx| {
+ call.share_project(project_local.clone(), cx)
+ })
+ .await
+ .unwrap();
+
+ let project_remote = client_b.build_remote_project(project_id, cx_b).await;
+ client_a
+ .fs
+ .as_fake()
+ .set_branch_name(Path::new("/dir/.git"), Some("branch-1"))
+ .await;
+
+ // Wait for it to catch up to the new branch
+ deterministic.run_until_parked();
+
+ #[track_caller]
+ fn assert_branch(branch_name: Option<impl Into<String>>, project: &Project, cx: &AppContext) {
+ let branch_name = branch_name.map(Into::into);
+ let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
+ assert_eq!(worktrees.len(), 1);
+ let worktree = worktrees[0].clone();
+ let root_entry = worktree.read(cx).snapshot().root_git_entry().unwrap();
+ assert_eq!(root_entry.branch(), branch_name.map(Into::into));
+ }
+
+ // Smoke test branch reading
+ project_local.read_with(cx_a, |project, cx| {
+ assert_branch(Some("branch-1"), project, cx)
+ });
+ project_remote.read_with(cx_b, |project, cx| {
+ assert_branch(Some("branch-1"), project, cx)
+ });
+
+ client_a
+ .fs
+ .as_fake()
+ .set_branch_name(Path::new("/dir/.git"), Some("branch-2"))
+ .await;
+
+ // Wait for buffer_local_a to receive it
+ deterministic.run_until_parked();
+
+ // Smoke test branch reading
+ project_local.read_with(cx_a, |project, cx| {
+ assert_branch(Some("branch-2"), project, cx)
+ });
+ project_remote.read_with(cx_b, |project, cx| {
+ assert_branch(Some("branch-2"), project, cx)
+ });
+
+ let project_remote_c = client_c.build_remote_project(project_id, cx_c).await;
+ project_remote_c.read_with(cx_c, |project, cx| {
+ assert_branch(Some("branch-2"), project, cx)
+ });
+}
+
#[gpui::test(iterations = 10)]
async fn test_fs_operations(
deterministic: Arc<Deterministic>,
diff --git a/crates/collab/src/tests/randomized_integration_tests.rs b/crates/collab/src/tests/randomized_integration_tests.rs
index 00273722c4312217b533c9f3a25fc12b0387acc7..d5bd0033f7d53e6b18766db6d58642b5472201c4 100644
--- a/crates/collab/src/tests/randomized_integration_tests.rs
+++ b/crates/collab/src/tests/randomized_integration_tests.rs
@@ -785,6 +785,28 @@ async fn apply_client_operation(
}
client.fs.set_index_for_repo(&dot_git_dir, &contents).await;
}
+
+ ClientOperation::WriteGitBranch {
+ repo_path,
+ new_branch,
+ } => {
+ if !client.fs.directories().contains(&repo_path) {
+ return Err(TestError::Inapplicable);
+ }
+
+ log::info!(
+ "{}: writing git branch for repo {:?}: {:?}",
+ client.username,
+ repo_path,
+ new_branch
+ );
+
+ let dot_git_dir = repo_path.join(".git");
+ if client.fs.metadata(&dot_git_dir).await?.is_none() {
+ client.fs.create_dir(&dot_git_dir).await?;
+ }
+ client.fs.set_branch_name(&dot_git_dir, new_branch).await;
+ }
}
Ok(())
}
@@ -859,6 +881,12 @@ fn check_consistency_between_clients(clients: &[(Rc<TestClient>, TestAppContext)
host_snapshot.abs_path(),
guest_project.remote_id(),
);
+ assert_eq!(guest_snapshot.repositories().collect::<Vec<_>>(), host_snapshot.repositories().collect::<Vec<_>>(),
+ "{} has different repositories than the host for worktree {:?} and project {:?}",
+ client.username,
+ host_snapshot.abs_path(),
+ guest_project.remote_id(),
+ );
assert_eq!(guest_snapshot.scan_id(), host_snapshot.scan_id(),
"{} has different scan id than the host for worktree {:?} and project {:?}",
client.username,
@@ -1151,6 +1179,10 @@ enum ClientOperation {
repo_path: PathBuf,
contents: Vec<(PathBuf, String)>,
},
+ WriteGitBranch {
+ repo_path: PathBuf,
+ new_branch: Option<String>,
+ },
}
#[derive(Clone, Debug, Serialize, Deserialize)]
@@ -1664,10 +1696,11 @@ impl TestPlan {
}
// Update a git index
- 91..=95 => {
+ 91..=93 => {
let repo_path = client
.fs
.directories()
+ .into_iter()
.choose(&mut self.rng)
.unwrap()
.clone();
@@ -1698,6 +1731,24 @@ impl TestPlan {
};
}
+ // Update a git branch
+ 94..=95 => {
+ let repo_path = client
+ .fs
+ .directories()
+ .choose(&mut self.rng)
+ .unwrap()
+ .clone();
+
+ let new_branch = (self.rng.gen_range(0..10) > 3)
+ .then(|| Alphanumeric.sample_string(&mut self.rng, 8));
+
+ break ClientOperation::WriteGitBranch {
+ repo_path,
+ new_branch,
+ };
+ }
+
// Create or update a file or directory
96.. => {
let is_dir = self.rng.gen::<bool>();
diff --git a/crates/collab_ui/src/collab_titlebar_item.rs b/crates/collab_ui/src/collab_titlebar_item.rs
index 69ca64360cbe5363f6ea9811eb8f43f6f1c63a97..7374b166ca6e4eb5b2b7aae5304c556fea5ff526 100644
--- a/crates/collab_ui/src/collab_titlebar_item.rs
+++ b/crates/collab_ui/src/collab_titlebar_item.rs
@@ -24,6 +24,8 @@ use theme::{AvatarStyle, Theme};
use util::ResultExt;
use workspace::{FollowNextCollaborator, Workspace};
+const MAX_TITLE_LENGTH: usize = 75;
+
actions!(
collab,
[
@@ -68,29 +70,11 @@ impl View for CollabTitlebarItem {
};
let project = self.project.read(cx);
- let mut project_title = String::new();
- for (i, name) in project.worktree_root_names(cx).enumerate() {
- if i > 0 {
- project_title.push_str(", ");
- }
- project_title.push_str(name);
- }
- if project_title.is_empty() {
- project_title = "empty project".to_owned();
- }
-
let theme = cx.global::<Settings>().theme.clone();
-
let mut left_container = Flex::row();
let mut right_container = Flex::row().align_children_center();
- left_container.add_child(
- Label::new(project_title, theme.workspace.titlebar.title.clone())
- .contained()
- .with_margin_right(theme.workspace.titlebar.item_spacing)
- .aligned()
- .left(),
- );
+ left_container.add_child(self.collect_title_root_names(&project, theme.clone(), cx));
let user = self.user_store.read(cx).current_user();
let peer_id = self.client.peer_id();
@@ -120,7 +104,21 @@ impl View for CollabTitlebarItem {
Stack::new()
.with_child(left_container)
- .with_child(right_container.aligned().right())
+ .with_child(
+ Flex::row()
+ .with_child(
+ right_container.contained().with_background_color(
+ theme
+ .workspace
+ .titlebar
+ .container
+ .background_color
+ .unwrap_or_else(|| Color::transparent_black()),
+ ),
+ )
+ .aligned()
+ .right(),
+ )
.into_any()
}
}
@@ -137,6 +135,7 @@ impl CollabTitlebarItem {
let active_call = ActiveCall::global(cx);
let mut subscriptions = Vec::new();
subscriptions.push(cx.observe(workspace_handle, |_, _, cx| cx.notify()));
+ subscriptions.push(cx.observe(&project, |_, _, cx| cx.notify()));
subscriptions.push(cx.observe(&active_call, |this, _, cx| this.active_call_changed(cx)));
subscriptions.push(cx.observe_window_activation(|this, active, cx| {
this.window_activation_changed(active, cx)
@@ -181,6 +180,63 @@ impl CollabTitlebarItem {
}
}
+ fn collect_title_root_names(
+ &self,
+ project: &Project,
+ theme: Arc<Theme>,
+ cx: &ViewContext<Self>,
+ ) -> AnyElement<Self> {
+ let names_and_branches = project.visible_worktrees(cx).map(|worktree| {
+ let worktree = worktree.read(cx);
+ (worktree.root_name(), worktree.root_git_entry())
+ });
+
+ fn push_str(buffer: &mut String, index: &mut usize, str: &str) {
+ buffer.push_str(str);
+ *index += str.chars().count();
+ }
+
+ let mut indices = Vec::new();
+ let mut index = 0;
+ let mut title = String::new();
+ let mut names_and_branches = names_and_branches.peekable();
+ while let Some((name, entry)) = names_and_branches.next() {
+ let pre_index = index;
+ push_str(&mut title, &mut index, name);
+ indices.extend((pre_index..index).into_iter());
+ if let Some(branch) = entry.and_then(|entry| entry.branch()) {
+ push_str(&mut title, &mut index, "/");
+ push_str(&mut title, &mut index, &branch);
+ }
+ if names_and_branches.peek().is_some() {
+ push_str(&mut title, &mut index, ", ");
+ if index >= MAX_TITLE_LENGTH {
+ title.push_str(" …");
+ break;
+ }
+ }
+ }
+
+ let text_style = theme.workspace.titlebar.title.clone();
+ let item_spacing = theme.workspace.titlebar.item_spacing;
+
+ let mut highlight = text_style.clone();
+ highlight.color = theme.workspace.titlebar.highlight_color;
+
+ let style = LabelStyle {
+ text: text_style,
+ highlight_text: Some(highlight),
+ };
+
+ Label::new(title, style)
+ .with_highlights(indices)
+ .contained()
+ .with_margin_right(item_spacing)
+ .aligned()
+ .left()
+ .into_any_named("title-with-git-information")
+ }
+
fn window_activation_changed(&mut self, active: bool, cx: &mut ViewContext<Self>) {
let project = if active {
Some(self.project.clone())
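The title logic above highlights only the worktree root names, renders branches as an unhighlighted `/branch` suffix, and appends ` …` once `MAX_TITLE_LENGTH` is exceeded. A standalone sketch of the same string and highlight-index construction, simplified and with no gpui types:

```rust
fn title_with_highlights(roots: &[(&str, Option<&str>)], max_len: usize) -> (String, Vec<usize>) {
    let mut title = String::new();
    let mut indices = Vec::new();
    let mut index = 0;
    let mut roots = roots.iter().peekable();
    while let Some((name, branch)) = roots.next() {
        let start = index;
        title.push_str(name);
        index += name.chars().count();
        indices.extend(start..index); // highlight the root name only
        if let Some(branch) = branch {
            title.push('/');
            title.push_str(branch);
            index += 1 + branch.chars().count();
        }
        if roots.peek().is_some() {
            title.push_str(", ");
            index += 2;
            if index >= max_len {
                title.push_str(" …");
                break;
            }
        }
    }
    (title, indices)
}

// e.g. title_with_highlights(&[("zed", Some("main")), ("docs", None)], 75)
// yields ("zed/main, docs", vec![0, 1, 2, 10, 11, 12, 13]).
```

Note that, as in the method above, the length check only happens at worktree boundaries, so a single long root name or branch is never cut mid-name.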
diff --git a/crates/fs/src/fs.rs b/crates/fs/src/fs.rs
index d856b71e398c58940623773adf79334215e71807..945ffaea16a66e754db72bfa8db23dc56f48c424 100644
--- a/crates/fs/src/fs.rs
+++ b/crates/fs/src/fs.rs
@@ -619,7 +619,10 @@ impl FakeFs {
.boxed()
}
- pub async fn set_index_for_repo(&self, dot_git: &Path, head_state: &[(&Path, String)]) {
+ pub fn with_git_state<F>(&self, dot_git: &Path, f: F)
+ where
+ F: FnOnce(&mut FakeGitRepositoryState),
+ {
let mut state = self.state.lock();
let entry = state.read_path(dot_git).unwrap();
let mut entry = entry.lock();
@@ -628,12 +631,7 @@ impl FakeFs {
let repo_state = git_repo_state.get_or_insert_with(Default::default);
let mut repo_state = repo_state.lock();
- repo_state.index_contents.clear();
- repo_state.index_contents.extend(
- head_state
- .iter()
- .map(|(path, content)| (path.to_path_buf(), content.clone())),
- );
+ f(&mut repo_state);
state.emit_event([dot_git]);
} else {
@@ -641,6 +639,21 @@ impl FakeFs {
}
}
+ pub async fn set_branch_name(&self, dot_git: &Path, branch: Option<impl Into<String>>) {
+ self.with_git_state(dot_git, |state| state.branch_name = branch.map(Into::into))
+ }
+
+ pub async fn set_index_for_repo(&self, dot_git: &Path, head_state: &[(&Path, String)]) {
+ self.with_git_state(dot_git, |state| {
+ state.index_contents.clear();
+ state.index_contents.extend(
+ head_state
+ .iter()
+ .map(|(path, content)| (path.to_path_buf(), content.clone())),
+ );
+ });
+ }
+
pub fn paths(&self) -> Vec<PathBuf> {
let mut result = Vec::new();
let mut queue = collections::VecDeque::new();
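A small usage sketch for tests (paths invented, and it assumes `/dir/.git` was already inserted into the fake tree): `set_branch_name` and `set_index_for_repo` are now both thin wrappers over `with_git_state`, which mutates the repository state under one lock and emits a single fs event for the `.git` path.

```rust
use std::path::Path;

// Assumes an async test context holding a FakeFs handle.
async fn seed_fake_repo(fs: &FakeFs) {
    // Point HEAD at a branch; watchers of "/dir/.git" are notified.
    fs.set_branch_name(Path::new("/dir/.git"), Some("feature-1")).await;

    // Arbitrary extra state can be written through the closure-based helper.
    fs.with_git_state(Path::new("/dir/.git"), |state| {
        state
            .index_contents
            .insert(Path::new("a.txt").into(), "staged contents".to_string());
    });
}
```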
diff --git a/crates/fs/src/repository.rs b/crates/fs/src/repository.rs
index 6ead6f36b5d1b903d02d54f78ef5349234462c76..5624ce42f1dc6e134f4602ff82342835bbc4a39f 100644
--- a/crates/fs/src/repository.rs
+++ b/crates/fs/src/repository.rs
@@ -5,6 +5,7 @@ use std::{
path::{Component, Path, PathBuf},
sync::Arc,
};
+use util::ResultExt;
pub use git2::Repository as LibGitRepository;
@@ -13,6 +14,14 @@ pub trait GitRepository: Send {
fn reload_index(&self);
fn load_index_text(&self, relative_file_path: &Path) -> Option<String>;
+
+ fn branch_name(&self) -> Option<String>;
+}
+
+impl std::fmt::Debug for dyn GitRepository {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ f.debug_struct("dyn GitRepository<...>").finish()
+ }
}
#[async_trait::async_trait]
@@ -46,6 +55,12 @@ impl GitRepository for LibGitRepository {
}
None
}
+
+ fn branch_name(&self) -> Option<String> {
+ let head = self.head().log_err()?;
+ let branch = String::from_utf8_lossy(head.shorthand_bytes());
+ Some(branch.to_string())
+ }
}
#[derive(Debug, Clone, Default)]
@@ -56,6 +71,7 @@ pub struct FakeGitRepository {
#[derive(Debug, Clone, Default)]
pub struct FakeGitRepositoryState {
pub index_contents: HashMap<PathBuf, String>,
+ pub branch_name: Option<String>,
}
impl FakeGitRepository {
@@ -72,6 +88,11 @@ impl GitRepository for FakeGitRepository {
let state = self.state.lock();
state.index_contents.get(path).cloned()
}
+
+ fn branch_name(&self) -> Option<String> {
+ let state = self.state.lock();
+ state.branch_name.clone()
+ }
}
fn check_path_to_repo_path_errors(relative_file_path: &Path) -> Result<()> {
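Because `branch_name` lives on the `GitRepository` trait, callers can read it through the same shared handle the worktree keeps for index lookups; for an attached HEAD, `shorthand_bytes()` yields the branch name such as `main`. A minimal sketch, assuming the `parking_lot::Mutex` used elsewhere in this crate:

```rust
use parking_lot::Mutex;
use std::sync::Arc;

// Works for both LibGitRepository and FakeGitRepository behind the trait object.
fn read_branch(repo: &Arc<Mutex<dyn GitRepository>>) -> Option<String> {
    repo.lock().branch_name()
}
```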
diff --git a/crates/gpui/src/elements/flex.rs b/crates/gpui/src/elements/flex.rs
index e0e8dfc215069b892fbfc1cd25d640fbdb7f18e2..857f3f56fc08b0b24f39011d7f4323838b97dde2 100644
--- a/crates/gpui/src/elements/flex.rs
+++ b/crates/gpui/src/elements/flex.rs
@@ -66,6 +66,10 @@ impl Flex {
self
}
+ pub fn is_empty(&self) -> bool {
+ self.children.is_empty()
+ }
+
fn layout_flex_children(
&mut self,
layout_expanded: bool,
diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs
index 40687ce0a7fa518f61896ac5b42866086edb78a4..b3d432763e52f52dfaef111f26cbb8e1cf1a6b48 100644
--- a/crates/project/src/project.rs
+++ b/crates/project/src/project.rs
@@ -64,6 +64,7 @@ use std::{
},
time::{Duration, Instant, SystemTime},
};
+
use terminals::Terminals;
use util::{debug_panic, defer, merge_json_value_into, post_inc, ResultExt, TryFutureExt as _};
@@ -4695,40 +4696,50 @@ impl Project {
fn update_local_worktree_buffers_git_repos(
&mut self,
- worktree: ModelHandle<Worktree>,
- repos: &[GitRepositoryEntry],
+ worktree_handle: ModelHandle<Worktree>,
+ repos: &HashMap<Arc<Path>, LocalRepositoryEntry>,
cx: &mut ModelContext<Self>,
) {
+ debug_assert!(worktree_handle.read(cx).is_local());
+
for (_, buffer) in &self.opened_buffers {
if let Some(buffer) = buffer.upgrade(cx) {
let file = match File::from_dyn(buffer.read(cx).file()) {
Some(file) => file,
None => continue,
};
- if file.worktree != worktree {
+ if file.worktree != worktree_handle {
continue;
}
let path = file.path().clone();
- let repo = match repos.iter().find(|repo| repo.manages(&path)) {
+ let worktree = worktree_handle.read(cx);
+
+ let (work_directory, repo) = match repos
+ .iter()
+ .find(|(work_directory, _)| path.starts_with(work_directory))
+ {
Some(repo) => repo.clone(),
None => return,
};
- let relative_repo = match path.strip_prefix(repo.content_path) {
- Ok(relative_repo) => relative_repo.to_owned(),
- Err(_) => return,
+ let relative_repo = match path.strip_prefix(work_directory).log_err() {
+ Some(relative_repo) => relative_repo.to_owned(),
+ None => return,
};
+ drop(worktree);
+
let remote_id = self.remote_id();
let client = self.client.clone();
+ let git_ptr = repo.repo_ptr.clone();
+ let diff_base_task = cx
+ .background()
+ .spawn(async move { git_ptr.lock().load_index_text(&relative_repo) });
cx.spawn(|_, mut cx| async move {
- let diff_base = cx
- .background()
- .spawn(async move { repo.repo.lock().load_index_text(&relative_repo) })
- .await;
+ let diff_base = diff_base_task.await;
let buffer_id = buffer.update(&mut cx, |buffer, cx| {
buffer.set_diff_base(diff_base.clone(), cx);
diff --git a/crates/project/src/worktree.rs b/crates/project/src/worktree.rs
index 1281ddeff3f86607cbe5a25990a3171aaab0913e..554304f3d32dd4c223807a3ffbc9028bf93478ee 100644
--- a/crates/project/src/worktree.rs
+++ b/crates/project/src/worktree.rs
@@ -51,7 +51,7 @@ use std::{
},
time::{Duration, SystemTime},
};
-use sum_tree::{Bias, Edit, SeekTarget, SumTree, TreeSet};
+use sum_tree::{Bias, Edit, SeekTarget, SumTree, TreeMap, TreeSet};
use util::{paths::HOME, ResultExt, TryFutureExt};
#[derive(Copy, Clone, PartialEq, Eq, Debug, Hash, PartialOrd, Ord)]
@@ -102,6 +102,7 @@ pub struct Snapshot {
root_char_bag: CharBag,
entries_by_path: SumTree<Entry>,
entries_by_id: SumTree<PathEntry>,
+ repository_entries: TreeMap<RepositoryWorkDirectory, RepositoryEntry>,
/// A number that increases every time the worktree begins scanning
/// a set of paths from the filesystem. This scanning could be caused
@@ -116,45 +117,133 @@ pub struct Snapshot {
completed_scan_id: usize,
}
-#[derive(Clone)]
-pub struct GitRepositoryEntry {
- pub(crate) repo: Arc<Mutex<dyn GitRepository>>,
+#[derive(Clone, Debug, Eq, PartialEq)]
+pub struct RepositoryEntry {
+ pub(crate) work_directory: WorkDirectoryEntry,
+ pub(crate) branch: Option<Arc<str>>,
+}
- pub(crate) scan_id: usize,
- // Path to folder containing the .git file or directory
- pub(crate) content_path: Arc<Path>,
- // Path to the actual .git folder.
- // Note: if .git is a file, this points to the folder indicated by the .git file
- pub(crate) git_dir_path: Arc<Path>,
+impl RepositoryEntry {
+ pub fn branch(&self) -> Option<Arc<str>> {
+ self.branch.clone()
+ }
+
+ pub fn work_directory_id(&self) -> ProjectEntryId {
+ *self.work_directory
+ }
+
+ pub fn work_directory(&self, snapshot: &Snapshot) -> Option<RepositoryWorkDirectory> {
+ snapshot
+ .entry_for_id(self.work_directory_id())
+ .map(|entry| RepositoryWorkDirectory(entry.path.clone()))
+ }
+
+ pub(crate) fn contains(&self, snapshot: &Snapshot, path: &Path) -> bool {
+ self.work_directory.contains(snapshot, path)
+ }
}
-impl std::fmt::Debug for GitRepositoryEntry {
- fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
- f.debug_struct("GitRepositoryEntry")
- .field("content_path", &self.content_path)
- .field("git_dir_path", &self.git_dir_path)
- .finish()
+impl From<&RepositoryEntry> for proto::RepositoryEntry {
+ fn from(value: &RepositoryEntry) -> Self {
+ proto::RepositoryEntry {
+ work_directory_id: value.work_directory.to_proto(),
+ branch: value.branch.as_ref().map(|str| str.to_string()),
+ }
}
}
-#[derive(Debug)]
+/// This path corresponds to the 'content path' (the folder that contains the .git)
+#[derive(Clone, Debug, Ord, PartialOrd, Eq, PartialEq)]
+pub struct RepositoryWorkDirectory(Arc<Path>);
+
+impl Default for RepositoryWorkDirectory {
+ fn default() -> Self {
+ RepositoryWorkDirectory(Arc::from(Path::new("")))
+ }
+}
+
+#[derive(Clone, Debug, Ord, PartialOrd, Eq, PartialEq)]
+pub struct WorkDirectoryEntry(ProjectEntryId);
+
+impl WorkDirectoryEntry {
+ // Note that these paths should be relative to the worktree root.
+ pub(crate) fn contains(&self, snapshot: &Snapshot, path: &Path) -> bool {
+ snapshot
+ .entry_for_id(self.0)
+ .map(|entry| path.starts_with(&entry.path))
+ .unwrap_or(false)
+ }
+
+ pub(crate) fn relativize(&self, worktree: &Snapshot, path: &Path) -> Option<RepoPath> {
+ worktree.entry_for_id(self.0).and_then(|entry| {
+ path.strip_prefix(&entry.path)
+ .ok()
+ .map(move |path| RepoPath(path.to_owned()))
+ })
+ }
+}
+
+impl Deref for WorkDirectoryEntry {
+ type Target = ProjectEntryId;
+
+ fn deref(&self) -> &Self::Target {
+ &self.0
+ }
+}
+
+impl From<ProjectEntryId> for WorkDirectoryEntry {
+ fn from(value: ProjectEntryId) -> Self {
+ WorkDirectoryEntry(value)
+ }
+}
+
+#[derive(Clone, Debug, Ord, PartialOrd, Eq, PartialEq)]
+pub struct RepoPath(PathBuf);
+
+impl AsRef<Path> for RepoPath {
+ fn as_ref(&self) -> &Path {
+ self.0.as_ref()
+ }
+}
+
+impl Deref for RepoPath {
+ type Target = PathBuf;
+
+ fn deref(&self) -> &Self::Target {
+ &self.0
+ }
+}
+
+impl AsRef<Path> for RepositoryWorkDirectory {
+ fn as_ref(&self) -> &Path {
+ self.0.as_ref()
+ }
+}
+
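The newtypes above encode path roles: a RepositoryWorkDirectory is the worktree-relative folder that contains `.git`, and a RepoPath is a path rebased onto that folder. Stripped of the snapshot lookup, `contains` and `relativize` reduce to prefix operations; the example paths below are invented.

```rust
use std::path::{Path, PathBuf};

// What contains()/relativize() compute once the work directory entry's path is known.
fn contains(work_directory: &Path, path: &Path) -> bool {
    path.starts_with(work_directory)
}

fn relativize(work_directory: &Path, path: &Path) -> Option<PathBuf> {
    path.strip_prefix(work_directory).ok().map(Path::to_path_buf)
}

fn main() {
    let work_dir = Path::new("crates/fs");
    assert!(contains(work_dir, Path::new("crates/fs/src/repository.rs")));
    assert_eq!(
        relativize(work_dir, Path::new("crates/fs/src/repository.rs")),
        Some(PathBuf::from("src/repository.rs")) // i.e. a RepoPath
    );
}
```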
+#[derive(Debug, Clone)]
pub struct LocalSnapshot {
ignores_by_parent_abs_path: HashMap<Arc<Path>, (Arc<Gitignore>, usize)>,
- git_repositories: Vec<GitRepositoryEntry>,
+ // The ProjectEntryId keys are the entries for the repositories' work directories
+ // (work_directory_id), i.e. the folders that contain each .git dir.
+ git_repositories: TreeMap<ProjectEntryId, LocalRepositoryEntry>,
removed_entry_ids: HashMap<u64, ProjectEntryId>,
next_entry_id: Arc<AtomicUsize>,
snapshot: Snapshot,
}
-impl Clone for LocalSnapshot {
- fn clone(&self) -> Self {
- Self {
- ignores_by_parent_abs_path: self.ignores_by_parent_abs_path.clone(),
- git_repositories: self.git_repositories.iter().cloned().collect(),
- removed_entry_ids: self.removed_entry_ids.clone(),
- next_entry_id: self.next_entry_id.clone(),
- snapshot: self.snapshot.clone(),
- }
+#[derive(Debug, Clone)]
+pub struct LocalRepositoryEntry {
+ pub(crate) scan_id: usize,
+ pub(crate) repo_ptr: Arc<Mutex<dyn GitRepository>>,
+ /// Path to the actual .git folder.
+ /// Note: if .git is a file, this points to the folder indicated by the .git file
+ pub(crate) git_dir_path: Arc<Path>,
+}
+
+impl LocalRepositoryEntry {
+ // Note that this path should be relative to the worktree root.
+ pub(crate) fn in_dot_git(&self, path: &Path) -> bool {
+ path.starts_with(self.git_dir_path.as_ref())
}
}
@@ -191,7 +280,7 @@ struct ShareState {
pub enum Event {
UpdatedEntries(HashMap<Arc<Path>, PathChange>),
- UpdatedGitRepositories(Vec),
+ UpdatedGitRepositories(HashMap<Arc<Path>, LocalRepositoryEntry>),
}
impl Entity for Worktree {
@@ -222,8 +311,8 @@ impl Worktree {
let mut snapshot = LocalSnapshot {
ignores_by_parent_abs_path: Default::default(),
- git_repositories: Default::default(),
removed_entry_ids: Default::default(),
+ git_repositories: Default::default(),
next_entry_id,
snapshot: Snapshot {
id: WorktreeId::from_usize(cx.model_id()),
@@ -232,6 +321,7 @@ impl Worktree {
root_char_bag: root_name.chars().map(|c| c.to_ascii_lowercase()).collect(),
entries_by_path: Default::default(),
entries_by_id: Default::default(),
+ repository_entries: Default::default(),
scan_id: 1,
completed_scan_id: 0,
},
@@ -330,6 +420,7 @@ impl Worktree {
.collect(),
entries_by_path: Default::default(),
entries_by_id: Default::default(),
+ repository_entries: Default::default(),
scan_id: 1,
completed_scan_id: 0,
};
@@ -598,10 +689,8 @@ impl LocalWorktree {
}
fn set_snapshot(&mut self, new_snapshot: LocalSnapshot, cx: &mut ModelContext<Worktree>) {
- let updated_repos = Self::changed_repos(
- &self.snapshot.git_repositories,
- &new_snapshot.git_repositories,
- );
+ let updated_repos =
+ self.changed_repos(&self.git_repositories, &new_snapshot.git_repositories);
self.snapshot = new_snapshot;
if let Some(share) = self.share.as_mut() {
@@ -614,31 +703,57 @@ impl LocalWorktree {
}
fn changed_repos(
- old_repos: &[GitRepositoryEntry],
- new_repos: &[GitRepositoryEntry],
- ) -> Vec {
- fn diff<'a>(
- a: &'a [GitRepositoryEntry],
- b: &'a [GitRepositoryEntry],
- updated: &mut HashMap<&'a Path, GitRepositoryEntry>,
- ) {
- for a_repo in a {
- let matched = b.iter().find(|b_repo| {
- a_repo.git_dir_path == b_repo.git_dir_path && a_repo.scan_id == b_repo.scan_id
- });
+ &self,
+ old_repos: &TreeMap<ProjectEntryId, LocalRepositoryEntry>,
+ new_repos: &TreeMap<ProjectEntryId, LocalRepositoryEntry>,
+ ) -> HashMap<Arc<Path>, LocalRepositoryEntry> {
+ let mut diff = HashMap::default();
+ let mut old_repos = old_repos.iter().peekable();
+ let mut new_repos = new_repos.iter().peekable();
+ loop {
+ match (old_repos.peek(), new_repos.peek()) {
+ (Some((old_entry_id, old_repo)), Some((new_entry_id, new_repo))) => {
+ match Ord::cmp(old_entry_id, new_entry_id) {
+ Ordering::Less => {
+ if let Some(entry) = self.entry_for_id(**old_entry_id) {
+ diff.insert(entry.path.clone(), (*old_repo).clone());
+ }
+ old_repos.next();
+ }
+ Ordering::Equal => {
+ if old_repo.scan_id != new_repo.scan_id {
+ if let Some(entry) = self.entry_for_id(**new_entry_id) {
+ diff.insert(entry.path.clone(), (*new_repo).clone());
+ }
+ }
- if matched.is_none() {
- updated.insert(a_repo.git_dir_path.as_ref(), a_repo.clone());
+ old_repos.next();
+ new_repos.next();
+ }
+ Ordering::Greater => {
+ if let Some(entry) = self.entry_for_id(**new_entry_id) {
+ diff.insert(entry.path.clone(), (*new_repo).clone());
+ }
+ new_repos.next();
+ }
+ }
+ }
+ (Some((old_entry_id, old_repo)), None) => {
+ if let Some(entry) = self.entry_for_id(**old_entry_id) {
+ diff.insert(entry.path.clone(), (*old_repo).clone());
+ }
+ old_repos.next();
+ }
+ (None, Some((new_entry_id, new_repo))) => {
+ if let Some(entry) = self.entry_for_id(**new_entry_id) {
+ diff.insert(entry.path.clone(), (*new_repo).clone());
+ }
+ new_repos.next();
}
+ (None, None) => break,
}
}
-
- let mut updated = HashMap::<&Path, GitRepositoryEntry>::default();
-
- diff(old_repos, new_repos, &mut updated);
- diff(new_repos, old_repos, &mut updated);
-
- updated.into_values().collect()
+ diff
}
pub fn scan_complete(&self) -> impl Future<Output = ()> {
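`changed_repos` above is a merge join over two maps that are both ordered by `ProjectEntryId`: an id present only in the old map is a removed repository, one present only in the new map is an added repository, and one present in both with a different `scan_id` has been rescanned. The same three-way walk in miniature, with `BTreeMap` standing in for `TreeMap`, `u64` for the entry id, and `usize` for the scan id:

```rust
use std::cmp::Ordering;
use std::collections::BTreeMap;

fn changed_ids(old: &BTreeMap<u64, usize>, new: &BTreeMap<u64, usize>) -> Vec<u64> {
    let mut changed = Vec::new();
    let mut old_iter = old.iter().peekable();
    let mut new_iter = new.iter().peekable();
    loop {
        match (old_iter.peek(), new_iter.peek()) {
            (Some((old_id, old_scan)), Some((new_id, new_scan))) => match old_id.cmp(new_id) {
                Ordering::Less => {
                    changed.push(**old_id); // repository removed
                    old_iter.next();
                }
                Ordering::Equal => {
                    if old_scan != new_scan {
                        changed.push(**new_id); // repository rescanned (e.g. branch changed)
                    }
                    old_iter.next();
                    new_iter.next();
                }
                Ordering::Greater => {
                    changed.push(**new_id); // repository added
                    new_iter.next();
                }
            },
            (Some((old_id, _)), None) => {
                changed.push(**old_id); // removed
                old_iter.next();
            }
            (None, Some((new_id, _))) => {
                changed.push(**new_id); // added
                new_iter.next();
            }
            (None, None) => break,
        }
    }
    changed
}
```

In the real method each changed id is resolved back to its work-directory path via `entry_for_id`, which is what the `UpdatedGitRepositories` event ultimately carries to the project.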