Merge pull request #2428 from zed-industries/add-branch-name

Created by Mikayla Maki

Add the git branch name to the titlebar and synchronize repository representations on the worktree.
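
At a high level, each local worktree now tracks its git repositories' work directories and current branches, and replicates them to collaborators through the new updated_repositories / removed_repositories fields on the UpdateWorktree message; the titlebar then renders the branch next to each worktree root name. The branch itself is read through libgit2. A minimal, self-contained sketch of that lookup with the git2 crate (a sketch only, not the PR's exact code, which lives in crates/fs/src/repository.rs below):

    use git2::Repository;

    fn current_branch(repo: &Repository) -> Option<String> {
        // Resolve HEAD and take its short name ("main" rather than "refs/heads/main").
        // A repository with no commits yet has no resolvable HEAD and yields None.
        let head = repo.head().ok()?;
        head.shorthand().map(str::to_owned)
    }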

Change summary

assets/icons/version_control_branch_12.svg                         |   1 
crates/collab/migrations.sqlite/20221109000000_test_schema.sql     |  16 
crates/collab/migrations/20230508211523_add-repository-entries.sql |  13 
crates/collab/src/db.rs                                            | 174 
crates/collab/src/db/worktree_repository.rs                        |  21 
crates/collab/src/rpc.rs                                           |   4 
crates/collab/src/tests/integration_tests.rs                       |  90 
crates/collab/src/tests/randomized_integration_tests.rs            |  53 
crates/collab_ui/src/collab_titlebar_item.rs                       |  96 
crates/fs/src/fs.rs                                                |  27 
crates/fs/src/repository.rs                                        |  21 
crates/gpui/src/elements/flex.rs                                   |   4 
crates/project/src/project.rs                                      |  33 
crates/project/src/worktree.rs                                     | 597 
crates/rpc/proto/zed.proto                                         |  13 
crates/rpc/src/proto.rs                                            |  19 
crates/rpc/src/rpc.rs                                              |   2 
crates/sum_tree/src/tree_map.rs                                    |  65 
crates/theme/src/theme.rs                                          |   1 
styles/src/styleTree/workspace.ts                                  |   1 
20 files changed, 979 insertions(+), 272 deletions(-)

Detailed changes

crates/collab/migrations.sqlite/20221109000000_test_schema.sql 🔗

@@ -82,6 +82,20 @@ CREATE TABLE "worktree_entries" (
 CREATE INDEX "index_worktree_entries_on_project_id" ON "worktree_entries" ("project_id");
 CREATE INDEX "index_worktree_entries_on_project_id_and_worktree_id" ON "worktree_entries" ("project_id", "worktree_id");
 
+CREATE TABLE "worktree_repositories" (
+    "project_id" INTEGER NOT NULL,
+    "worktree_id" INTEGER NOT NULL,
+    "work_directory_id" INTEGER NOT NULL,
+    "scan_id" INTEGER NOT NULL,
+    "branch" VARCHAR,
+    "is_deleted" BOOL NOT NULL,
+    PRIMARY KEY(project_id, worktree_id, work_directory_id),
+    FOREIGN KEY(project_id, worktree_id) REFERENCES worktrees (project_id, id) ON DELETE CASCADE,
+    FOREIGN KEY(project_id, worktree_id, work_directory_id) REFERENCES worktree_entries (project_id, worktree_id, id) ON DELETE CASCADE
+);
+CREATE INDEX "index_worktree_repositories_on_project_id" ON "worktree_repositories" ("project_id");
+CREATE INDEX "index_worktree_repositories_on_project_id_and_worktree_id" ON "worktree_repositories" ("project_id", "worktree_id");
+
 CREATE TABLE "worktree_diagnostic_summaries" (
     "project_id" INTEGER NOT NULL,
     "worktree_id" INTEGER NOT NULL,
@@ -153,7 +167,7 @@ CREATE TABLE "followers" (
     "follower_connection_server_id" INTEGER NOT NULL REFERENCES servers (id) ON DELETE CASCADE,
     "follower_connection_id" INTEGER NOT NULL
 );
-CREATE UNIQUE INDEX 
+CREATE UNIQUE INDEX
     "index_followers_on_project_id_and_leader_connection_server_id_and_leader_connection_id_and_follower_connection_server_id_and_follower_connection_id"
 ON "followers" ("project_id", "leader_connection_server_id", "leader_connection_id", "follower_connection_server_id", "follower_connection_id");
 CREATE INDEX "index_followers_on_room_id" ON "followers" ("room_id");

crates/collab/migrations/20230508211523_add-repository-entries.sql 🔗

@@ -0,0 +1,13 @@
+CREATE TABLE "worktree_repositories" (
+    "project_id" INTEGER NOT NULL,
+    "worktree_id" INT8 NOT NULL,
+    "work_directory_id" INT8 NOT NULL,
+    "scan_id" INT8 NOT NULL,
+    "branch" VARCHAR,
+    "is_deleted" BOOL NOT NULL,
+    PRIMARY KEY(project_id, worktree_id, work_directory_id),
+    FOREIGN KEY(project_id, worktree_id) REFERENCES worktrees (project_id, id) ON DELETE CASCADE,
+    FOREIGN KEY(project_id, worktree_id, work_directory_id) REFERENCES worktree_entries (project_id, worktree_id, id) ON DELETE CASCADE
+);
+CREATE INDEX "index_worktree_repositories_on_project_id" ON "worktree_repositories" ("project_id");
+CREATE INDEX "index_worktree_repositories_on_project_id_and_worktree_id" ON "worktree_repositories" ("project_id", "worktree_id");

crates/collab/src/db.rs 🔗

@@ -14,6 +14,7 @@ mod user;
 mod worktree;
 mod worktree_diagnostic_summary;
 mod worktree_entry;
+mod worktree_repository;
 
 use crate::executor::Executor;
 use crate::{Error, Result};
@@ -1489,6 +1490,8 @@ impl Database {
                         visible: db_worktree.visible,
                         updated_entries: Default::default(),
                         removed_entries: Default::default(),
+                        updated_repositories: Default::default(),
+                        removed_repositories: Default::default(),
                         diagnostic_summaries: Default::default(),
                         scan_id: db_worktree.scan_id as u64,
                         completed_scan_id: db_worktree.completed_scan_id as u64,
@@ -1498,38 +1501,75 @@ impl Database {
                         .worktrees
                         .iter()
                         .find(|worktree| worktree.id == db_worktree.id as u64);
-                    let entry_filter = if let Some(rejoined_worktree) = rejoined_worktree {
-                        worktree_entry::Column::ScanId.gt(rejoined_worktree.scan_id)
-                    } else {
-                        worktree_entry::Column::IsDeleted.eq(false)
-                    };
-
-                    let mut db_entries = worktree_entry::Entity::find()
-                        .filter(
-                            Condition::all()
-                                .add(worktree_entry::Column::WorktreeId.eq(worktree.id))
-                                .add(entry_filter),
-                        )
-                        .stream(&*tx)
-                        .await?;
 
-                    while let Some(db_entry) = db_entries.next().await {
-                        let db_entry = db_entry?;
-                        if db_entry.is_deleted {
-                            worktree.removed_entries.push(db_entry.id as u64);
+                    // File entries
+                    {
+                        let entry_filter = if let Some(rejoined_worktree) = rejoined_worktree {
+                            worktree_entry::Column::ScanId.gt(rejoined_worktree.scan_id)
                         } else {
-                            worktree.updated_entries.push(proto::Entry {
-                                id: db_entry.id as u64,
-                                is_dir: db_entry.is_dir,
-                                path: db_entry.path,
-                                inode: db_entry.inode as u64,
-                                mtime: Some(proto::Timestamp {
-                                    seconds: db_entry.mtime_seconds as u64,
-                                    nanos: db_entry.mtime_nanos as u32,
-                                }),
-                                is_symlink: db_entry.is_symlink,
-                                is_ignored: db_entry.is_ignored,
-                            });
+                            worktree_entry::Column::IsDeleted.eq(false)
+                        };
+
+                        let mut db_entries = worktree_entry::Entity::find()
+                            .filter(
+                                Condition::all()
+                                    .add(worktree_entry::Column::WorktreeId.eq(worktree.id))
+                                    .add(entry_filter),
+                            )
+                            .stream(&*tx)
+                            .await?;
+
+                        while let Some(db_entry) = db_entries.next().await {
+                            let db_entry = db_entry?;
+                            if db_entry.is_deleted {
+                                worktree.removed_entries.push(db_entry.id as u64);
+                            } else {
+                                worktree.updated_entries.push(proto::Entry {
+                                    id: db_entry.id as u64,
+                                    is_dir: db_entry.is_dir,
+                                    path: db_entry.path,
+                                    inode: db_entry.inode as u64,
+                                    mtime: Some(proto::Timestamp {
+                                        seconds: db_entry.mtime_seconds as u64,
+                                        nanos: db_entry.mtime_nanos as u32,
+                                    }),
+                                    is_symlink: db_entry.is_symlink,
+                                    is_ignored: db_entry.is_ignored,
+                                });
+                            }
+                        }
+                    }
+
+                    // Repository Entries
+                    {
+                        let repository_entry_filter =
+                            if let Some(rejoined_worktree) = rejoined_worktree {
+                                worktree_repository::Column::ScanId.gt(rejoined_worktree.scan_id)
+                            } else {
+                                worktree_repository::Column::IsDeleted.eq(false)
+                            };
+
+                        let mut db_repositories = worktree_repository::Entity::find()
+                            .filter(
+                                Condition::all()
+                                    .add(worktree_repository::Column::WorktreeId.eq(worktree.id))
+                                    .add(repository_entry_filter),
+                            )
+                            .stream(&*tx)
+                            .await?;
+
+                        while let Some(db_repository) = db_repositories.next().await {
+                            let db_repository = db_repository?;
+                            if db_repository.is_deleted {
+                                worktree
+                                    .removed_repositories
+                                    .push(db_repository.work_directory_id as u64);
+                            } else {
+                                worktree.updated_repositories.push(proto::RepositoryEntry {
+                                    work_directory_id: db_repository.work_directory_id as u64,
+                                    branch: db_repository.branch,
+                                });
+                            }
                         }
                     }
 
@@ -2330,6 +2370,53 @@ impl Database {
                     .await?;
             }
 
+            if !update.updated_repositories.is_empty() {
+                worktree_repository::Entity::insert_many(update.updated_repositories.iter().map(
+                    |repository| worktree_repository::ActiveModel {
+                        project_id: ActiveValue::set(project_id),
+                        worktree_id: ActiveValue::set(worktree_id),
+                        work_directory_id: ActiveValue::set(repository.work_directory_id as i64),
+                        scan_id: ActiveValue::set(update.scan_id as i64),
+                        branch: ActiveValue::set(repository.branch.clone()),
+                        is_deleted: ActiveValue::set(false),
+                    },
+                ))
+                .on_conflict(
+                    OnConflict::columns([
+                        worktree_repository::Column::ProjectId,
+                        worktree_repository::Column::WorktreeId,
+                        worktree_repository::Column::WorkDirectoryId,
+                    ])
+                    .update_columns([
+                        worktree_repository::Column::ScanId,
+                        worktree_repository::Column::Branch,
+                    ])
+                    .to_owned(),
+                )
+                .exec(&*tx)
+                .await?;
+            }
+
+            if !update.removed_repositories.is_empty() {
+                worktree_repository::Entity::update_many()
+                    .filter(
+                        worktree_repository::Column::ProjectId
+                            .eq(project_id)
+                            .and(worktree_repository::Column::WorktreeId.eq(worktree_id))
+                            .and(
+                                worktree_repository::Column::WorkDirectoryId
+                                    .is_in(update.removed_repositories.iter().map(|id| *id as i64)),
+                            ),
+                    )
+                    .set(worktree_repository::ActiveModel {
+                        is_deleted: ActiveValue::Set(true),
+                        scan_id: ActiveValue::Set(update.scan_id as i64),
+                        ..Default::default()
+                    })
+                    .exec(&*tx)
+                    .await?;
+            }
+
             let connection_ids = self.project_guest_connection_ids(project_id, &tx).await?;
             Ok(connection_ids)
         })
@@ -2505,6 +2592,7 @@ impl Database {
                             root_name: db_worktree.root_name,
                             visible: db_worktree.visible,
                             entries: Default::default(),
+                            repository_entries: Default::default(),
                             diagnostic_summaries: Default::default(),
                             scan_id: db_worktree.scan_id as u64,
                             completed_scan_id: db_worktree.completed_scan_id as u64,
@@ -2542,6 +2630,29 @@ impl Database {
                 }
             }
 
+            // Populate repository entries.
+            {
+                let mut db_repository_entries = worktree_repository::Entity::find()
+                    .filter(
+                        Condition::all()
+                            .add(worktree_repository::Column::ProjectId.eq(project_id))
+                            .add(worktree_repository::Column::IsDeleted.eq(false)),
+                    )
+                    .stream(&*tx)
+                    .await?;
+                while let Some(db_repository_entry) = db_repository_entries.next().await {
+                    let db_repository_entry = db_repository_entry?;
+                    if let Some(worktree) =
+                        worktrees.get_mut(&(db_repository_entry.worktree_id as u64))
+                    {
+                        worktree.repository_entries.push(proto::RepositoryEntry {
+                            work_directory_id: db_repository_entry.work_directory_id as u64,
+                            branch: db_repository_entry.branch,
+                        });
+                    }
+                }
+            }
+
             // Populate worktree diagnostic summaries.
             {
                 let mut db_summaries = worktree_diagnostic_summary::Entity::find()
@@ -3223,6 +3334,8 @@ pub struct RejoinedWorktree {
     pub visible: bool,
     pub updated_entries: Vec<proto::Entry>,
     pub removed_entries: Vec<u64>,
+    pub updated_repositories: Vec<proto::RepositoryEntry>,
+    pub removed_repositories: Vec<u64>,
     pub diagnostic_summaries: Vec<proto::DiagnosticSummary>,
     pub scan_id: u64,
     pub completed_scan_id: u64,
@@ -3277,6 +3390,7 @@ pub struct Worktree {
     pub root_name: String,
     pub visible: bool,
     pub entries: Vec<proto::Entry>,
+    pub repository_entries: Vec<proto::RepositoryEntry>,
     pub diagnostic_summaries: Vec<proto::DiagnosticSummary>,
     pub scan_id: u64,
     pub completed_scan_id: u64,

crates/collab/src/db/worktree_repository.rs 🔗

@@ -0,0 +1,21 @@
+use super::ProjectId;
+use sea_orm::entity::prelude::*;
+
+#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
+#[sea_orm(table_name = "worktree_repositories")]
+pub struct Model {
+    #[sea_orm(primary_key)]
+    pub project_id: ProjectId,
+    #[sea_orm(primary_key)]
+    pub worktree_id: i64,
+    #[sea_orm(primary_key)]
+    pub work_directory_id: i64,
+    pub scan_id: i64,
+    pub branch: Option<String>,
+    pub is_deleted: bool,
+}
+
+#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
+pub enum Relation {}
+
+impl ActiveModelBehavior for ActiveModel {}

crates/collab/src/rpc.rs 🔗

@@ -1063,6 +1063,8 @@ async fn rejoin_room(
                     removed_entries: worktree.removed_entries,
                     scan_id: worktree.scan_id,
                     is_last_update: worktree.completed_scan_id == worktree.scan_id,
+                    updated_repositories: worktree.updated_repositories,
+                    removed_repositories: worktree.removed_repositories,
                 };
                 for update in proto::split_worktree_update(message, MAX_CHUNK_SIZE) {
                     session.peer.send(session.connection_id, update.clone())?;
@@ -1383,6 +1385,8 @@ async fn join_project(
             removed_entries: Default::default(),
             scan_id: worktree.scan_id,
             is_last_update: worktree.scan_id == worktree.completed_scan_id,
+            updated_repositories: worktree.repository_entries,
+            removed_repositories: Default::default(),
         };
         for update in proto::split_worktree_update(message, MAX_CHUNK_SIZE) {
             session.peer.send(session.connection_id, update.clone())?;

crates/collab/src/tests/integration_tests.rs 🔗

@@ -13,8 +13,8 @@ use editor::{
 use fs::{FakeFs, Fs as _, LineEnding, RemoveOptions};
 use futures::StreamExt as _;
 use gpui::{
-    executor::Deterministic, geometry::vector::vec2f, test::EmptyView, ModelHandle, TestAppContext,
-    ViewHandle,
+    executor::Deterministic, geometry::vector::vec2f, test::EmptyView, AppContext, ModelHandle,
+    TestAppContext, ViewHandle,
 };
 use indoc::indoc;
 use language::{
@@ -2604,6 +2604,92 @@ async fn test_git_diff_base_change(
     });
 }
 
+#[gpui::test]
+async fn test_git_branch_name(
+    deterministic: Arc<Deterministic>,
+    cx_a: &mut TestAppContext,
+    cx_b: &mut TestAppContext,
+    cx_c: &mut TestAppContext,
+) {
+    deterministic.forbid_parking();
+    let mut server = TestServer::start(&deterministic).await;
+    let client_a = server.create_client(cx_a, "user_a").await;
+    let client_b = server.create_client(cx_b, "user_b").await;
+    let client_c = server.create_client(cx_c, "user_c").await;
+    server
+        .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b), (&client_c, cx_c)])
+        .await;
+    let active_call_a = cx_a.read(ActiveCall::global);
+
+    client_a
+        .fs
+        .insert_tree(
+            "/dir",
+            json!({
+            ".git": {},
+            }),
+        )
+        .await;
+
+    let (project_local, _worktree_id) = client_a.build_local_project("/dir", cx_a).await;
+    let project_id = active_call_a
+        .update(cx_a, |call, cx| {
+            call.share_project(project_local.clone(), cx)
+        })
+        .await
+        .unwrap();
+
+    let project_remote = client_b.build_remote_project(project_id, cx_b).await;
+    client_a
+        .fs
+        .as_fake()
+        .set_branch_name(Path::new("/dir/.git"), Some("branch-1"))
+        .await;
+
+    // Wait for it to catch up to the new branch
+    deterministic.run_until_parked();
+
+    #[track_caller]
+    fn assert_branch(branch_name: Option<impl Into<String>>, project: &Project, cx: &AppContext) {
+        let branch_name = branch_name.map(Into::into);
+        let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
+        assert_eq!(worktrees.len(), 1);
+        let worktree = worktrees[0].clone();
+        let root_entry = worktree.read(cx).snapshot().root_git_entry().unwrap();
+        assert_eq!(root_entry.branch(), branch_name.map(Into::into));
+    }
+
+    // Smoke test branch reading
+    project_local.read_with(cx_a, |project, cx| {
+        assert_branch(Some("branch-1"), project, cx)
+    });
+    project_remote.read_with(cx_b, |project, cx| {
+        assert_branch(Some("branch-1"), project, cx)
+    });
+
+    client_a
+        .fs
+        .as_fake()
+        .set_branch_name(Path::new("/dir/.git"), Some("branch-2"))
+        .await;
+
+    // Wait for buffer_local_a to receive it
+    deterministic.run_until_parked();
+
+    // Smoke test branch reading
+    project_local.read_with(cx_a, |project, cx| {
+        assert_branch(Some("branch-2"), project, cx)
+    });
+    project_remote.read_with(cx_b, |project, cx| {
+        assert_branch(Some("branch-2"), project, cx)
+    });
+
+    let project_remote_c = client_c.build_remote_project(project_id, cx_c).await;
+    project_remote_c.read_with(cx_c, |project, cx| {
+        assert_branch(Some("branch-2"), project, cx)
+    });
+}
+
 #[gpui::test(iterations = 10)]
 async fn test_fs_operations(
     deterministic: Arc<Deterministic>,

crates/collab/src/tests/randomized_integration_tests.rs 🔗

@@ -785,6 +785,28 @@ async fn apply_client_operation(
             }
             client.fs.set_index_for_repo(&dot_git_dir, &contents).await;
         }
+
+        ClientOperation::WriteGitBranch {
+            repo_path,
+            new_branch,
+        } => {
+            if !client.fs.directories().contains(&repo_path) {
+                return Err(TestError::Inapplicable);
+            }
+
+            log::info!(
+                "{}: writing git branch for repo {:?}: {:?}",
+                client.username,
+                repo_path,
+                new_branch
+            );
+
+            let dot_git_dir = repo_path.join(".git");
+            if client.fs.metadata(&dot_git_dir).await?.is_none() {
+                client.fs.create_dir(&dot_git_dir).await?;
+            }
+            client.fs.set_branch_name(&dot_git_dir, new_branch).await;
+        }
     }
     Ok(())
 }
@@ -859,6 +881,12 @@ fn check_consistency_between_clients(clients: &[(Rc<TestClient>, TestAppContext)
                                 host_snapshot.abs_path(),
                                 guest_project.remote_id(),
                             );
+                            assert_eq!(guest_snapshot.repositories().collect::<Vec<_>>(), host_snapshot.repositories().collect::<Vec<_>>(),
+                                "{} has different repositories than the host for worktree {:?} and project {:?}",
+                                client.username,
+                                host_snapshot.abs_path(),
+                                guest_project.remote_id(),
+                            );
                             assert_eq!(guest_snapshot.scan_id(), host_snapshot.scan_id(),
                                 "{} has different scan id than the host for worktree {:?} and project {:?}",
                                 client.username,
@@ -1151,6 +1179,10 @@ enum ClientOperation {
         repo_path: PathBuf,
         contents: Vec<(PathBuf, String)>,
     },
+    WriteGitBranch {
+        repo_path: PathBuf,
+        new_branch: Option<String>,
+    },
 }
 
 #[derive(Clone, Debug, Serialize, Deserialize)]
@@ -1664,10 +1696,11 @@ impl TestPlan {
                 }
 
                 // Update a git index
-                91..=95 => {
+                91..=93 => {
                     let repo_path = client
                         .fs
                         .directories()
+                        .into_iter()
                         .choose(&mut self.rng)
                         .unwrap()
                         .clone();
@@ -1698,6 +1731,24 @@ impl TestPlan {
                     };
                 }
 
+                // Update a git branch
+                94..=95 => {
+                    let repo_path = client
+                        .fs
+                        .directories()
+                        .choose(&mut self.rng)
+                        .unwrap()
+                        .clone();
+
+                    let new_branch = (self.rng.gen_range(0..10) > 3)
+                        .then(|| Alphanumeric.sample_string(&mut self.rng, 8));
+
+                    break ClientOperation::WriteGitBranch {
+                        repo_path,
+                        new_branch,
+                    };
+                }
+
                 // Create or update a file or directory
                 96.. => {
                     let is_dir = self.rng.gen::<bool>();

crates/collab_ui/src/collab_titlebar_item.rs 🔗

@@ -24,6 +24,8 @@ use theme::{AvatarStyle, Theme};
 use util::ResultExt;
 use workspace::{FollowNextCollaborator, Workspace};
 
+const MAX_TITLE_LENGTH: usize = 75;
+
 actions!(
     collab,
     [
@@ -68,29 +70,11 @@ impl View for CollabTitlebarItem {
         };
 
         let project = self.project.read(cx);
-        let mut project_title = String::new();
-        for (i, name) in project.worktree_root_names(cx).enumerate() {
-            if i > 0 {
-                project_title.push_str(", ");
-            }
-            project_title.push_str(name);
-        }
-        if project_title.is_empty() {
-            project_title = "empty project".to_owned();
-        }
-
         let theme = cx.global::<Settings>().theme.clone();
-
         let mut left_container = Flex::row();
         let mut right_container = Flex::row().align_children_center();
 
-        left_container.add_child(
-            Label::new(project_title, theme.workspace.titlebar.title.clone())
-                .contained()
-                .with_margin_right(theme.workspace.titlebar.item_spacing)
-                .aligned()
-                .left(),
-        );
+        left_container.add_child(self.collect_title_root_names(&project, theme.clone(), cx));
 
         let user = self.user_store.read(cx).current_user();
         let peer_id = self.client.peer_id();
@@ -120,7 +104,21 @@ impl View for CollabTitlebarItem {
 
         Stack::new()
             .with_child(left_container)
-            .with_child(right_container.aligned().right())
+            .with_child(
+                Flex::row()
+                    .with_child(
+                        right_container.contained().with_background_color(
+                            theme
+                                .workspace
+                                .titlebar
+                                .container
+                                .background_color
+                                .unwrap_or_else(|| Color::transparent_black()),
+                        ),
+                    )
+                    .aligned()
+                    .right(),
+            )
             .into_any()
     }
 }
@@ -137,6 +135,7 @@ impl CollabTitlebarItem {
         let active_call = ActiveCall::global(cx);
         let mut subscriptions = Vec::new();
         subscriptions.push(cx.observe(workspace_handle, |_, _, cx| cx.notify()));
+        subscriptions.push(cx.observe(&project, |_, _, cx| cx.notify()));
         subscriptions.push(cx.observe(&active_call, |this, _, cx| this.active_call_changed(cx)));
         subscriptions.push(cx.observe_window_activation(|this, active, cx| {
             this.window_activation_changed(active, cx)
@@ -181,6 +180,63 @@ impl CollabTitlebarItem {
         }
     }
 
+    fn collect_title_root_names(
+        &self,
+        project: &Project,
+        theme: Arc<Theme>,
+        cx: &ViewContext<Self>,
+    ) -> AnyElement<Self> {
+        let names_and_branches = project.visible_worktrees(cx).map(|worktree| {
+            let worktree = worktree.read(cx);
+            (worktree.root_name(), worktree.root_git_entry())
+        });
+
+        fn push_str(buffer: &mut String, index: &mut usize, str: &str) {
+            buffer.push_str(str);
+            *index += str.chars().count();
+        }
+
+        let mut indices = Vec::new();
+        let mut index = 0;
+        let mut title = String::new();
+        let mut names_and_branches = names_and_branches.peekable();
+        while let Some((name, entry)) = names_and_branches.next() {
+            let pre_index = index;
+            push_str(&mut title, &mut index, name);
+            indices.extend((pre_index..index).into_iter());
+            if let Some(branch) = entry.and_then(|entry| entry.branch()) {
+                push_str(&mut title, &mut index, "/");
+                push_str(&mut title, &mut index, &branch);
+            }
+            if names_and_branches.peek().is_some() {
+                push_str(&mut title, &mut index, ", ");
+                if index >= MAX_TITLE_LENGTH {
+                    title.push_str(" …");
+                    break;
+                }
+            }
+        }
+
+        let text_style = theme.workspace.titlebar.title.clone();
+        let item_spacing = theme.workspace.titlebar.item_spacing;
+
+        let mut highlight = text_style.clone();
+        highlight.color = theme.workspace.titlebar.highlight_color;
+
+        let style = LabelStyle {
+            text: text_style,
+            highlight_text: Some(highlight),
+        };
+
+        Label::new(title, style)
+            .with_highlights(indices)
+            .contained()
+            .with_margin_right(item_spacing)
+            .aligned()
+            .left()
+            .into_any_named("title-with-git-information")
+    }
+
     fn window_activation_changed(&mut self, active: bool, cx: &mut ViewContext<Self>) {
         let project = if active {
             Some(self.project.clone())

crates/fs/src/fs.rs 🔗

@@ -619,7 +619,10 @@ impl FakeFs {
         .boxed()
     }
 
-    pub async fn set_index_for_repo(&self, dot_git: &Path, head_state: &[(&Path, String)]) {
+    pub fn with_git_state<F>(&self, dot_git: &Path, f: F)
+    where
+        F: FnOnce(&mut FakeGitRepositoryState),
+    {
         let mut state = self.state.lock();
         let entry = state.read_path(dot_git).unwrap();
         let mut entry = entry.lock();
@@ -628,12 +631,7 @@ impl FakeFs {
             let repo_state = git_repo_state.get_or_insert_with(Default::default);
             let mut repo_state = repo_state.lock();
 
-            repo_state.index_contents.clear();
-            repo_state.index_contents.extend(
-                head_state
-                    .iter()
-                    .map(|(path, content)| (path.to_path_buf(), content.clone())),
-            );
+            f(&mut repo_state);
 
             state.emit_event([dot_git]);
         } else {
@@ -641,6 +639,21 @@ impl FakeFs {
         }
     }
 
+    pub async fn set_branch_name(&self, dot_git: &Path, branch: Option<impl Into<String>>) {
+        self.with_git_state(dot_git, |state| state.branch_name = branch.map(Into::into))
+    }
+
+    pub async fn set_index_for_repo(&self, dot_git: &Path, head_state: &[(&Path, String)]) {
+        self.with_git_state(dot_git, |state| {
+            state.index_contents.clear();
+            state.index_contents.extend(
+                head_state
+                    .iter()
+                    .map(|(path, content)| (path.to_path_buf(), content.clone())),
+            );
+        });
+    }
+
     pub fn paths(&self) -> Vec<PathBuf> {
         let mut result = Vec::new();
         let mut queue = collections::VecDeque::new();

crates/fs/src/repository.rs 🔗

@@ -5,6 +5,7 @@ use std::{
     path::{Component, Path, PathBuf},
     sync::Arc,
 };
+use util::ResultExt;
 
 pub use git2::Repository as LibGitRepository;
 
@@ -13,6 +14,14 @@ pub trait GitRepository: Send {
     fn reload_index(&self);
 
     fn load_index_text(&self, relative_file_path: &Path) -> Option<String>;
+
+    fn branch_name(&self) -> Option<String>;
+}
+
+impl std::fmt::Debug for dyn GitRepository {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        f.debug_struct("dyn GitRepository<...>").finish()
+    }
 }
 
 #[async_trait::async_trait]
@@ -46,6 +55,12 @@ impl GitRepository for LibGitRepository {
         }
         None
     }
+
+    fn branch_name(&self) -> Option<String> {
+        let head = self.head().log_err()?;
+        let branch = String::from_utf8_lossy(head.shorthand_bytes());
+        Some(branch.to_string())
+    }
 }
 
 #[derive(Debug, Clone, Default)]
@@ -56,6 +71,7 @@ pub struct FakeGitRepository {
 #[derive(Debug, Clone, Default)]
 pub struct FakeGitRepositoryState {
     pub index_contents: HashMap<PathBuf, String>,
+    pub branch_name: Option<String>,
 }
 
 impl FakeGitRepository {
@@ -72,6 +88,11 @@ impl GitRepository for FakeGitRepository {
         let state = self.state.lock();
         state.index_contents.get(path).cloned()
     }
+
+    fn branch_name(&self) -> Option<String> {
+        let state = self.state.lock();
+        state.branch_name.clone()
+    }
 }
 
 fn check_path_to_repo_path_errors(relative_file_path: &Path) -> Result<()> {

crates/gpui/src/elements/flex.rs 🔗

@@ -66,6 +66,10 @@ impl<V: View> Flex<V> {
         self
     }
 
+    pub fn is_empty(&self) -> bool {
+        self.children.is_empty()
+    }
+
     fn layout_flex_children(
         &mut self,
         layout_expanded: bool,

crates/project/src/project.rs 🔗

@@ -64,6 +64,7 @@ use std::{
     },
     time::{Duration, Instant, SystemTime},
 };
+
 use terminals::Terminals;
 
 use util::{debug_panic, defer, merge_json_value_into, post_inc, ResultExt, TryFutureExt as _};
@@ -4695,40 +4696,50 @@ impl Project {
 
     fn update_local_worktree_buffers_git_repos(
         &mut self,
-        worktree: ModelHandle<Worktree>,
-        repos: &[GitRepositoryEntry],
+        worktree_handle: ModelHandle<Worktree>,
+        repos: &HashMap<Arc<Path>, LocalRepositoryEntry>,
         cx: &mut ModelContext<Self>,
     ) {
+        debug_assert!(worktree_handle.read(cx).is_local());
+
         for (_, buffer) in &self.opened_buffers {
             if let Some(buffer) = buffer.upgrade(cx) {
                 let file = match File::from_dyn(buffer.read(cx).file()) {
                     Some(file) => file,
                     None => continue,
                 };
-                if file.worktree != worktree {
+                if file.worktree != worktree_handle {
                     continue;
                 }
 
                 let path = file.path().clone();
 
-                let repo = match repos.iter().find(|repo| repo.manages(&path)) {
+                let worktree = worktree_handle.read(cx);
+
+                let (work_directory, repo) = match repos
+                    .iter()
+                    .find(|(work_directory, _)| path.starts_with(work_directory))
+                {
                     Some(repo) => repo.clone(),
                     None => return,
                 };
 
-                let relative_repo = match path.strip_prefix(repo.content_path) {
-                    Ok(relative_repo) => relative_repo.to_owned(),
-                    Err(_) => return,
+                let relative_repo = match path.strip_prefix(work_directory).log_err() {
+                    Some(relative_repo) => relative_repo.to_owned(),
+                    None => return,
                 };
 
+                drop(worktree);
+
                 let remote_id = self.remote_id();
                 let client = self.client.clone();
+                let git_ptr = repo.repo_ptr.clone();
+                let diff_base_task = cx
+                    .background()
+                    .spawn(async move { git_ptr.lock().load_index_text(&relative_repo) });
 
                 cx.spawn(|_, mut cx| async move {
-                    let diff_base = cx
-                        .background()
-                        .spawn(async move { repo.repo.lock().load_index_text(&relative_repo) })
-                        .await;
+                    let diff_base = diff_base_task.await;
 
                     let buffer_id = buffer.update(&mut cx, |buffer, cx| {
                         buffer.set_diff_base(diff_base.clone(), cx);

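The worktree changes in the next section lean on one recurring pattern: both changed_repos and the repository half of build_update walk two sorted repository maps in lockstep and emit an entry for every key that was added, removed, or whose value changed. A self-contained sketch of that merge-diff over standard BTreeMaps (generic stand-ins for illustration, not the PR's TreeMap types):

    use std::cmp::Ordering;
    use std::collections::BTreeMap;

    // Walk two sorted maps in lockstep; report keys that are new or changed in
    // `new` as updates, and keys that only exist in `old` as removals.
    fn diff_maps<K: Ord + Clone, V: PartialEq + Clone>(
        old: &BTreeMap<K, V>,
        new: &BTreeMap<K, V>,
    ) -> (Vec<(K, V)>, Vec<K>) {
        let (mut updated, mut removed) = (Vec::new(), Vec::new());
        let mut old_iter = old.iter().peekable();
        let mut new_iter = new.iter().peekable();
        loop {
            match (old_iter.peek(), new_iter.peek()) {
                (Some(&(old_k, old_v)), Some(&(new_k, new_v))) => match old_k.cmp(new_k) {
                    Ordering::Less => {
                        removed.push(old_k.clone());
                        old_iter.next();
                    }
                    Ordering::Equal => {
                        if old_v != new_v {
                            updated.push((new_k.clone(), new_v.clone()));
                        }
                        old_iter.next();
                        new_iter.next();
                    }
                    Ordering::Greater => {
                        updated.push((new_k.clone(), new_v.clone()));
                        new_iter.next();
                    }
                },
                (Some(&(old_k, _)), None) => {
                    removed.push(old_k.clone());
                    old_iter.next();
                }
                (None, Some(&(new_k, new_v))) => {
                    updated.push((new_k.clone(), new_v.clone()));
                    new_iter.next();
                }
                (None, None) => break,
            }
        }
        (updated, removed)
    }

This lockstep walk is what lets the host send guests only repository deltas relative to their last-seen snapshot rather than the full repository set on every scan.
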
crates/project/src/worktree.rs 🔗

@@ -51,7 +51,7 @@ use std::{
     },
     time::{Duration, SystemTime},
 };
-use sum_tree::{Bias, Edit, SeekTarget, SumTree, TreeSet};
+use sum_tree::{Bias, Edit, SeekTarget, SumTree, TreeMap, TreeSet};
 use util::{paths::HOME, ResultExt, TryFutureExt};
 
 #[derive(Copy, Clone, PartialEq, Eq, Debug, Hash, PartialOrd, Ord)]
@@ -102,6 +102,7 @@ pub struct Snapshot {
     root_char_bag: CharBag,
     entries_by_path: SumTree<Entry>,
     entries_by_id: SumTree<PathEntry>,
+    repository_entries: TreeMap<RepositoryWorkDirectory, RepositoryEntry>,
 
     /// A number that increases every time the worktree begins scanning
     /// a set of paths from the filesystem. This scanning could be caused
@@ -116,45 +117,133 @@ pub struct Snapshot {
     completed_scan_id: usize,
 }
 
-#[derive(Clone)]
-pub struct GitRepositoryEntry {
-    pub(crate) repo: Arc<Mutex<dyn GitRepository>>,
+#[derive(Clone, Debug, Eq, PartialEq)]
+pub struct RepositoryEntry {
+    pub(crate) work_directory: WorkDirectoryEntry,
+    pub(crate) branch: Option<Arc<str>>,
+}
 
-    pub(crate) scan_id: usize,
-    // Path to folder containing the .git file or directory
-    pub(crate) content_path: Arc<Path>,
-    // Path to the actual .git folder.
-    // Note: if .git is a file, this points to the folder indicated by the .git file
-    pub(crate) git_dir_path: Arc<Path>,
+impl RepositoryEntry {
+    pub fn branch(&self) -> Option<Arc<str>> {
+        self.branch.clone()
+    }
+
+    pub fn work_directory_id(&self) -> ProjectEntryId {
+        *self.work_directory
+    }
+
+    pub fn work_directory(&self, snapshot: &Snapshot) -> Option<RepositoryWorkDirectory> {
+        snapshot
+            .entry_for_id(self.work_directory_id())
+            .map(|entry| RepositoryWorkDirectory(entry.path.clone()))
+    }
+
+    pub(crate) fn contains(&self, snapshot: &Snapshot, path: &Path) -> bool {
+        self.work_directory.contains(snapshot, path)
+    }
 }
 
-impl std::fmt::Debug for GitRepositoryEntry {
-    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-        f.debug_struct("GitRepositoryEntry")
-            .field("content_path", &self.content_path)
-            .field("git_dir_path", &self.git_dir_path)
-            .finish()
+impl From<&RepositoryEntry> for proto::RepositoryEntry {
+    fn from(value: &RepositoryEntry) -> Self {
+        proto::RepositoryEntry {
+            work_directory_id: value.work_directory.to_proto(),
+            branch: value.branch.as_ref().map(|str| str.to_string()),
+        }
     }
 }
 
-#[derive(Debug)]
+/// This path corresponds to the 'content path' (the folder that contains the .git)
+#[derive(Clone, Debug, Ord, PartialOrd, Eq, PartialEq)]
+pub struct RepositoryWorkDirectory(Arc<Path>);
+
+impl Default for RepositoryWorkDirectory {
+    fn default() -> Self {
+        RepositoryWorkDirectory(Arc::from(Path::new("")))
+    }
+}
+
+#[derive(Clone, Debug, Ord, PartialOrd, Eq, PartialEq)]
+pub struct WorkDirectoryEntry(ProjectEntryId);
+
+impl WorkDirectoryEntry {
+    // Note that these paths should be relative to the worktree root.
+    pub(crate) fn contains(&self, snapshot: &Snapshot, path: &Path) -> bool {
+        snapshot
+            .entry_for_id(self.0)
+            .map(|entry| path.starts_with(&entry.path))
+            .unwrap_or(false)
+    }
+
+    pub(crate) fn relativize(&self, worktree: &Snapshot, path: &Path) -> Option<RepoPath> {
+        worktree.entry_for_id(self.0).and_then(|entry| {
+            path.strip_prefix(&entry.path)
+                .ok()
+                .map(move |path| RepoPath(path.to_owned()))
+        })
+    }
+}
+
+impl Deref for WorkDirectoryEntry {
+    type Target = ProjectEntryId;
+
+    fn deref(&self) -> &Self::Target {
+        &self.0
+    }
+}
+
+impl<'a> From<ProjectEntryId> for WorkDirectoryEntry {
+    fn from(value: ProjectEntryId) -> Self {
+        WorkDirectoryEntry(value)
+    }
+}
+
+#[derive(Clone, Debug, Ord, PartialOrd, Eq, PartialEq)]
+pub struct RepoPath(PathBuf);
+
+impl AsRef<Path> for RepoPath {
+    fn as_ref(&self) -> &Path {
+        self.0.as_ref()
+    }
+}
+
+impl Deref for RepoPath {
+    type Target = PathBuf;
+
+    fn deref(&self) -> &Self::Target {
+        &self.0
+    }
+}
+
+impl AsRef<Path> for RepositoryWorkDirectory {
+    fn as_ref(&self) -> &Path {
+        self.0.as_ref()
+    }
+}
+
+#[derive(Debug, Clone)]
 pub struct LocalSnapshot {
     ignores_by_parent_abs_path: HashMap<Arc<Path>, (Arc<Gitignore>, usize)>,
-    git_repositories: Vec<GitRepositoryEntry>,
+    // The ProjectEntryId corresponds to the entry for the .git dir
+    // work_directory_id
+    git_repositories: TreeMap<ProjectEntryId, LocalRepositoryEntry>,
     removed_entry_ids: HashMap<u64, ProjectEntryId>,
     next_entry_id: Arc<AtomicUsize>,
     snapshot: Snapshot,
 }
 
-impl Clone for LocalSnapshot {
-    fn clone(&self) -> Self {
-        Self {
-            ignores_by_parent_abs_path: self.ignores_by_parent_abs_path.clone(),
-            git_repositories: self.git_repositories.iter().cloned().collect(),
-            removed_entry_ids: self.removed_entry_ids.clone(),
-            next_entry_id: self.next_entry_id.clone(),
-            snapshot: self.snapshot.clone(),
-        }
+#[derive(Debug, Clone)]
+pub struct LocalRepositoryEntry {
+    pub(crate) scan_id: usize,
+    pub(crate) repo_ptr: Arc<Mutex<dyn GitRepository>>,
+    /// Path to the actual .git folder.
+    /// Note: if .git is a file, this points to the folder indicated by the .git file
+    pub(crate) git_dir_path: Arc<Path>,
+}
+
+impl LocalRepositoryEntry {
+    // Note that this path should be relative to the worktree root.
+    pub(crate) fn in_dot_git(&self, path: &Path) -> bool {
+        path.starts_with(self.git_dir_path.as_ref())
     }
 }
 
@@ -191,7 +280,7 @@ struct ShareState {
 
 pub enum Event {
     UpdatedEntries(HashMap<Arc<Path>, PathChange>),
-    UpdatedGitRepositories(Vec<GitRepositoryEntry>),
+    UpdatedGitRepositories(HashMap<Arc<Path>, LocalRepositoryEntry>),
 }
 
 impl Entity for Worktree {
@@ -222,8 +311,8 @@ impl Worktree {
 
             let mut snapshot = LocalSnapshot {
                 ignores_by_parent_abs_path: Default::default(),
-                git_repositories: Default::default(),
                 removed_entry_ids: Default::default(),
+                git_repositories: Default::default(),
                 next_entry_id,
                 snapshot: Snapshot {
                     id: WorktreeId::from_usize(cx.model_id()),
@@ -232,6 +321,7 @@ impl Worktree {
                     root_char_bag: root_name.chars().map(|c| c.to_ascii_lowercase()).collect(),
                     entries_by_path: Default::default(),
                     entries_by_id: Default::default(),
+                    repository_entries: Default::default(),
                     scan_id: 1,
                     completed_scan_id: 0,
                 },
@@ -330,6 +420,7 @@ impl Worktree {
                     .collect(),
                 entries_by_path: Default::default(),
                 entries_by_id: Default::default(),
+                repository_entries: Default::default(),
                 scan_id: 1,
                 completed_scan_id: 0,
             };
@@ -598,10 +689,8 @@ impl LocalWorktree {
     }
 
     fn set_snapshot(&mut self, new_snapshot: LocalSnapshot, cx: &mut ModelContext<Worktree>) {
-        let updated_repos = Self::changed_repos(
-            &self.snapshot.git_repositories,
-            &new_snapshot.git_repositories,
-        );
+        let updated_repos =
+            self.changed_repos(&self.git_repositories, &new_snapshot.git_repositories);
         self.snapshot = new_snapshot;
 
         if let Some(share) = self.share.as_mut() {
@@ -614,31 +703,57 @@ impl LocalWorktree {
     }
 
     fn changed_repos(
-        old_repos: &[GitRepositoryEntry],
-        new_repos: &[GitRepositoryEntry],
-    ) -> Vec<GitRepositoryEntry> {
-        fn diff<'a>(
-            a: &'a [GitRepositoryEntry],
-            b: &'a [GitRepositoryEntry],
-            updated: &mut HashMap<&'a Path, GitRepositoryEntry>,
-        ) {
-            for a_repo in a {
-                let matched = b.iter().find(|b_repo| {
-                    a_repo.git_dir_path == b_repo.git_dir_path && a_repo.scan_id == b_repo.scan_id
-                });
+        &self,
+        old_repos: &TreeMap<ProjectEntryId, LocalRepositoryEntry>,
+        new_repos: &TreeMap<ProjectEntryId, LocalRepositoryEntry>,
+    ) -> HashMap<Arc<Path>, LocalRepositoryEntry> {
+        let mut diff = HashMap::default();
+        let mut old_repos = old_repos.iter().peekable();
+        let mut new_repos = new_repos.iter().peekable();
+        loop {
+            match (old_repos.peek(), new_repos.peek()) {
+                (Some((old_entry_id, old_repo)), Some((new_entry_id, new_repo))) => {
+                    match Ord::cmp(old_entry_id, new_entry_id) {
+                        Ordering::Less => {
+                            if let Some(entry) = self.entry_for_id(**old_entry_id) {
+                                diff.insert(entry.path.clone(), (*old_repo).clone());
+                            }
+                            old_repos.next();
+                        }
+                        Ordering::Equal => {
+                            if old_repo.scan_id != new_repo.scan_id {
+                                if let Some(entry) = self.entry_for_id(**new_entry_id) {
+                                    diff.insert(entry.path.clone(), (*new_repo).clone());
+                                }
+                            }
 
-                if matched.is_none() {
-                    updated.insert(a_repo.git_dir_path.as_ref(), a_repo.clone());
+                            old_repos.next();
+                            new_repos.next();
+                        }
+                        Ordering::Greater => {
+                            if let Some(entry) = self.entry_for_id(**new_entry_id) {
+                                diff.insert(entry.path.clone(), (*new_repo).clone());
+                            }
+                            new_repos.next();
+                        }
+                    }
+                }
+                (Some((old_entry_id, old_repo)), None) => {
+                    if let Some(entry) = self.entry_for_id(**old_entry_id) {
+                        diff.insert(entry.path.clone(), (*old_repo).clone());
+                    }
+                    old_repos.next();
+                }
+                (None, Some((new_entry_id, new_repo))) => {
+                    if let Some(entry) = self.entry_for_id(**new_entry_id) {
+                        diff.insert(entry.path.clone(), (*new_repo).clone());
+                    }
+                    new_repos.next();
                 }
+                (None, None) => break,
             }
         }
-
-        let mut updated = HashMap::<&Path, GitRepositoryEntry>::default();
-
-        diff(old_repos, new_repos, &mut updated);
-        diff(new_repos, old_repos, &mut updated);
-
-        updated.into_values().collect()
+        diff
     }
 
     pub fn scan_complete(&self) -> impl Future<Output = ()> {
@@ -679,18 +794,24 @@ impl LocalWorktree {
         let fs = self.fs.clone();
         let snapshot = self.snapshot();
 
+        let mut index_task = None;
+
+        if let Some(repo) = snapshot.repo_for(&path) {
+            let repo_path = repo.work_directory.relativize(self, &path).unwrap();
+            if let Some(repo) = self.git_repositories.get(&*repo.work_directory) {
+                let repo = repo.repo_ptr.to_owned();
+                index_task = Some(
+                    cx.background()
+                        .spawn(async move { repo.lock().load_index_text(&repo_path) }),
+                );
+            }
+        }
+
         cx.spawn(|this, mut cx| async move {
             let text = fs.load(&abs_path).await?;
 
-            let diff_base = if let Some(repo) = snapshot.repo_for(&path) {
-                if let Ok(repo_relative) = path.strip_prefix(repo.content_path) {
-                    let repo_relative = repo_relative.to_owned();
-                    cx.background()
-                        .spawn(async move { repo.repo.lock().load_index_text(&repo_relative) })
-                        .await
-                } else {
-                    None
-                }
+            let diff_base = if let Some(index_task) = index_task {
+                index_task.await
             } else {
                 None
             };
@@ -1002,9 +1123,9 @@ impl LocalWorktree {
                     let mut share_tx = Some(share_tx);
                     let mut prev_snapshot = LocalSnapshot {
                         ignores_by_parent_abs_path: Default::default(),
-                        git_repositories: Default::default(),
                         removed_entry_ids: Default::default(),
                         next_entry_id: Default::default(),
+                        git_repositories: Default::default(),
                         snapshot: Snapshot {
                             id: WorktreeId(worktree_id as usize),
                             abs_path: Path::new("").into(),
@@ -1012,6 +1133,7 @@ impl LocalWorktree {
                             root_char_bag: Default::default(),
                             entries_by_path: Default::default(),
                             entries_by_id: Default::default(),
+                            repository_entries: Default::default(),
                             scan_id: 0,
                             completed_scan_id: 0,
                         },
@@ -1262,7 +1384,7 @@ impl Snapshot {
         Some(removed_entry.path)
     }
 
-    pub(crate) fn apply_remote_update(&mut self, update: proto::UpdateWorktree) -> Result<()> {
+    pub(crate) fn apply_remote_update(&mut self, mut update: proto::UpdateWorktree) -> Result<()> {
         let mut entries_by_path_edits = Vec::new();
         let mut entries_by_id_edits = Vec::new();
         for entry_id in update.removed_entries {
@@ -1288,6 +1410,32 @@ impl Snapshot {
 
         self.entries_by_path.edit(entries_by_path_edits, &());
         self.entries_by_id.edit(entries_by_id_edits, &());
+
+        update.removed_repositories.sort_unstable();
+        self.repository_entries.retain(|_, entry| {
+            if let Ok(_) = update
+                .removed_repositories
+                .binary_search(&entry.work_directory.to_proto())
+            {
+                false
+            } else {
+                true
+            }
+        });
+
+        for repository in update.updated_repositories {
+            let repository = RepositoryEntry {
+                work_directory: ProjectEntryId::from_proto(repository.work_directory_id).into(),
+                branch: repository.branch.map(Into::into),
+            };
+            if let Some(entry) = self.entry_for_id(repository.work_directory_id()) {
+                self.repository_entries
+                    .insert(RepositoryWorkDirectory(entry.path.clone()), repository)
+            } else {
+                log::error!("no work directory entry for repository {:?}", repository)
+            }
+        }
+
         self.scan_id = update.scan_id as usize;
         if update.is_last_update {
             self.completed_scan_id = update.scan_id as usize;
@@ -1350,6 +1498,10 @@ impl Snapshot {
         self.traverse_from_offset(true, include_ignored, 0)
     }
 
+    pub fn repositories(&self) -> impl Iterator<Item = &RepositoryEntry> {
+        self.repository_entries.values()
+    }
+
     pub fn paths(&self) -> impl Iterator<Item = &Arc<Path>> {
         let empty_path = Path::new("");
         self.entries_by_path
@@ -1380,6 +1532,16 @@ impl Snapshot {
         &self.root_name
     }
 
+    pub fn root_git_entry(&self) -> Option<RepositoryEntry> {
+        self.repository_entries
+            .get(&RepositoryWorkDirectory(Path::new("").into()))
+            .map(|entry| entry.to_owned())
+    }
+
+    pub fn git_entries(&self) -> impl Iterator<Item = &RepositoryEntry> {
+        self.repository_entries.values()
+    }
+
     pub fn scan_id(&self) -> usize {
         self.scan_id
     }
@@ -1408,23 +1570,32 @@ impl Snapshot {
 }
 
 impl LocalSnapshot {
-    // Gives the most specific git repository for a given path
-    pub(crate) fn repo_for(&self, path: &Path) -> Option<GitRepositoryEntry> {
-        self.git_repositories
-            .iter()
-            .rev() //git_repository is ordered lexicographically
-            .find(|repo| repo.manages(path))
-            .cloned()
+    pub(crate) fn repo_for(&self, path: &Path) -> Option<RepositoryEntry> {
+        let mut max_len = 0;
+        let mut current_candidate = None;
+        for (work_directory, repo) in (&self.repository_entries).iter() {
+            if repo.contains(self, path) {
+                if work_directory.0.as_os_str().len() >= max_len {
+                    current_candidate = Some(repo);
+                    max_len = work_directory.0.as_os_str().len();
+                } else {
+                    break;
+                }
+            }
+        }
+
+        current_candidate.map(|entry| entry.to_owned())
     }
 
-    pub(crate) fn repo_with_dot_git_containing(
-        &mut self,
+    pub(crate) fn repo_for_metadata(
+        &self,
         path: &Path,
-    ) -> Option<&mut GitRepositoryEntry> {
-        // Git repositories cannot be nested, so we don't need to reverse the order
-        self.git_repositories
-            .iter_mut()
-            .find(|repo| repo.in_dot_git(path))
+    ) -> Option<(ProjectEntryId, Arc<Mutex<dyn GitRepository>>)> {
+        let (entry_id, local_repo) = self
+            .git_repositories
+            .iter()
+            .find(|(_, repo)| repo.in_dot_git(path))?;
+        Some((*entry_id, local_repo.repo_ptr.to_owned()))
     }
 
     #[cfg(test)]
@@ -1439,6 +1610,8 @@ impl LocalSnapshot {
             removed_entries: Default::default(),
             scan_id: self.scan_id as u64,
             is_last_update: true,
+            updated_repositories: self.repository_entries.values().map(Into::into).collect(),
+            removed_repositories: Default::default(),
         }
     }
 
@@ -1498,6 +1671,44 @@ impl LocalSnapshot {
             }
         }
 
+        let mut updated_repositories: Vec<proto::RepositoryEntry> = Vec::new();
+        let mut removed_repositories = Vec::new();
+        let mut self_repos = self.snapshot.repository_entries.iter().peekable();
+        let mut other_repos = other.snapshot.repository_entries.iter().peekable();
+        loop {
+            match (self_repos.peek(), other_repos.peek()) {
+                (Some((self_work_dir, self_repo)), Some((other_work_dir, other_repo))) => {
+                    match Ord::cmp(self_work_dir, other_work_dir) {
+                        Ordering::Less => {
+                            updated_repositories.push((*self_repo).into());
+                            self_repos.next();
+                        }
+                        Ordering::Equal => {
+                            if self_repo != other_repo {
+                                updated_repositories.push((*self_repo).into());
+                            }
+
+                            self_repos.next();
+                            other_repos.next();
+                        }
+                        Ordering::Greater => {
+                            removed_repositories.push(other_repo.work_directory.to_proto());
+                            other_repos.next();
+                        }
+                    }
+                }
+                (Some((_, self_repo)), None) => {
+                    updated_repositories.push((*self_repo).into());
+                    self_repos.next();
+                }
+                (None, Some((_, other_repo))) => {
+                    removed_repositories.push(other_repo.work_directory.to_proto());
+                    other_repos.next();
+                }
+                (None, None) => break,
+            }
+        }
+
         proto::UpdateWorktree {
             project_id,
             worktree_id,
@@ -1507,6 +1718,8 @@ impl LocalSnapshot {
             removed_entries,
             scan_id: self.scan_id as u64,
             is_last_update: self.completed_scan_id == self.scan_id,
+            updated_repositories,
+            removed_repositories,
         }
     }
 
@@ -1595,24 +1808,7 @@ impl LocalSnapshot {
         }
 
         if parent_path.file_name() == Some(&DOT_GIT) {
-            let abs_path = self.abs_path.join(&parent_path);
-            let content_path: Arc<Path> = parent_path.parent().unwrap().into();
-            if let Err(ix) = self
-                .git_repositories
-                .binary_search_by_key(&&content_path, |repo| &repo.content_path)
-            {
-                if let Some(repo) = fs.open_repo(abs_path.as_path()) {
-                    self.git_repositories.insert(
-                        ix,
-                        GitRepositoryEntry {
-                            repo,
-                            scan_id: 0,
-                            content_path,
-                            git_dir_path: parent_path,
-                        },
-                    );
-                }
-            }
+            self.build_repo(parent_path, fs);
         }
 
         let mut entries_by_path_edits = vec![Edit::Insert(parent_entry)];
@@ -1633,6 +1829,50 @@ impl LocalSnapshot {
         self.entries_by_id.edit(entries_by_id_edits, &());
     }
 
+    fn build_repo(&mut self, parent_path: Arc<Path>, fs: &dyn Fs) -> Option<()> {
+        let abs_path = self.abs_path.join(&parent_path);
+        let work_dir: Arc<Path> = parent_path.parent().unwrap().into();
+
+        // Guard against repositories inside the repository metadata
+        if work_dir
+            .components()
+            .find(|component| component.as_os_str() == *DOT_GIT)
+            .is_some()
+        {
+            return None;
+        };
+
+        let work_dir_id = self
+            .entry_for_path(work_dir.clone())
+            .map(|entry| entry.id)?;
+
+        if self.git_repositories.get(&work_dir_id).is_none() {
+            let repo = fs.open_repo(abs_path.as_path())?;
+            let work_directory = RepositoryWorkDirectory(work_dir.clone());
+            let scan_id = self.scan_id;
+
+            let repo_lock = repo.lock();
+            self.repository_entries.insert(
+                work_directory,
+                RepositoryEntry {
+                    work_directory: work_dir_id.into(),
+                    branch: repo_lock.branch_name().map(Into::into),
+                },
+            );
+            drop(repo_lock);
+
+            self.git_repositories.insert(
+                work_dir_id,
+                LocalRepositoryEntry {
+                    scan_id,
+                    repo_ptr: repo,
+                    git_dir_path: parent_path.clone(),
+                },
+            )
+        }
+
+        Some(())
+    }
     fn reuse_entry_id(&mut self, entry: &mut Entry) {
         if let Some(removed_entry_id) = self.removed_entry_ids.remove(&entry.inode) {
             entry.id = removed_entry_id;
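
Note: `build_repo` keys the host-side repository handle by the `ProjectEntryId` of its work directory, and it bails out when the candidate work directory itself sits somewhere under a `.git` directory, so git's own metadata (for example, submodule git directories under `.git/modules`) never becomes a tracked repository. A stand-alone restatement of that guard, equivalent to the `find(..).is_some()` check above:

use std::{ffi::OsStr, path::Path};

// True if any component of the worktree-relative path is `.git`.
fn is_inside_dot_git(work_dir: &Path) -> bool {
    work_dir
        .components()
        .any(|component| component.as_os_str() == OsStr::new(".git"))
}

#[test]
fn guards_nested_git_metadata() {
    assert!(is_inside_dot_git(Path::new(".git/modules/dep1")));
    assert!(!is_inside_dot_git(Path::new("dir1/deps/dep1")));
}
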
@@ -1671,14 +1911,6 @@ impl LocalSnapshot {
             {
                 *scan_id = self.snapshot.scan_id;
             }
-        } else if path.file_name() == Some(&DOT_GIT) {
-            let parent_path = path.parent().unwrap();
-            if let Ok(ix) = self
-                .git_repositories
-                .binary_search_by_key(&parent_path, |repo| repo.git_dir_path.as_ref())
-            {
-                self.git_repositories[ix].scan_id = self.snapshot.scan_id;
-            }
         }
     }
 
@@ -1718,22 +1950,6 @@ impl LocalSnapshot {
 
         ignore_stack
     }
-
-    pub fn git_repo_entries(&self) -> &[GitRepositoryEntry] {
-        &self.git_repositories
-    }
-}
-
-impl GitRepositoryEntry {
-    // Note that these paths should be relative to the worktree root.
-    pub(crate) fn manages(&self, path: &Path) -> bool {
-        path.starts_with(self.content_path.as_ref())
-    }
-
-    // Note that this path should be relative to the worktree root.
-    pub(crate) fn in_dot_git(&self, path: &Path) -> bool {
-        path.starts_with(self.git_dir_path.as_ref())
-    }
 }
 
 async fn build_gitignore(abs_path: &Path, fs: &dyn Fs) -> Result<Gitignore> {
@@ -2318,11 +2534,29 @@ impl BackgroundScanner {
         self.update_ignore_statuses().await;
 
         let mut snapshot = self.snapshot.lock();
+
         let mut git_repositories = mem::take(&mut snapshot.git_repositories);
-        git_repositories.retain(|repo| snapshot.entry_for_path(&repo.git_dir_path).is_some());
+        git_repositories.retain(|work_directory_id, _| {
+            snapshot
+                .entry_for_id(*work_directory_id)
+                .map_or(false, |entry| {
+                    snapshot.entry_for_path(entry.path.join(*DOT_GIT)).is_some()
+                })
+        });
         snapshot.git_repositories = git_repositories;
+
+        let mut git_repository_entries = mem::take(&mut snapshot.snapshot.repository_entries);
+        git_repository_entries.retain(|_, entry| {
+            snapshot
+                .git_repositories
+                .get(&entry.work_directory.0)
+                .is_some()
+        });
+        snapshot.snapshot.repository_entries = git_repository_entries;
+
         snapshot.removed_entry_ids.clear();
         snapshot.completed_scan_id = snapshot.scan_id;
+
         drop(snapshot);
 
         self.send_status_update(false, None);
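
Note: once a scan settles, repository state is pruned in two passes: a host-side handle survives only if its work-directory entry still exists and still contains a `.git` child, and the replicated `repository_entries` map is then narrowed to the work directories that kept a handle. A toy model of that cascade, assuming plain hash maps in place of the worktree's `TreeMap`s:

use std::collections::{HashMap, HashSet};

// `local` holds host-only repo handles keyed by work-directory entry id; `shared`
// holds the replicated per-repository state keyed the same way.
fn prune_repositories(
    work_dirs_with_dot_git: &HashSet<u64>,
    local: &mut HashMap<u64, &'static str>,
    shared: &mut HashMap<u64, &'static str>,
) {
    // Pass 1: drop handles whose work directory (or its `.git` child) is gone.
    local.retain(|work_dir_id, _| work_dirs_with_dot_git.contains(work_dir_id));
    // Pass 2: the shared state follows whatever handles remain.
    shared.retain(|work_dir_id, _| local.contains_key(work_dir_id));
}
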
@@ -2607,9 +2841,24 @@ impl BackgroundScanner {
                     snapshot.insert_entry(fs_entry, self.fs.as_ref());
 
                     let scan_id = snapshot.scan_id;
-                    if let Some(repo) = snapshot.repo_with_dot_git_containing(&path) {
-                        repo.repo.lock().reload_index();
-                        repo.scan_id = scan_id;
+
+                    let repo_with_path_in_dotgit = snapshot.repo_for_metadata(&path);
+                    if let Some((entry_id, repo)) = repo_with_path_in_dotgit {
+                        let work_dir = snapshot
+                            .entry_for_id(entry_id)
+                            .map(|entry| RepositoryWorkDirectory(entry.path.clone()))?;
+
+                        let repo = repo.lock();
+                        repo.reload_index();
+                        let branch = repo.branch_name();
+
+                        snapshot.git_repositories.update(&entry_id, |entry| {
+                            entry.scan_id = scan_id;
+                        });
+
+                        snapshot
+                            .repository_entries
+                            .update(&work_dir, |entry| entry.branch = branch.map(Into::into));
                     }
 
                     if let Some(scan_queue_tx) = &scan_queue_tx {
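
Note: when a change lands under a repository's `.git` directory, the scanner reloads the index and re-reads the branch name through the `GitRepository` trait, then pushes the new name into the replicated `RepositoryEntry`. The trait's implementation (in `crates/fs/src/repository.rs`) is not shown in this section; purely as an illustration of where a branch name comes from, a symbolic `HEAD` file can be parsed like this (not the fs crate's code, which may go through libgit2 instead):

// `.git/HEAD` normally contains "ref: refs/heads/<branch>" when a branch is
// checked out; a detached HEAD holds a bare commit hash, so this returns None.
fn branch_from_head(head_contents: &str) -> Option<String> {
    head_contents
        .trim()
        .strip_prefix("ref: refs/heads/")
        .map(|branch| branch.to_string())
}

#[test]
fn parses_symbolic_head() {
    assert_eq!(branch_from_head("ref: refs/heads/main\n").as_deref(), Some("main"));
    assert_eq!(branch_from_head("0123abcd0123abcd\n"), None);
}
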
@@ -3121,7 +3370,6 @@ impl<'a> TryFrom<(&'a CharBag, proto::Entry)> for Entry {
 #[cfg(test)]
 mod tests {
     use super::*;
-    use fs::repository::FakeGitRepository;
     use fs::{FakeFs, RealFs};
     use gpui::{executor::Deterministic, TestAppContext};
     use pretty_assertions::assert_eq;
@@ -3389,31 +3637,44 @@ mod tests {
 
             assert!(tree.repo_for("c.txt".as_ref()).is_none());
 
-            let repo = tree.repo_for("dir1/src/b.txt".as_ref()).unwrap();
-            assert_eq!(repo.content_path.as_ref(), Path::new("dir1"));
-            assert_eq!(repo.git_dir_path.as_ref(), Path::new("dir1/.git"));
+            let entry = tree.repo_for("dir1/src/b.txt".as_ref()).unwrap();
+            assert_eq!(
+                entry
+                    .work_directory(tree)
+                    .map(|directory| directory.as_ref().to_owned()),
+                Some(Path::new("dir1").to_owned())
+            );
 
-            let repo = tree.repo_for("dir1/deps/dep1/src/a.txt".as_ref()).unwrap();
-            assert_eq!(repo.content_path.as_ref(), Path::new("dir1/deps/dep1"));
-            assert_eq!(repo.git_dir_path.as_ref(), Path::new("dir1/deps/dep1/.git"),);
+            let entry = tree.repo_for("dir1/deps/dep1/src/a.txt".as_ref()).unwrap();
+            assert_eq!(
+                entry
+                    .work_directory(tree)
+                    .map(|directory| directory.as_ref().to_owned()),
+                Some(Path::new("dir1/deps/dep1").to_owned())
+            );
         });
 
-        let original_scan_id = tree.read_with(cx, |tree, _cx| {
-            let tree = tree.as_local().unwrap();
-            tree.repo_for("dir1/src/b.txt".as_ref()).unwrap().scan_id
+        let repo_update_events = Arc::new(Mutex::new(vec![]));
+        tree.update(cx, |_, cx| {
+            let repo_update_events = repo_update_events.clone();
+            cx.subscribe(&tree, move |_, _, event, _| {
+                if let Event::UpdatedGitRepositories(update) = event {
+                    repo_update_events.lock().push(update.clone());
+                }
+            })
+            .detach();
         });
 
         std::fs::write(root.path().join("dir1/.git/random_new_file"), "hello").unwrap();
         tree.flush_fs_events(cx).await;
 
-        tree.read_with(cx, |tree, _cx| {
-            let tree = tree.as_local().unwrap();
-            let new_scan_id = tree.repo_for("dir1/src/b.txt".as_ref()).unwrap().scan_id;
-            assert_ne!(
-                original_scan_id, new_scan_id,
-                "original {original_scan_id}, new {new_scan_id}"
-            );
-        });
+        assert_eq!(
+            repo_update_events.lock()[0]
+                .keys()
+                .cloned()
+                .collect::<Vec<Arc<Path>>>(),
+            vec![Path::new("dir1").into()]
+        );
 
         std::fs::remove_dir_all(root.path().join("dir1/.git")).unwrap();
         tree.flush_fs_events(cx).await;
@@ -3425,56 +3686,6 @@ mod tests {
         });
     }
 
-    #[test]
-    fn test_changed_repos() {
-        fn fake_entry(git_dir_path: impl AsRef<Path>, scan_id: usize) -> GitRepositoryEntry {
-            GitRepositoryEntry {
-                repo: Arc::new(Mutex::new(FakeGitRepository::default())),
-                scan_id,
-                content_path: git_dir_path.as_ref().parent().unwrap().into(),
-                git_dir_path: git_dir_path.as_ref().into(),
-            }
-        }
-
-        let prev_repos: Vec<GitRepositoryEntry> = vec![
-            fake_entry("/.git", 0),
-            fake_entry("/a/.git", 0),
-            fake_entry("/a/b/.git", 0),
-        ];
-
-        let new_repos: Vec<GitRepositoryEntry> = vec![
-            fake_entry("/a/.git", 1),
-            fake_entry("/a/b/.git", 0),
-            fake_entry("/a/c/.git", 0),
-        ];
-
-        let res = LocalWorktree::changed_repos(&prev_repos, &new_repos);
-
-        // Deletion retained
-        assert!(res
-            .iter()
-            .find(|repo| repo.git_dir_path.as_ref() == Path::new("/.git") && repo.scan_id == 0)
-            .is_some());
-
-        // Update retained
-        assert!(res
-            .iter()
-            .find(|repo| repo.git_dir_path.as_ref() == Path::new("/a/.git") && repo.scan_id == 1)
-            .is_some());
-
-        // Addition retained
-        assert!(res
-            .iter()
-            .find(|repo| repo.git_dir_path.as_ref() == Path::new("/a/c/.git") && repo.scan_id == 0)
-            .is_some());
-
-        // Nochange, not retained
-        assert!(res
-            .iter()
-            .find(|repo| repo.git_dir_path.as_ref() == Path::new("/a/b/.git") && repo.scan_id == 0)
-            .is_none());
-    }
-
     #[gpui::test]
     async fn test_write_file(cx: &mut TestAppContext) {
         let dir = temp_tree(json!({

crates/rpc/proto/zed.proto 🔗

@@ -329,9 +329,11 @@ message UpdateWorktree {
     string root_name = 3;
     repeated Entry updated_entries = 4;
     repeated uint64 removed_entries = 5;
-    uint64 scan_id = 6;
-    bool is_last_update = 7;
-    string abs_path = 8;
+    repeated RepositoryEntry updated_repositories = 6;
+    repeated uint64 removed_repositories = 7;
+    uint64 scan_id = 8;
+    bool is_last_update = 9;
+    string abs_path = 10;
 }
 
 message CreateProjectEntry {
@@ -979,6 +981,11 @@ message Entry {
     bool is_ignored = 7;
 }
 
+message RepositoryEntry {
+    uint64 work_directory_id = 1;
+    optional string branch = 2;
+}
+
 message BufferState {
     uint64 id = 1;
     optional File file = 2;
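
Note: two things happen in this file. First, `scan_id`, `is_last_update`, and `abs_path` change field numbers (6/7/8 → 8/9/10) to make room for the repository lists, which alters `UpdateWorktree`'s wire layout and is presumably why `PROTOCOL_VERSION` is bumped in `crates/rpc/src/rpc.rs` below. Second, the new `RepositoryEntry` message carries only the work-directory entry id plus an optional branch name. With prost, `uint64` maps to `u64` and `optional string` to `Option<String>`, so the generated type is constructed roughly like this (hedged sketch; the real type comes from the generated `zed.messages.rs`):

// Approximation of the prost-generated struct for the new message.
#[derive(Clone, Debug, Default, PartialEq)]
pub struct RepositoryEntry {
    pub work_directory_id: u64, // ProjectEntryId of the repository's work directory
    pub branch: Option<String>, // absent when there is no current branch to report
}

fn main() {
    let entry = RepositoryEntry {
        work_directory_id: 42,
        branch: Some("main".to_string()),
    };
    assert_eq!(entry.branch.as_deref(), Some("main"));
}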

crates/rpc/src/proto.rs 🔗

@@ -5,13 +5,13 @@ use futures::{SinkExt as _, StreamExt as _};
 use prost::Message as _;
 use serde::Serialize;
 use std::any::{Any, TypeId};
-use std::fmt;
 use std::{
     cmp,
     fmt::Debug,
     io, iter,
     time::{Duration, SystemTime, UNIX_EPOCH},
 };
+use std::{fmt, mem};
 
 include!(concat!(env!("OUT_DIR"), "/zed.messages.rs"));
 
@@ -503,6 +503,21 @@ pub fn split_worktree_update(
             .collect();
 
         done = message.updated_entries.is_empty() && message.removed_entries.is_empty();
+
+        // Wait to send repositories until after we've guaranteed that their associated entries
+        // will be read
+        let updated_repositories = if done {
+            mem::take(&mut message.updated_repositories)
+        } else {
+            Default::default()
+        };
+
+        let removed_repositories = if done {
+            mem::take(&mut message.removed_repositories)
+        } else {
+            Default::default()
+        };
+
         Some(UpdateWorktree {
             project_id: message.project_id,
             worktree_id: message.worktree_id,
@@ -512,6 +527,8 @@ pub fn split_worktree_update(
             removed_entries,
             scan_id: message.scan_id,
             is_last_update: done && message.is_last_update,
+            updated_repositories,
+            removed_repositories,
         })
     })
 }
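
Note: as the comment says, `split_worktree_update` holds the repository lists back until the chunk that drains `updated_entries` and `removed_entries`, so a guest always has the work-directory entries in hand before it is asked to attach repository metadata to them. A toy model of that rule, assuming simplified payload types:

// Entries are emitted in fixed-size chunks; the repository payload rides only on
// the final chunk (the one after which no entries remain).
fn split(
    mut entries: Vec<u64>,
    repositories: Vec<&'static str>,
    chunk_size: usize,
) -> Vec<(Vec<u64>, Vec<&'static str>)> {
    let mut repositories = Some(repositories);
    let mut chunks = Vec::new();
    loop {
        let split_at = entries.len().min(chunk_size);
        let chunk: Vec<u64> = entries.drain(..split_at).collect();
        let done = entries.is_empty();
        let repos = if done {
            repositories.take().unwrap_or_default()
        } else {
            Vec::new()
        };
        chunks.push((chunk, repos));
        if done {
            break;
        }
    }
    chunks
}

#[test]
fn repositories_arrive_last() {
    let chunks = split(vec![1, 2, 3, 4, 5], vec!["dir1"], 2);
    assert!(chunks[..chunks.len() - 1].iter().all(|(_, repos)| repos.is_empty()));
    assert_eq!(chunks.last().unwrap().1, vec!["dir1"]);
}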

crates/rpc/src/rpc.rs 🔗

@@ -6,4 +6,4 @@ pub use conn::Connection;
 pub use peer::*;
 mod macros;
 
-pub const PROTOCOL_VERSION: u32 = 53;
+pub const PROTOCOL_VERSION: u32 = 54;

crates/sum_tree/src/tree_map.rs 🔗

@@ -2,13 +2,13 @@ use std::{cmp::Ordering, fmt::Debug};
 
 use crate::{Bias, Dimension, Item, KeyedItem, SeekTarget, SumTree, Summary};
 
-#[derive(Clone)]
+#[derive(Clone, Debug)]
 pub struct TreeMap<K, V>(SumTree<MapEntry<K, V>>)
 where
     K: Clone + Debug + Default + Ord,
     V: Clone + Debug;
 
-#[derive(Clone)]
+#[derive(Clone, Debug)]
 pub struct MapEntry<K, V> {
     key: K,
     value: V,
@@ -73,9 +73,58 @@ impl<K: Clone + Debug + Default + Ord, V: Clone + Debug> TreeMap<K, V> {
         removed
     }
 
+    /// Returns the key-value pair with the greatest key less than or equal to the given key.
+    pub fn closest(&self, key: &K) -> Option<(&K, &V)> {
+        let mut cursor = self.0.cursor::<MapKeyRef<'_, K>>();
+        let key = MapKeyRef(Some(key));
+        cursor.seek(&key, Bias::Right, &());
+        cursor.prev(&());
+        cursor.item().map(|item| (&item.key, &item.value))
+    }
+
+    pub fn update<F, T>(&mut self, key: &K, f: F) -> Option<T>
+    where
+        F: FnOnce(&mut V) -> T,
+    {
+        let mut cursor = self.0.cursor::<MapKeyRef<'_, K>>();
+        let key = MapKeyRef(Some(key));
+        let mut new_tree = cursor.slice(&key, Bias::Left, &());
+        let mut result = None;
+        if key.cmp(&cursor.end(&()), &()) == Ordering::Equal {
+            let mut updated = cursor.item().unwrap().clone();
+            result = Some(f(&mut updated.value));
+            new_tree.push(updated, &());
+            cursor.next(&());
+        }
+        new_tree.push_tree(cursor.suffix(&()), &());
+        drop(cursor);
+        self.0 = new_tree;
+        result
+    }
+
+    pub fn retain<F: FnMut(&K, &V) -> bool>(&mut self, mut predicate: F) {
+        let mut new_map = SumTree::<MapEntry<K, V>>::default();
+
+        let mut cursor = self.0.cursor::<MapKeyRef<'_, K>>();
+        cursor.next(&());
+        while let Some(item) = cursor.item() {
+            if predicate(&item.key, &item.value) {
+                new_map.push(item.clone(), &());
+            }
+            cursor.next(&());
+        }
+        drop(cursor);
+
+        self.0 = new_map;
+    }
+
     pub fn iter(&self) -> impl Iterator<Item = (&K, &V)> + '_ {
         self.0.iter().map(|entry| (&entry.key, &entry.value))
     }
+
+    pub fn values(&self) -> impl Iterator<Item = &V> + '_ {
+        self.0.iter().map(|entry| &entry.value)
+    }
 }
 
 impl<K, V> Default for TreeMap<K, V>
@@ -199,10 +248,16 @@ mod tests {
             vec![(&1, &"a"), (&2, &"b"), (&3, &"c")]
         );
 
+        assert_eq!(map.closest(&0), None);
+        assert_eq!(map.closest(&1), Some((&1, &"a")));
+        assert_eq!(map.closest(&10), Some((&3, &"c")));
+
         map.remove(&2);
         assert_eq!(map.get(&2), None);
         assert_eq!(map.iter().collect::<Vec<_>>(), vec![(&1, &"a"), (&3, &"c")]);
 
+        assert_eq!(map.closest(&2), Some((&1, &"a")));
+
         map.remove(&3);
         assert_eq!(map.get(&3), None);
         assert_eq!(map.iter().collect::<Vec<_>>(), vec![(&1, &"a")]);
@@ -210,5 +265,11 @@ mod tests {
         map.remove(&1);
         assert_eq!(map.get(&1), None);
         assert_eq!(map.iter().collect::<Vec<_>>(), vec![]);
+
+        map.insert(4, "d");
+        map.insert(5, "e");
+        map.insert(6, "f");
+        map.retain(|key, _| *key % 2 == 0);
+        assert_eq!(map.iter().collect::<Vec<_>>(), vec![(&4, &"d"), (&6, &"f")]);
     }
 }
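
Note: the test above exercises the new `closest` and `retain` methods; `update` is the third addition and is what the worktree code uses to bump a repository's `scan_id` and branch in place. A usage sketch under the same assumptions as the existing test (a small in-memory `TreeMap` with copyable values):

#[test]
fn update_sketch() {
    let mut map = TreeMap::default();
    map.insert(1, "a");
    // The closure edits the value in place; its return value is forwarded out.
    assert_eq!(map.update(&1, |value| { *value = "A"; "hit" }), Some("hit"));
    assert_eq!(map.get(&1), Some(&"A"));
    // A missing key leaves the map untouched and yields None.
    assert_eq!(map.update(&2, |_| "miss"), None);
    assert_eq!(map.iter().collect::<Vec<_>>(), vec![(&1, &"A")]);
}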

crates/theme/src/theme.rs 🔗

@@ -93,6 +93,7 @@ pub struct Titlebar {
     pub container: ContainerStyle,
     pub height: f32,
     pub title: TextStyle,
+    pub highlight_color: Color,
     pub item_spacing: f32,
     pub face_pile_spacing: f32,
     pub avatar_ribbon: AvatarRibbon,

styles/src/styleTree/workspace.ts 🔗

@@ -140,6 +140,7 @@ export default function workspace(colorScheme: ColorScheme) {
 
             // Project
             title: text(layer, "sans", "variant"),
+            highlight_color: text(layer, "sans", "active").color,
 
             // Collaborators
             leaderAvatar: {