Fixup more, tests finally pass

Created by Julia

Change summary

crates/project/src/project.rs  | 168 +++++++++++++++++++----------------
crates/project/src/worktree.rs |  29 +++--
2 files changed, 110 insertions(+), 87 deletions(-)
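
In short, `Project` now indexes open buffers by worktree entry id in addition to path, and worktree change notifications are keyed by `(Arc<Path>, ProjectEntryId)` rather than by path alone. Below is a minimal, self-contained sketch of the lookup order used in `update_local_worktree_buffers`; the types are simplified stand-ins, not the crate's actual `ProjectEntryId`, `ProjectPath`, or buffer handles:

```rust
use std::collections::HashMap;
use std::path::{Path, PathBuf};

// Simplified stand-ins for the crate's types, for illustration only.
type ProjectEntryId = usize;
type BufferId = u64;

struct BufferIndex {
    local_buffer_ids_by_path: HashMap<PathBuf, BufferId>,
    local_buffer_ids_by_entry_id: HashMap<ProjectEntryId, BufferId>,
}

impl BufferIndex {
    /// Resolve a changed worktree entry to an open buffer: try the entry id
    /// first, then fall back to the path, mirroring the lookup in the diff.
    fn buffer_for_change(&self, path: &Path, entry_id: ProjectEntryId) -> Option<BufferId> {
        self.local_buffer_ids_by_entry_id
            .get(&entry_id)
            .or_else(|| self.local_buffer_ids_by_path.get(path))
            .copied()
    }
}

fn main() {
    let mut index = BufferIndex {
        local_buffer_ids_by_path: HashMap::new(),
        local_buffer_ids_by_entry_id: HashMap::new(),
    };
    index.local_buffer_ids_by_entry_id.insert(7, 42);

    // A rename changes the path but (presumably) not the entry id,
    // so the buffer is still found via the entry-id index.
    assert_eq!(index.buffer_for_change(Path::new("new/name.rs"), 7), Some(42));
}
```

The path index remains as a fallback for buffers whose entry ids have not yet been recorded.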

Detailed changes

crates/project/src/project.rs

@@ -123,6 +123,7 @@ pub struct Project {
         HashMap<Arc<Path>, Shared<Task<Result<ModelHandle<Worktree>, Arc<anyhow::Error>>>>>,
     opened_buffers: HashMap<u64, OpenBuffer>,
     local_buffer_ids_by_path: HashMap<ProjectPath, u64>,
+    local_buffer_ids_by_entry_id: HashMap<ProjectEntryId, u64>,
     /// A mapping from a buffer ID to None means that we've started waiting for an ID but haven't finished loading it.
     /// Used for re-issuing buffer requests when peers temporarily disconnect
     incomplete_remote_buffers: HashMap<u64, Option<ModelHandle<Buffer>>>,
@@ -451,6 +452,7 @@ impl Project {
                 loading_buffers_by_path: Default::default(),
                 loading_local_worktrees: Default::default(),
                 local_buffer_ids_by_path: Default::default(),
+                local_buffer_ids_by_entry_id: Default::default(),
                 buffer_snapshots: Default::default(),
                 join_project_response_message_id: 0,
                 client_state: None,
@@ -520,6 +522,7 @@ impl Project {
                 incomplete_remote_buffers: Default::default(),
                 loading_local_worktrees: Default::default(),
                 local_buffer_ids_by_path: Default::default(),
+                local_buffer_ids_by_entry_id: Default::default(),
                 active_entry: None,
                 collaborators: Default::default(),
                 join_project_response_message_id: response.message_id,
@@ -1640,6 +1643,9 @@ impl Project {
                     },
                     remote_id,
                 );
+
+                self.local_buffer_ids_by_entry_id
+                    .insert(file.entry_id, remote_id);
             }
         }
 
@@ -4574,96 +4580,106 @@ impl Project {
     fn update_local_worktree_buffers(
         &mut self,
         worktree_handle: &ModelHandle<Worktree>,
-        changes: &HashMap<Arc<Path>, PathChange>,
+        changes: &HashMap<(Arc<Path>, ProjectEntryId), PathChange>,
         cx: &mut ModelContext<Self>,
     ) {
         let snapshot = worktree_handle.read(cx).snapshot();
 
         let mut renamed_buffers = Vec::new();
-        for path in changes.keys() {
+        for (path, entry_id) in changes.keys() {
             let worktree_id = worktree_handle.read(cx).id();
             let project_path = ProjectPath {
                 worktree_id,
                 path: path.clone(),
             };
 
-            if let Some(&buffer_id) = self.local_buffer_ids_by_path.get(&project_path) {
-                if let Some(buffer) = self
-                    .opened_buffers
-                    .get(&buffer_id)
-                    .and_then(|buffer| buffer.upgrade(cx))
-                {
-                    buffer.update(cx, |buffer, cx| {
-                        if let Some(old_file) = File::from_dyn(buffer.file()) {
-                            if old_file.worktree != *worktree_handle {
-                                return;
-                            }
+            let buffer_id = match self.local_buffer_ids_by_entry_id.get(entry_id) {
+                Some(&buffer_id) => buffer_id,
+                None => match self.local_buffer_ids_by_path.get(&project_path) {
+                    Some(&buffer_id) => buffer_id,
+                    None => continue,
+                },
+            };
 
-                            let new_file =
-                                if let Some(entry) = snapshot.entry_for_id(old_file.entry_id) {
-                                    File {
-                                        is_local: true,
-                                        entry_id: entry.id,
-                                        mtime: entry.mtime,
-                                        path: entry.path.clone(),
-                                        worktree: worktree_handle.clone(),
-                                        is_deleted: false,
-                                    }
-                                } else if let Some(entry) =
-                                    snapshot.entry_for_path(old_file.path().as_ref())
-                                {
-                                    File {
-                                        is_local: true,
-                                        entry_id: entry.id,
-                                        mtime: entry.mtime,
-                                        path: entry.path.clone(),
-                                        worktree: worktree_handle.clone(),
-                                        is_deleted: false,
-                                    }
-                                } else {
-                                    File {
-                                        is_local: true,
-                                        entry_id: old_file.entry_id,
-                                        path: old_file.path().clone(),
-                                        mtime: old_file.mtime(),
-                                        worktree: worktree_handle.clone(),
-                                        is_deleted: true,
-                                    }
-                                };
-
-                            let old_path = old_file.abs_path(cx);
-                            if new_file.abs_path(cx) != old_path {
-                                renamed_buffers.push((cx.handle(), old_file.clone()));
-                                self.local_buffer_ids_by_path.remove(&project_path);
-                                self.local_buffer_ids_by_path.insert(
-                                    ProjectPath {
-                                        worktree_id,
-                                        path: path.clone(),
-                                    },
-                                    buffer_id,
-                                );
-                            }
+            let open_buffer = self.opened_buffers.get(&buffer_id);
+            let buffer = if let Some(buffer) = open_buffer.and_then(|buffer| buffer.upgrade(cx)) {
+                buffer
+            } else {
+                self.opened_buffers.remove(&buffer_id);
+                self.local_buffer_ids_by_path.remove(&project_path);
+                self.local_buffer_ids_by_entry_id.remove(entry_id);
+                continue;
+            };
 
-                            if new_file != *old_file {
-                                if let Some(project_id) = self.remote_id() {
-                                    self.client
-                                        .send(proto::UpdateBufferFile {
-                                            project_id,
-                                            buffer_id: buffer_id as u64,
-                                            file: Some(new_file.to_proto()),
-                                        })
-                                        .log_err();
-                                }
+            buffer.update(cx, |buffer, cx| {
+                if let Some(old_file) = File::from_dyn(buffer.file()) {
+                    if old_file.worktree != *worktree_handle {
+                        return;
+                    }
 
-                                buffer.file_updated(Arc::new(new_file), cx).detach();
-                            }
+                    let new_file = if let Some(entry) = snapshot.entry_for_id(old_file.entry_id) {
+                        File {
+                            is_local: true,
+                            entry_id: entry.id,
+                            mtime: entry.mtime,
+                            path: entry.path.clone(),
+                            worktree: worktree_handle.clone(),
+                            is_deleted: false,
                         }
-                    });
-                } else {
-                    self.opened_buffers.remove(&buffer_id);
-                    self.local_buffer_ids_by_path.remove(&project_path);
+                    } else if let Some(entry) = snapshot.entry_for_path(old_file.path().as_ref()) {
+                        File {
+                            is_local: true,
+                            entry_id: entry.id,
+                            mtime: entry.mtime,
+                            path: entry.path.clone(),
+                            worktree: worktree_handle.clone(),
+                            is_deleted: false,
+                        }
+                    } else {
+                        File {
+                            is_local: true,
+                            entry_id: old_file.entry_id,
+                            path: old_file.path().clone(),
+                            mtime: old_file.mtime(),
+                            worktree: worktree_handle.clone(),
+                            is_deleted: true,
+                        }
+                    };
+
+                    let old_path = old_file.abs_path(cx);
+                    if new_file.abs_path(cx) != old_path {
+                        renamed_buffers.push((cx.handle(), old_file.clone()));
+                        self.local_buffer_ids_by_path.remove(&project_path);
+                        self.local_buffer_ids_by_path.insert(
+                            ProjectPath {
+                                worktree_id,
+                                path: path.clone(),
+                            },
+                            buffer_id,
+                        );
+                    }
+
+                    if new_file.entry_id != *entry_id {
+                        self.local_buffer_ids_by_entry_id.remove(entry_id);
+                        self.local_buffer_ids_by_entry_id
+                            .insert(new_file.entry_id, buffer_id);
+                    }
+
+                    if new_file != *old_file {
+                        if let Some(project_id) = self.remote_id() {
+                            self.client
+                                .send(proto::UpdateBufferFile {
+                                    project_id,
+                                    buffer_id: buffer_id as u64,
+                                    file: Some(new_file.to_proto()),
+                                })
+                                .log_err();
+                        }
+
+                        buffer.file_updated(Arc::new(new_file), cx).detach();
+                    }
                 }
-            }
+            });
         }
 
         for (buffer, old_file) in renamed_buffers {
@@ -4676,7 +4692,7 @@ impl Project {
     fn update_local_worktree_language_servers(
         &mut self,
         worktree_handle: &ModelHandle<Worktree>,
-        changes: &HashMap<Arc<Path>, PathChange>,
+        changes: &HashMap<(Arc<Path>, ProjectEntryId), PathChange>,
         cx: &mut ModelContext<Self>,
     ) {
         let worktree_id = worktree_handle.read(cx).id();
@@ -4693,7 +4709,7 @@ impl Project {
                         let params = lsp::DidChangeWatchedFilesParams {
                             changes: changes
                                 .iter()
-                                .filter_map(|(path, change)| {
+                                .filter_map(|((path, _), change)| {
                                     let path = abs_path.join(path);
                                     if watched_paths.matches(&path) {
                                         Some(lsp::FileEvent {

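The worktree-side changes below rekey the background scanner's change set, and the `UpdatedEntries` event, by `(Arc<Path>, ProjectEntryId)` instead of by path alone. A rough sketch of the resulting shape, again with a plain integer standing in for the real `ProjectEntryId`:

```rust
use std::collections::HashMap;
use std::path::Path;
use std::sync::Arc;

// Stand-in for the crate's entry id type, for illustration only.
type ProjectEntryId = usize;

#[allow(dead_code)]
#[derive(Debug)]
enum PathChange {
    Added,
    Removed,
    Updated,
    AddedOrUpdated,
}

fn main() {
    // Each change now carries the id of the affected entry alongside its path,
    // so consumers such as Project::update_local_worktree_buffers can match an
    // entry even when the path itself is what changed.
    let mut changes: HashMap<(Arc<Path>, ProjectEntryId), PathChange> = HashMap::new();
    changes.insert((Arc::from(Path::new("src/old.rs")), 7), PathChange::Removed);
    changes.insert((Arc::from(Path::new("src/new.rs")), 7), PathChange::Added);

    for ((path, entry_id), change) in &changes {
        println!("{} (entry {}): {:?}", path.display(), entry_id, change);
    }
}
```
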
crates/project/src/worktree.rs

@@ -265,7 +265,7 @@ enum ScanState {
     Started,
     Updated {
         snapshot: LocalSnapshot,
-        changes: HashMap<Arc<Path>, PathChange>,
+        changes: HashMap<(Arc<Path>, ProjectEntryId), PathChange>,
         barrier: Option<barrier::Sender>,
         scanning: bool,
     },
@@ -279,7 +279,7 @@ struct ShareState {
 }
 
 pub enum Event {
-    UpdatedEntries(HashMap<Arc<Path>, PathChange>),
+    UpdatedEntries(HashMap<(Arc<Path>, ProjectEntryId), PathChange>),
     UpdatedGitRepositories(HashMap<Arc<Path>, LocalRepositoryEntry>),
 }
 
@@ -3039,7 +3039,7 @@ impl BackgroundScanner {
         old_snapshot: &Snapshot,
         new_snapshot: &Snapshot,
         event_paths: &[Arc<Path>],
-    ) -> HashMap<Arc<Path>, PathChange> {
+    ) -> HashMap<(Arc<Path>, ProjectEntryId), PathChange> {
         use PathChange::{Added, AddedOrUpdated, Removed, Updated};
 
         let mut changes = HashMap::default();
@@ -3065,7 +3065,7 @@ impl BackgroundScanner {
 
                         match Ord::cmp(&old_entry.path, &new_entry.path) {
                             Ordering::Less => {
-                                changes.insert(old_entry.path.clone(), Removed);
+                                changes.insert((old_entry.path.clone(), old_entry.id), Removed);
                                 old_paths.next(&());
                             }
                             Ordering::Equal => {
@@ -3073,31 +3073,35 @@ impl BackgroundScanner {
                                     // If the worktree was not fully initialized when this event was generated,
                                     // we can't know whether this entry was added during the scan or whether
                                     // it was merely updated.
-                                    changes.insert(new_entry.path.clone(), AddedOrUpdated);
+                                    changes.insert(
+                                        (new_entry.path.clone(), new_entry.id),
+                                        AddedOrUpdated,
+                                    );
                                 } else if old_entry.mtime != new_entry.mtime {
-                                    changes.insert(new_entry.path.clone(), Updated);
+                                    changes.insert((new_entry.path.clone(), new_entry.id), Updated);
                                 }
                                 old_paths.next(&());
                                 new_paths.next(&());
                             }
                             Ordering::Greater => {
-                                changes.insert(new_entry.path.clone(), Added);
+                                changes.insert((new_entry.path.clone(), new_entry.id), Added);
                                 new_paths.next(&());
                             }
                         }
                     }
                     (Some(old_entry), None) => {
-                        changes.insert(old_entry.path.clone(), Removed);
+                        changes.insert((old_entry.path.clone(), old_entry.id), Removed);
                         old_paths.next(&());
                     }
                     (None, Some(new_entry)) => {
-                        changes.insert(new_entry.path.clone(), Added);
+                        changes.insert((new_entry.path.clone(), new_entry.id), Added);
                         new_paths.next(&());
                     }
                     (None, None) => break,
                 }
             }
         }
+
         changes
     }
 
@@ -3937,7 +3941,7 @@ mod tests {
 
             cx.subscribe(&worktree, move |tree, _, event, _| {
                 if let Event::UpdatedEntries(changes) = event {
-                    for (path, change_type) in changes.iter() {
+                    for ((path, _), change_type) in changes.iter() {
                         let path = path.clone();
                         let ix = match paths.binary_search(&path) {
                             Ok(ix) | Err(ix) => ix,
@@ -3947,13 +3951,16 @@ mod tests {
                                 assert_ne!(paths.get(ix), Some(&path));
                                 paths.insert(ix, path);
                             }
+
                             PathChange::Removed => {
                                 assert_eq!(paths.get(ix), Some(&path));
                                 paths.remove(ix);
                             }
+
                             PathChange::Updated => {
                                 assert_eq!(paths.get(ix), Some(&path));
                             }
+
                             PathChange::AddedOrUpdated => {
                                 if paths[ix] != path {
                                     paths.insert(ix, path);
@@ -3961,6 +3968,7 @@ mod tests {
                             }
                         }
                     }
+
                     let new_paths = tree.paths().cloned().collect::<Vec<_>>();
                     assert_eq!(paths, new_paths, "incorrect changes: {:?}", changes);
                 }
@@ -3970,7 +3978,6 @@ mod tests {
 
         let mut snapshots = Vec::new();
         let mut mutations_len = operations;
-        fs.as_fake().pause_events().await;
         while mutations_len > 1 {
             if rng.gen_bool(0.2) {
                 worktree