diff --git a/zed/src/editor.rs b/zed/src/editor.rs
index 4d2a1fdc179163f55d2cb4e202f96d35717bbe4a..a9298fea9d59271d1667ccdf0d04aff0e2ba8262 100644
--- a/zed/src/editor.rs
+++ b/zed/src/editor.rs
@@ -7,7 +7,7 @@ use crate::{
     settings::{Settings, StyleId},
     util::{post_inc, Bias},
     workspace,
-    worktree::FileHandle,
+    worktree::File,
 };
 use anyhow::Result;
 pub use buffer::*;
@@ -422,7 +422,7 @@ impl Editor {
                     reversed: false,
                     goal: SelectionGoal::None,
                 }],
-                Some(cx),
+                cx,
             )
         });
         Self {
@@ -723,7 +723,7 @@ impl Editor {
         let mut new_selections = Vec::new();
         self.buffer.update(cx, |buffer, cx| {
             let edit_ranges = old_selections.iter().map(|(_, range)| range.clone());
-            buffer.edit(edit_ranges, text.as_str(), Some(cx));
+            buffer.edit(edit_ranges, text.as_str(), cx);
             let text_len = text.len() as isize;
             let mut delta = 0_isize;
             new_selections = old_selections
@@ -886,7 +886,7 @@ impl Editor {
             })
             .collect();
         self.buffer
-            .update(cx, |buffer, cx| buffer.edit(edit_ranges, "", Some(cx)))
+            .update(cx, |buffer, cx| buffer.edit(edit_ranges, "", cx))
             .unwrap();
         self.update_selections(new_selections, true, cx);
         self.end_transaction(cx);
@@ -939,7 +939,7 @@ impl Editor {
 
         self.buffer.update(cx, |buffer, cx| {
             for (offset, text) in edits.into_iter().rev() {
-                buffer.edit(Some(offset..offset), text, Some(cx)).unwrap();
+                buffer.edit(Some(offset..offset), text, cx).unwrap();
             }
         });
 
@@ -1029,7 +1029,7 @@ impl Editor {
         self.unfold_ranges(old_folds, cx);
         self.buffer.update(cx, |buffer, cx| {
             for (range, text) in edits.into_iter().rev() {
-                buffer.edit(Some(range), text, Some(cx)).unwrap();
+                buffer.edit(Some(range), text, cx).unwrap();
             }
         });
         self.fold_ranges(new_folds, cx);
@@ -1117,7 +1117,7 @@ impl Editor {
         self.unfold_ranges(old_folds, cx);
         self.buffer.update(cx, |buffer, cx| {
             for (range, text) in edits.into_iter().rev() {
-                buffer.edit(Some(range), text, Some(cx)).unwrap();
+                buffer.edit(Some(range), text, cx).unwrap();
             }
         });
         self.fold_ranges(new_folds, cx);
@@ -1227,11 +1227,11 @@ impl Editor {
                         if selection_start == selection_end && clipboard_selection.is_entire_line {
                             let line_start = Point::new(selection_start.row, 0);
                             buffer
-                                .edit(Some(line_start..line_start), to_insert, Some(cx))
+                                .edit(Some(line_start..line_start), to_insert, cx)
                                 .unwrap();
                         } else {
                             buffer
-                                .edit(Some(&selection.start..&selection.end), to_insert, Some(cx))
+                                .edit(Some(&selection.start..&selection.end), to_insert, cx)
                                 .unwrap();
                         };
 
@@ -1254,11 +1254,11 @@ impl Editor {
     }
 
     pub fn undo(&mut self, _: &(), cx: &mut ViewContext<Self>) {
-        self.buffer.update(cx, |buffer, cx| buffer.undo(Some(cx)));
+        self.buffer.update(cx, |buffer, cx| buffer.undo(cx));
     }
 
     pub fn redo(&mut self, _: &(), cx: &mut ViewContext<Self>) {
-        self.buffer.update(cx, |buffer, cx| buffer.redo(Some(cx)));
+        self.buffer.update(cx, |buffer, cx| buffer.redo(cx));
     }
 
     pub fn move_left(&mut self, _: &(), cx: &mut ViewContext<Self>) {
@@ -1997,7 +1997,7 @@ impl Editor {
 
         self.buffer.update(cx, |buffer, cx| {
             buffer
-                .update_selection_set(self.selection_set_id, selections, Some(cx))
+                .update_selection_set(self.selection_set_id, selections, cx)
                 .unwrap()
         });
         self.pause_cursor_blinking(cx);
@@ -2012,9 +2012,9 @@
     }
 
     fn start_transaction(&self, cx: &mut ViewContext<Self>) {
-        self.buffer.update(cx, |buffer, _| {
+        self.buffer.update(cx, |buffer, cx| {
             buffer
-                .start_transaction(Some(self.selection_set_id))
+                .start_transaction(Some(self.selection_set_id), cx)
                 .unwrap()
         });
     }
@@ -2022,7 +2022,7 @@
     fn end_transaction(&self, cx: &mut ViewContext<Self>) {
         self.buffer.update(cx, |buffer, 
cx| { buffer - .end_transaction(Some(self.selection_set_id), Some(cx)) + .end_transaction(Some(self.selection_set_id), cx) .unwrap() }); } @@ -2444,7 +2444,7 @@ impl View for Editor { impl workspace::Item for Buffer { type View = Editor; - fn file(&self) -> Option<&FileHandle> { + fn file(&self) -> Option<&ModelHandle> { self.file() } @@ -2474,7 +2474,7 @@ impl workspace::ItemView for Editor { .buffer .read(cx) .file() - .and_then(|file| file.file_name(cx)); + .and_then(|file| file.read(cx).file_name(cx)); if let Some(name) = filename { name.to_string_lossy().into() } else { @@ -2483,7 +2483,10 @@ impl workspace::ItemView for Editor { } fn entry_id(&self, cx: &AppContext) -> Option<(usize, Arc)> { - self.buffer.read(cx).file().map(|file| file.entry_id()) + self.buffer + .read(cx) + .file() + .map(|file| file.read(cx).entry_id()) } fn clone_on_split(&self, cx: &mut ViewContext) -> Option @@ -2497,18 +2500,18 @@ impl workspace::ItemView for Editor { fn save( &mut self, - new_file: Option, + new_file: Option>, cx: &mut ViewContext, ) -> Task> { self.buffer.update(cx, |b, cx| b.save(new_file, cx)) } fn is_dirty(&self, cx: &AppContext) -> bool { - self.buffer.read(cx).is_dirty() + self.buffer.read(cx).is_dirty(cx) } fn has_conflict(&self, cx: &AppContext) -> bool { - self.buffer.read(cx).has_conflict() + self.buffer.read(cx).has_conflict(cx) } } @@ -2819,7 +2822,7 @@ mod tests { Point::new(1, 1)..Point::new(1, 1), ], "\t", - Some(cx), + cx, ) .unwrap(); }); diff --git a/zed/src/editor/buffer.rs b/zed/src/editor/buffer.rs index 776b83dbfca16c61d41bb076ac30eafaa3778fd9..b02be1541e005b76e70675aa7839e1eb459a02c5 100644 --- a/zed/src/editor/buffer.rs +++ b/zed/src/editor/buffer.rs @@ -19,10 +19,10 @@ use crate::{ sum_tree::{self, FilterCursor, SumTree}, time::{self, ReplicaId}, util::Bias, - worktree::FileHandle, + worktree::File, }; use anyhow::{anyhow, Result}; -use gpui::{AppContext, Entity, ModelContext, Task}; +use gpui::{AppContext, Entity, ModelContext, ModelHandle, Task}; use lazy_static::lazy_static; use std::{ cell::RefCell, @@ -114,7 +114,7 @@ pub struct Buffer { last_edit: time::Local, undo_map: UndoMap, history: History, - file: Option, + file: Option>, language: Option>, syntax_tree: Mutex>, is_parsing: bool, @@ -420,7 +420,7 @@ impl Buffer { pub fn from_history( replica_id: ReplicaId, history: History, - file: Option, + file: Option>, language: Option>, cx: &mut ModelContext, ) -> Self { @@ -430,42 +430,13 @@ impl Buffer { fn build( replica_id: ReplicaId, history: History, - file: Option, + file: Option>, language: Option>, cx: &mut ModelContext, ) -> Self { let saved_mtime; if let Some(file) = file.as_ref() { - saved_mtime = file.mtime(); - file.observe_from_model(cx, |this, file, cx| { - let version = this.version.clone(); - if this.version == this.saved_version { - if file.is_deleted() { - cx.emit(Event::Dirtied); - } else { - cx.spawn(|handle, mut cx| async move { - let (current_version, history) = handle.read_with(&cx, |this, cx| { - (this.version.clone(), file.load_history(cx.as_ref())) - }); - if let (Ok(history), true) = (history.await, current_version == version) - { - let diff = handle - .read_with(&cx, |this, cx| this.diff(history.base_text, cx)) - .await; - handle.update(&mut cx, |this, cx| { - if let Some(_ops) = this.set_text_via_diff(diff, cx) { - this.saved_version = this.version.clone(); - this.saved_mtime = file.mtime(); - cx.emit(Event::Reloaded); - } - }); - } - }) - .detach(); - } - } - cx.emit(Event::FileHandleChanged); - }); + saved_mtime = 
file.read(cx).mtime(cx.as_ref()); } else { saved_mtime = Duration::ZERO; } @@ -495,7 +466,7 @@ impl Buffer { last_edit: time::Local::default(), undo_map: Default::default(), history, - file, + file: None, syntax_tree: Mutex::new(None), is_parsing: false, language, @@ -508,6 +479,7 @@ impl Buffer { local_clock: time::Local::new(replica_id), lamport_clock: time::Lamport::new(replica_id), }; + result.set_file(file, cx); result.reparse(cx); result } @@ -521,13 +493,13 @@ impl Buffer { } } - pub fn file(&self) -> Option<&FileHandle> { + pub fn file(&self) -> Option<&ModelHandle> { self.file.as_ref() } pub fn save( &mut self, - new_file: Option, + new_file: Option>, cx: &mut ModelContext, ) -> Task> { let text = self.visible_text.clone(); @@ -536,7 +508,7 @@ impl Buffer { cx.spawn(|handle, mut cx| async move { if let Some(file) = new_file.as_ref().or(file.as_ref()) { - let result = cx.read(|cx| file.save(text, cx.as_ref())).await; + let result = file.read_with(&cx, |file, cx| file.save(text, cx)).await; if result.is_ok() { handle.update(&mut cx, |me, cx| me.did_save(version, new_file, cx)); } @@ -550,19 +522,57 @@ impl Buffer { fn did_save( &mut self, version: time::Global, - file: Option, - cx: &mut ModelContext, + file: Option>, + cx: &mut ModelContext, ) { if file.is_some() { - self.file = file; + self.set_file(file, cx); } if let Some(file) = &self.file { - self.saved_mtime = file.mtime(); + self.saved_mtime = file.read(cx).mtime(cx.as_ref()); } self.saved_version = version; cx.emit(Event::Saved); } + fn set_file(&mut self, file: Option>, cx: &mut ModelContext) { + self.file = file; + if let Some(file) = &self.file { + cx.observe(file, |this, file, cx| { + let version = this.version.clone(); + if this.version == this.saved_version { + if file.read(cx).is_deleted(cx.as_ref()) { + cx.emit(Event::Dirtied); + } else { + cx.spawn(|this, mut cx| async move { + let (current_version, history) = this.read_with(&cx, |this, cx| { + ( + this.version.clone(), + file.read(cx).load_history(cx.as_ref()), + ) + }); + if let (Ok(history), true) = (history.await, current_version == version) + { + let diff = this + .read_with(&cx, |this, cx| this.diff(history.base_text, cx)) + .await; + this.update(&mut cx, |this, cx| { + if let Some(_ops) = this.set_text_via_diff(diff, cx) { + this.saved_version = this.version.clone(); + this.saved_mtime = file.read(cx).mtime(cx.as_ref()); + cx.emit(Event::Reloaded); + } + }); + } + }) + .detach(); + } + } + cx.emit(Event::FileHandleChanged); + }); + } + } + pub fn syntax_tree(&self) -> Option { if let Some(syntax_tree) = self.syntax_tree.lock().as_mut() { let mut edited = false; @@ -734,42 +744,44 @@ impl Buffer { cx: &mut ModelContext, ) -> Option> { if self.version == diff.base_version { - self.start_transaction(None).unwrap(); + self.start_transaction(None, cx).unwrap(); let mut operations = Vec::new(); let mut offset = 0; for (tag, len) in diff.changes { let range = offset..(offset + len); match tag { ChangeTag::Equal => offset += len, - ChangeTag::Delete => { - operations.push(self.edit(Some(range), "", Some(cx)).unwrap()) - } + ChangeTag::Delete => operations.push(self.edit(Some(range), "", cx).unwrap()), ChangeTag::Insert => { operations.push( - self.edit(Some(offset..offset), &diff.new_text[range], Some(cx)) + self.edit(Some(offset..offset), &diff.new_text[range], cx) .unwrap(), ); offset += len; } } } - self.end_transaction(None, Some(cx)).unwrap(); + self.end_transaction(None, cx).unwrap(); Some(operations) } else { None } } - pub fn is_dirty(&self) -> bool { - 
self.version > self.saved_version || self.file.as_ref().map_or(false, |f| f.is_deleted()) + pub fn is_dirty(&self, cx: &AppContext) -> bool { + self.version > self.saved_version + || self + .file + .as_ref() + .map_or(false, |file| file.read(cx).is_deleted(cx)) } - pub fn has_conflict(&self) -> bool { + pub fn has_conflict(&self, cx: &AppContext) -> bool { self.version > self.saved_version && self .file .as_ref() - .map_or(false, |f| f.mtime() > self.saved_mtime) + .map_or(false, |file| file.read(cx).mtime(cx) > self.saved_mtime) } pub fn version(&self) -> time::Global { @@ -849,11 +861,20 @@ impl Buffer { self.deferred_ops.len() } - pub fn start_transaction(&mut self, set_id: Option) -> Result<()> { - self.start_transaction_at(set_id, Instant::now()) + pub fn start_transaction( + &mut self, + set_id: Option, + cx: &mut ModelContext, + ) -> Result<()> { + self.start_transaction_at(set_id, Instant::now(), cx) } - fn start_transaction_at(&mut self, set_id: Option, now: Instant) -> Result<()> { + fn start_transaction_at( + &mut self, + set_id: Option, + now: Instant, + cx: &mut ModelContext, + ) -> Result<()> { let selections = if let Some(set_id) = set_id { let selections = self .selections @@ -863,15 +884,19 @@ impl Buffer { } else { None }; - self.history - .start_transaction(self.version.clone(), self.is_dirty(), selections, now); + self.history.start_transaction( + self.version.clone(), + self.is_dirty(cx.as_ref()), + selections, + now, + ); Ok(()) } pub fn end_transaction( &mut self, set_id: Option, - cx: Option<&mut ModelContext>, + cx: &mut ModelContext, ) -> Result<()> { self.end_transaction_at(set_id, Instant::now(), cx) } @@ -880,7 +905,7 @@ impl Buffer { &mut self, set_id: Option, now: Instant, - cx: Option<&mut ModelContext>, + cx: &mut ModelContext, ) -> Result<()> { let selections = if let Some(set_id) = set_id { let selections = self @@ -897,13 +922,10 @@ impl Buffer { let was_dirty = transaction.buffer_was_dirty; self.history.group(); - if let Some(cx) = cx { - cx.notify(); - - if self.edits_since(since).next().is_some() { - self.did_edit(was_dirty, cx); - self.reparse(cx); - } + cx.notify(); + if self.edits_since(since).next().is_some() { + self.did_edit(was_dirty, cx); + self.reparse(cx); } } @@ -914,7 +936,7 @@ impl Buffer { &mut self, ranges_iter: I, new_text: T, - cx: Option<&mut ModelContext>, + cx: &mut ModelContext, ) -> Option where I: IntoIterator>, @@ -949,7 +971,7 @@ impl Buffer { if ranges.is_empty() { None } else { - self.start_transaction_at(None, Instant::now()).unwrap(); + self.start_transaction_at(None, Instant::now(), cx).unwrap(); let timestamp = InsertionTimestamp { replica_id: self.replica_id, local: self.local_clock.tick().value, @@ -978,7 +1000,7 @@ impl Buffer { pub fn add_selection_set( &mut self, selections: impl Into>, - cx: Option<&mut ModelContext>, + cx: &mut ModelContext, ) -> (SelectionSetId, Operation) { let selections = selections.into(); let lamport_timestamp = self.lamport_clock.tick(); @@ -986,9 +1008,7 @@ impl Buffer { .insert(lamport_timestamp, Arc::clone(&selections)); self.selections_last_update += 1; - if let Some(cx) = cx { - cx.notify(); - } + cx.notify(); ( lamport_timestamp, @@ -1004,7 +1024,7 @@ impl Buffer { &mut self, set_id: SelectionSetId, selections: impl Into>, - cx: Option<&mut ModelContext>, + cx: &mut ModelContext, ) -> Result { let selections = selections.into(); self.selections.insert(set_id, selections.clone()); @@ -1012,9 +1032,7 @@ impl Buffer { let lamport_timestamp = self.lamport_clock.tick(); 
self.selections_last_update += 1; - if let Some(cx) = cx { - cx.notify(); - } + cx.notify(); Ok(Operation::UpdateSelections { set_id, @@ -1026,7 +1044,7 @@ impl Buffer { pub fn remove_selection_set( &mut self, set_id: SelectionSetId, - cx: Option<&mut ModelContext>, + cx: &mut ModelContext, ) -> Result { self.selections .remove(&set_id) @@ -1034,9 +1052,7 @@ impl Buffer { let lamport_timestamp = self.lamport_clock.tick(); self.selections_last_update += 1; - if let Some(cx) = cx { - cx.notify(); - } + cx.notify(); Ok(Operation::UpdateSelections { set_id, @@ -1055,9 +1071,9 @@ impl Buffer { pub fn apply_ops>( &mut self, ops: I, - cx: Option<&mut ModelContext>, + cx: &mut ModelContext, ) -> Result<()> { - let was_dirty = self.is_dirty(); + let was_dirty = self.is_dirty(cx.as_ref()); let old_version = self.version.clone(); let mut deferred_ops = Vec::new(); @@ -1072,12 +1088,10 @@ impl Buffer { self.deferred_ops.insert(deferred_ops); self.flush_deferred_ops()?; - if let Some(cx) = cx { - cx.notify(); - if self.edits_since(old_version).next().is_some() { - self.did_edit(was_dirty, cx); - self.reparse(cx); - } + cx.notify(); + if self.edits_since(old_version).next().is_some() { + self.did_edit(was_dirty, cx); + self.reparse(cx); } Ok(()) @@ -1269,8 +1283,8 @@ impl Buffer { self.lamport_clock.observe(timestamp.lamport()); } - pub fn undo(&mut self, mut cx: Option<&mut ModelContext>) -> Vec { - let was_dirty = self.is_dirty(); + pub fn undo(&mut self, cx: &mut ModelContext) -> Vec { + let was_dirty = self.is_dirty(cx.as_ref()); let old_version = self.version.clone(); let mut ops = Vec::new(); @@ -1281,23 +1295,21 @@ impl Buffer { } if let Some((set_id, selections)) = selections { - let _ = self.update_selection_set(set_id, selections, cx.as_deref_mut()); + let _ = self.update_selection_set(set_id, selections, cx); } } - if let Some(cx) = cx { - cx.notify(); - if self.edits_since(old_version).next().is_some() { - self.did_edit(was_dirty, cx); - self.reparse(cx); - } + cx.notify(); + if self.edits_since(old_version).next().is_some() { + self.did_edit(was_dirty, cx); + self.reparse(cx); } ops } - pub fn redo(&mut self, mut cx: Option<&mut ModelContext>) -> Vec { - let was_dirty = self.is_dirty(); + pub fn redo(&mut self, cx: &mut ModelContext) -> Vec { + let was_dirty = self.is_dirty(cx.as_ref()); let old_version = self.version.clone(); let mut ops = Vec::new(); @@ -1308,16 +1320,14 @@ impl Buffer { } if let Some((set_id, selections)) = selections { - let _ = self.update_selection_set(set_id, selections, cx.as_deref_mut()); + let _ = self.update_selection_set(set_id, selections, cx); } } - if let Some(cx) = cx { - cx.notify(); - if self.edits_since(old_version).next().is_some() { - self.did_edit(was_dirty, cx); - self.reparse(cx); - } + cx.notify(); + if self.edits_since(old_version).next().is_some() { + self.did_edit(was_dirty, cx); + self.reparse(cx); } ops @@ -2230,15 +2240,15 @@ mod tests { cx.add_model(|cx| { let mut buffer = Buffer::new(0, "abc", cx); assert_eq!(buffer.text(), "abc"); - buffer.edit(vec![3..3], "def", None).unwrap(); + buffer.edit(vec![3..3], "def", cx).unwrap(); assert_eq!(buffer.text(), "abcdef"); - buffer.edit(vec![0..0], "ghi", None).unwrap(); + buffer.edit(vec![0..0], "ghi", cx).unwrap(); assert_eq!(buffer.text(), "ghiabcdef"); - buffer.edit(vec![5..5], "jkl", None).unwrap(); + buffer.edit(vec![5..5], "jkl", cx).unwrap(); assert_eq!(buffer.text(), "ghiabjklcdef"); - buffer.edit(vec![6..7], "", None).unwrap(); + buffer.edit(vec![6..7], "", cx).unwrap(); 
assert_eq!(buffer.text(), "ghiabjlcdef"); - buffer.edit(vec![4..9], "mno", None).unwrap(); + buffer.edit(vec![4..9], "mno", cx).unwrap(); assert_eq!(buffer.text(), "ghiamnoef"); buffer }); @@ -2265,29 +2275,29 @@ mod tests { // An edit emits an edited event, followed by a dirtied event, // since the buffer was previously in a clean state. - let op = buffer.edit(Some(2..4), "XYZ", Some(cx)).unwrap(); + let op = buffer.edit(Some(2..4), "XYZ", cx).unwrap(); buffer_ops.push(op); // An empty transaction does not emit any events. - buffer.start_transaction(None).unwrap(); - buffer.end_transaction(None, Some(cx)).unwrap(); + buffer.start_transaction(None, cx).unwrap(); + buffer.end_transaction(None, cx).unwrap(); // A transaction containing two edits emits one edited event. now += Duration::from_secs(1); - buffer.start_transaction_at(None, now).unwrap(); - buffer_ops.push(buffer.edit(Some(5..5), "u", Some(cx)).unwrap()); - buffer_ops.push(buffer.edit(Some(6..6), "w", Some(cx)).unwrap()); - buffer.end_transaction_at(None, now, Some(cx)).unwrap(); + buffer.start_transaction_at(None, now, cx).unwrap(); + buffer_ops.push(buffer.edit(Some(5..5), "u", cx).unwrap()); + buffer_ops.push(buffer.edit(Some(6..6), "w", cx).unwrap()); + buffer.end_transaction_at(None, now, cx).unwrap(); // Undoing a transaction emits one edited event. - let ops = buffer.undo(Some(cx)); + let ops = buffer.undo(cx); buffer_ops.extend_from_slice(&ops); }); // Incorporating a set of remote ops emits a single edited event, // followed by a dirtied event. buffer2.update(cx, |buffer, cx| { - buffer.apply_ops(buffer_ops, Some(cx)).unwrap(); + buffer.apply_ops(buffer_ops, cx).unwrap(); }); let buffer_1_events = buffer_1_events.borrow(); @@ -2329,7 +2339,7 @@ mod tests { ); for _i in 0..operations { - let (old_ranges, new_text, _) = buffer.randomly_mutate(rng, None); + let (old_ranges, new_text, _) = buffer.randomly_mutate(rng, cx); for old_range in old_ranges.iter().rev() { reference_string.replace_range(old_range.clone(), &new_text); } @@ -2379,7 +2389,7 @@ mod tests { let new_len = new_range.end - new_range.start; let old_start = (old_range.start as isize + delta) as usize; let new_text: String = buffer.text_for_range(new_range).collect(); - old_buffer.edit(Some(old_start..old_start + old_len), new_text, None); + old_buffer.edit(Some(old_start..old_start + old_len), new_text, cx); delta += new_len as isize - old_len as isize; } @@ -2395,10 +2405,10 @@ mod tests { fn test_line_len(cx: &mut gpui::MutableAppContext) { cx.add_model(|cx| { let mut buffer = Buffer::new(0, "", cx); - buffer.edit(vec![0..0], "abcd\nefg\nhij", None).unwrap(); - buffer.edit(vec![12..12], "kl\nmno", None).unwrap(); - buffer.edit(vec![18..18], "\npqrs\n", None).unwrap(); - buffer.edit(vec![18..21], "\nPQ", None).unwrap(); + buffer.edit(vec![0..0], "abcd\nefg\nhij", cx).unwrap(); + buffer.edit(vec![12..12], "kl\nmno", cx).unwrap(); + buffer.edit(vec![18..18], "\npqrs\n", cx).unwrap(); + buffer.edit(vec![18..21], "\nPQ", cx).unwrap(); assert_eq!(buffer.line_len(0), 4); assert_eq!(buffer.line_len(1), 3); @@ -2477,10 +2487,10 @@ mod tests { fn test_chars_at(cx: &mut gpui::MutableAppContext) { cx.add_model(|cx| { let mut buffer = Buffer::new(0, "", cx); - buffer.edit(vec![0..0], "abcd\nefgh\nij", None).unwrap(); - buffer.edit(vec![12..12], "kl\nmno", None).unwrap(); - buffer.edit(vec![18..18], "\npqrs", None).unwrap(); - buffer.edit(vec![18..21], "\nPQ", None).unwrap(); + buffer.edit(vec![0..0], "abcd\nefgh\nij", cx).unwrap(); + buffer.edit(vec![12..12], "kl\nmno", 
cx).unwrap(); + buffer.edit(vec![18..18], "\npqrs", cx).unwrap(); + buffer.edit(vec![18..21], "\nPQ", cx).unwrap(); let chars = buffer.chars_at(Point::new(0, 0)); assert_eq!(chars.collect::(), "abcd\nefgh\nijkl\nmno\nPQrs"); @@ -2499,8 +2509,8 @@ mod tests { // Regression test: let mut buffer = Buffer::new(0, "", cx); - buffer.edit(vec![0..0], "[workspace]\nmembers = [\n \"xray_core\",\n \"xray_server\",\n \"xray_cli\",\n \"xray_wasm\",\n]\n", None).unwrap(); - buffer.edit(vec![60..60], "\n", None).unwrap(); + buffer.edit(vec![0..0], "[workspace]\nmembers = [\n \"xray_core\",\n \"xray_server\",\n \"xray_cli\",\n \"xray_wasm\",\n]\n", cx).unwrap(); + buffer.edit(vec![60..60], "\n", cx).unwrap(); let chars = buffer.chars_at(Point::new(6, 0)); assert_eq!(chars.collect::(), " \"xray_wasm\",\n]\n"); @@ -2513,32 +2523,32 @@ mod tests { fn test_anchors(cx: &mut gpui::MutableAppContext) { cx.add_model(|cx| { let mut buffer = Buffer::new(0, "", cx); - buffer.edit(vec![0..0], "abc", None).unwrap(); + buffer.edit(vec![0..0], "abc", cx).unwrap(); let left_anchor = buffer.anchor_before(2); let right_anchor = buffer.anchor_after(2); - buffer.edit(vec![1..1], "def\n", None).unwrap(); + buffer.edit(vec![1..1], "def\n", cx).unwrap(); assert_eq!(buffer.text(), "adef\nbc"); assert_eq!(left_anchor.to_offset(&buffer), 6); assert_eq!(right_anchor.to_offset(&buffer), 6); assert_eq!(left_anchor.to_point(&buffer), Point { row: 1, column: 1 }); assert_eq!(right_anchor.to_point(&buffer), Point { row: 1, column: 1 }); - buffer.edit(vec![2..3], "", None).unwrap(); + buffer.edit(vec![2..3], "", cx).unwrap(); assert_eq!(buffer.text(), "adf\nbc"); assert_eq!(left_anchor.to_offset(&buffer), 5); assert_eq!(right_anchor.to_offset(&buffer), 5); assert_eq!(left_anchor.to_point(&buffer), Point { row: 1, column: 1 }); assert_eq!(right_anchor.to_point(&buffer), Point { row: 1, column: 1 }); - buffer.edit(vec![5..5], "ghi\n", None).unwrap(); + buffer.edit(vec![5..5], "ghi\n", cx).unwrap(); assert_eq!(buffer.text(), "adf\nbghi\nc"); assert_eq!(left_anchor.to_offset(&buffer), 5); assert_eq!(right_anchor.to_offset(&buffer), 9); assert_eq!(left_anchor.to_point(&buffer), Point { row: 1, column: 1 }); assert_eq!(right_anchor.to_point(&buffer), Point { row: 2, column: 0 }); - buffer.edit(vec![7..9], "", None).unwrap(); + buffer.edit(vec![7..9], "", cx).unwrap(); assert_eq!(buffer.text(), "adf\nbghc"); assert_eq!(left_anchor.to_offset(&buffer), 5); assert_eq!(right_anchor.to_offset(&buffer), 7); @@ -2655,7 +2665,7 @@ mod tests { let before_start_anchor = buffer.anchor_before(0); let after_end_anchor = buffer.anchor_after(0); - buffer.edit(vec![0..0], "abc", None).unwrap(); + buffer.edit(vec![0..0], "abc", cx).unwrap(); assert_eq!(buffer.text(), "abc"); assert_eq!(before_start_anchor.to_offset(&buffer), 0); assert_eq!(after_end_anchor.to_offset(&buffer), 3); @@ -2663,8 +2673,8 @@ mod tests { let after_start_anchor = buffer.anchor_after(0); let before_end_anchor = buffer.anchor_before(3); - buffer.edit(vec![3..3], "def", None).unwrap(); - buffer.edit(vec![0..0], "ghi", None).unwrap(); + buffer.edit(vec![3..3], "def", cx).unwrap(); + buffer.edit(vec![0..0], "ghi", cx).unwrap(); assert_eq!(buffer.text(), "ghiabcdef"); assert_eq!(before_start_anchor.to_offset(&buffer), 0); assert_eq!(after_start_anchor.to_offset(&buffer), 3); @@ -2687,7 +2697,7 @@ mod tests { cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete()) .await; - let file1 = cx.update(|cx| tree.file("file1", cx)).await.unwrap(); + let file1 = cx.update(|cx| 
tree.file("file1", cx)); let buffer1 = cx.add_model(|cx| { Buffer::from_history(0, History::new("abc".into()), Some(file1), None, cx) }); @@ -2700,16 +2710,16 @@ mod tests { move |_, event, _| events.borrow_mut().push(event.clone()) }); - assert!(!buffer.is_dirty()); + assert!(!buffer.is_dirty(cx.as_ref())); assert!(events.borrow().is_empty()); - buffer.edit(vec![1..2], "", Some(cx)).unwrap(); + buffer.edit(vec![1..2], "", cx).unwrap(); }); // after the first edit, the buffer is dirty, and emits a dirtied event. buffer1.update(&mut cx, |buffer, cx| { assert!(buffer.text() == "ac"); - assert!(buffer.is_dirty()); + assert!(buffer.is_dirty(cx.as_ref())); assert_eq!(*events.borrow(), &[Event::Edited, Event::Dirtied]); events.borrow_mut().clear(); @@ -2718,18 +2728,18 @@ mod tests { // after saving, the buffer is not dirty, and emits a saved event. buffer1.update(&mut cx, |buffer, cx| { - assert!(!buffer.is_dirty()); + assert!(!buffer.is_dirty(cx.as_ref())); assert_eq!(*events.borrow(), &[Event::Saved]); events.borrow_mut().clear(); - buffer.edit(vec![1..1], "B", Some(cx)).unwrap(); - buffer.edit(vec![2..2], "D", Some(cx)).unwrap(); + buffer.edit(vec![1..1], "B", cx).unwrap(); + buffer.edit(vec![2..2], "D", cx).unwrap(); }); // after editing again, the buffer is dirty, and emits another dirty event. buffer1.update(&mut cx, |buffer, cx| { assert!(buffer.text() == "aBDc"); - assert!(buffer.is_dirty()); + assert!(buffer.is_dirty(cx.as_ref())); assert_eq!( *events.borrow(), &[Event::Edited, Event::Dirtied, Event::Edited], @@ -2738,16 +2748,16 @@ mod tests { // TODO - currently, after restoring the buffer to its // previously-saved state, the is still considered dirty. - buffer.edit(vec![1..3], "", Some(cx)).unwrap(); + buffer.edit(vec![1..3], "", cx).unwrap(); assert!(buffer.text() == "ac"); - assert!(buffer.is_dirty()); + assert!(buffer.is_dirty(cx.as_ref())); }); assert_eq!(*events.borrow(), &[Event::Edited]); // When a file is deleted, the buffer is considered dirty. let events = Rc::new(RefCell::new(Vec::new())); - let file2 = cx.update(|cx| tree.file("file2", cx)).await.unwrap(); + let file2 = cx.update(|cx| tree.file("file2", cx)); let buffer2 = cx.add_model(|cx: &mut ModelContext| { cx.subscribe(&cx.handle(), { let events = events.clone(); @@ -2758,7 +2768,9 @@ mod tests { }); fs::remove_file(dir.path().join("file2")).unwrap(); - buffer2.condition(&cx, |b, _| b.is_dirty()).await; + buffer2 + .condition(&cx, |b, cx| b.is_dirty(cx.as_ref())) + .await; assert_eq!( *events.borrow(), &[Event::Dirtied, Event::FileHandleChanged] @@ -2766,7 +2778,7 @@ mod tests { // When a file is already dirty when deleted, we don't emit a Dirtied event. 
let events = Rc::new(RefCell::new(Vec::new())); - let file3 = cx.update(|cx| tree.file("file3", cx)).await.unwrap(); + let file3 = cx.update(|cx| tree.file("file3", cx)); let buffer3 = cx.add_model(|cx: &mut ModelContext| { cx.subscribe(&cx.handle(), { let events = events.clone(); @@ -2778,7 +2790,7 @@ mod tests { tree.flush_fs_events(&cx).await; buffer3.update(&mut cx, |buffer, cx| { - buffer.edit(Some(0..0), "x", Some(cx)).unwrap(); + buffer.edit(Some(0..0), "x", cx).unwrap(); }); events.borrow_mut().clear(); fs::remove_file(dir.path().join("file3")).unwrap(); @@ -2786,7 +2798,7 @@ mod tests { .condition(&cx, |_, _| !events.borrow().is_empty()) .await; assert_eq!(*events.borrow(), &[Event::FileHandleChanged]); - cx.read(|cx| assert!(buffer3.read(cx).is_dirty())); + cx.read(|cx| assert!(buffer3.read(cx).is_dirty(cx))); }); } @@ -2799,7 +2811,7 @@ mod tests { .await; let abs_path = dir.path().join("the-file"); - let file = cx.update(|cx| tree.file("the-file", cx)).await.unwrap(); + let file = cx.update(|cx| tree.file("the-file", cx)); let buffer = cx.add_model(|cx| { Buffer::from_history( 0, @@ -2812,7 +2824,7 @@ mod tests { // Add a cursor at the start of each row. let (selection_set_id, _) = buffer.update(&mut cx, |buffer, cx| { - assert!(!buffer.is_dirty()); + assert!(!buffer.is_dirty(cx.as_ref())); buffer.add_selection_set( (0..3) .map(|row| { @@ -2826,15 +2838,15 @@ mod tests { } }) .collect::>(), - Some(cx), + cx, ) }); // Change the file on disk, adding two new lines of text, and removing // one line. - buffer.read_with(&cx, |buffer, _| { - assert!(!buffer.is_dirty()); - assert!(!buffer.has_conflict()); + buffer.read_with(&cx, |buffer, cx| { + assert!(!buffer.is_dirty(cx.as_ref())); + assert!(!buffer.has_conflict(cx.as_ref())); }); let new_contents = "AAAA\naaa\nBB\nbbbbb\n"; fs::write(&abs_path, new_contents).unwrap(); @@ -2846,10 +2858,10 @@ mod tests { .condition(&cx, |buffer, _| buffer.text() != initial_contents) .await; - buffer.update(&mut cx, |buffer, _| { + buffer.update(&mut cx, |buffer, cx| { assert_eq!(buffer.text(), new_contents); - assert!(!buffer.is_dirty()); - assert!(!buffer.has_conflict()); + assert!(!buffer.is_dirty(cx.as_ref())); + assert!(!buffer.has_conflict(cx.as_ref())); let selections = buffer.selections(selection_set_id).unwrap(); let cursor_positions = selections @@ -2867,8 +2879,8 @@ mod tests { // Modify the buffer buffer.update(&mut cx, |buffer, cx| { - buffer.edit(vec![0..0], " ", Some(cx)).unwrap(); - assert!(buffer.is_dirty()); + buffer.edit(vec![0..0], " ", cx).unwrap(); + assert!(buffer.is_dirty(cx.as_ref())); }); // Change the file on disk again, adding blank lines to the beginning. @@ -2877,7 +2889,7 @@ mod tests { // Becaues the buffer is modified, it doesn't reload from disk, but is // marked as having a conflict. 
buffer - .condition(&cx, |buffer, _| buffer.has_conflict()) + .condition(&cx, |buffer, cx| buffer.has_conflict(cx.as_ref())) .await; } @@ -2902,9 +2914,9 @@ mod tests { cx.add_model(|cx| { let mut buffer = Buffer::new(0, "1234", cx); - let edit1 = buffer.edit(vec![1..1], "abx", None).unwrap(); - let edit2 = buffer.edit(vec![3..4], "yzef", None).unwrap(); - let edit3 = buffer.edit(vec![3..5], "cd", None).unwrap(); + let edit1 = buffer.edit(vec![1..1], "abx", cx).unwrap(); + let edit2 = buffer.edit(vec![3..4], "yzef", cx).unwrap(); + let edit3 = buffer.edit(vec![3..5], "cd", cx).unwrap(); assert_eq!(buffer.text(), "1abcdef234"); buffer.undo_or_redo(edit1.edit_id().unwrap()).unwrap(); @@ -2939,60 +2951,60 @@ mod tests { let mut buffer = Buffer::new(0, "123456", cx); let (set_id, _) = - buffer.add_selection_set(buffer.selections_from_ranges(vec![4..4]).unwrap(), None); - buffer.start_transaction_at(Some(set_id), now).unwrap(); - buffer.edit(vec![2..4], "cd", None).unwrap(); - buffer.end_transaction_at(Some(set_id), now, None).unwrap(); + buffer.add_selection_set(buffer.selections_from_ranges(vec![4..4]).unwrap(), cx); + buffer.start_transaction_at(Some(set_id), now, cx).unwrap(); + buffer.edit(vec![2..4], "cd", cx).unwrap(); + buffer.end_transaction_at(Some(set_id), now, cx).unwrap(); assert_eq!(buffer.text(), "12cd56"); assert_eq!(buffer.selection_ranges(set_id).unwrap(), vec![4..4]); - buffer.start_transaction_at(Some(set_id), now).unwrap(); + buffer.start_transaction_at(Some(set_id), now, cx).unwrap(); buffer .update_selection_set( set_id, buffer.selections_from_ranges(vec![1..3]).unwrap(), - None, + cx, ) .unwrap(); - buffer.edit(vec![4..5], "e", None).unwrap(); - buffer.end_transaction_at(Some(set_id), now, None).unwrap(); + buffer.edit(vec![4..5], "e", cx).unwrap(); + buffer.end_transaction_at(Some(set_id), now, cx).unwrap(); assert_eq!(buffer.text(), "12cde6"); assert_eq!(buffer.selection_ranges(set_id).unwrap(), vec![1..3]); now += UNDO_GROUP_INTERVAL + Duration::from_millis(1); - buffer.start_transaction_at(Some(set_id), now).unwrap(); + buffer.start_transaction_at(Some(set_id), now, cx).unwrap(); buffer .update_selection_set( set_id, buffer.selections_from_ranges(vec![2..2]).unwrap(), - None, + cx, ) .unwrap(); - buffer.edit(vec![0..1], "a", None).unwrap(); - buffer.edit(vec![1..1], "b", None).unwrap(); - buffer.end_transaction_at(Some(set_id), now, None).unwrap(); + buffer.edit(vec![0..1], "a", cx).unwrap(); + buffer.edit(vec![1..1], "b", cx).unwrap(); + buffer.end_transaction_at(Some(set_id), now, cx).unwrap(); assert_eq!(buffer.text(), "ab2cde6"); assert_eq!(buffer.selection_ranges(set_id).unwrap(), vec![3..3]); // Last transaction happened past the group interval, undo it on its // own. - buffer.undo(None); + buffer.undo(cx); assert_eq!(buffer.text(), "12cde6"); assert_eq!(buffer.selection_ranges(set_id).unwrap(), vec![1..3]); // First two transactions happened within the group interval, undo them // together. - buffer.undo(None); + buffer.undo(cx); assert_eq!(buffer.text(), "123456"); assert_eq!(buffer.selection_ranges(set_id).unwrap(), vec![4..4]); // Redo the first two transactions together. - buffer.redo(None); + buffer.redo(cx); assert_eq!(buffer.text(), "12cde6"); assert_eq!(buffer.selection_ranges(set_id).unwrap(), vec![1..3]); // Redo the last transaction on its own. 
- buffer.redo(None); + buffer.redo(cx); assert_eq!(buffer.text(), "ab2cde6"); assert_eq!(buffer.selection_ranges(set_id).unwrap(), vec![3..3]); @@ -3009,17 +3021,17 @@ mod tests { let buffer3 = cx.add_model(|cx| Buffer::new(3, text, cx)); let buf1_op = buffer1.update(cx, |buffer, cx| { - let op = buffer.edit(vec![1..2], "12", Some(cx)).unwrap(); + let op = buffer.edit(vec![1..2], "12", cx).unwrap(); assert_eq!(buffer.text(), "a12cdef"); op }); let buf2_op = buffer2.update(cx, |buffer, cx| { - let op = buffer.edit(vec![3..4], "34", Some(cx)).unwrap(); + let op = buffer.edit(vec![3..4], "34", cx).unwrap(); assert_eq!(buffer.text(), "abc34ef"); op }); let buf3_op = buffer3.update(cx, |buffer, cx| { - let op = buffer.edit(vec![5..6], "56", Some(cx)).unwrap(); + let op = buffer.edit(vec![5..6], "56", cx).unwrap(); assert_eq!(buffer.text(), "abcde56"); op }); @@ -3082,9 +3094,9 @@ mod tests { loop { let replica_index = rng.gen_range(0..peers); let replica_id = replica_ids[replica_index]; - buffers[replica_index].update(cx, |buffer, _| match rng.gen_range(0..=100) { + buffers[replica_index].update(cx, |buffer, cx| match rng.gen_range(0..=100) { 0..=50 if mutation_count != 0 => { - let (_, _, ops) = buffer.randomly_mutate(&mut rng, None); + let (_, _, ops) = buffer.randomly_mutate(&mut rng, cx); log::info!("buffer {} text: {:?}", buffer.replica_id, buffer.text()); network.broadcast(replica_id, ops, &mut rng); mutation_count -= 1; @@ -3102,7 +3114,7 @@ mod tests { replica_id, ops.len() ); - buffer.apply_ops(ops, None).unwrap(); + buffer.apply_ops(ops, cx).unwrap(); } } _ => {} @@ -3166,17 +3178,17 @@ mod tests { // Perform some edits (add parameter and variable reference) // Parsing doesn't begin until the transaction is complete buffer.update(&mut cx, |buf, cx| { - buf.start_transaction(None).unwrap(); + buf.start_transaction(None, cx).unwrap(); let offset = buf.text().find(")").unwrap(); - buf.edit(vec![offset..offset], "b: C", Some(cx)).unwrap(); + buf.edit(vec![offset..offset], "b: C", cx).unwrap(); assert!(!buf.is_parsing()); let offset = buf.text().find("}").unwrap(); - buf.edit(vec![offset..offset], " d; ", Some(cx)).unwrap(); + buf.edit(vec![offset..offset], " d; ", cx).unwrap(); assert!(!buf.is_parsing()); - buf.end_transaction(None, Some(cx)).unwrap(); + buf.end_transaction(None, cx).unwrap(); assert_eq!(buf.text(), "fn a(b: C) { d; }"); assert!(buf.is_parsing()); }); @@ -3198,19 +3210,19 @@ mod tests { // * add a turbofish to the method call buffer.update(&mut cx, |buf, cx| { let offset = buf.text().find(";").unwrap(); - buf.edit(vec![offset..offset], ".e", Some(cx)).unwrap(); + buf.edit(vec![offset..offset], ".e", cx).unwrap(); assert_eq!(buf.text(), "fn a(b: C) { d.e; }"); assert!(buf.is_parsing()); }); buffer.update(&mut cx, |buf, cx| { let offset = buf.text().find(";").unwrap(); - buf.edit(vec![offset..offset], "(f)", Some(cx)).unwrap(); + buf.edit(vec![offset..offset], "(f)", cx).unwrap(); assert_eq!(buf.text(), "fn a(b: C) { d.e(f); }"); assert!(buf.is_parsing()); }); buffer.update(&mut cx, |buf, cx| { let offset = buf.text().find("(f)").unwrap(); - buf.edit(vec![offset..offset], "::", Some(cx)).unwrap(); + buf.edit(vec![offset..offset], "::", cx).unwrap(); assert_eq!(buf.text(), "fn a(b: C) { d.e::(f); }"); assert!(buf.is_parsing()); }); @@ -3231,7 +3243,7 @@ mod tests { ); buffer.update(&mut cx, |buf, cx| { - buf.undo(Some(cx)); + buf.undo(cx); assert_eq!(buf.text(), "fn a() {}"); assert!(buf.is_parsing()); }); @@ -3248,7 +3260,7 @@ mod tests { ); buffer.update(&mut cx, |buf, cx| 
{ - buf.redo(Some(cx)); + buf.redo(cx); assert_eq!(buf.text(), "fn a(b: C) { d.e::(f); }"); assert!(buf.is_parsing()); }); @@ -3334,7 +3346,7 @@ mod tests { &mut self, rng: &mut T, old_range_count: usize, - cx: Option<&mut ModelContext>, + cx: &mut ModelContext, ) -> (Vec>, String, Option) where T: Rng, @@ -3362,12 +3374,12 @@ mod tests { pub fn randomly_mutate( &mut self, rng: &mut T, - mut cx: Option<&mut ModelContext>, + cx: &mut ModelContext, ) -> (Vec>, String, Vec) where T: Rng, { - let (old_ranges, new_text, operation) = self.randomly_edit(rng, 5, cx.as_deref_mut()); + let (old_ranges, new_text, operation) = self.randomly_edit(rng, 5, cx); let mut operations = Vec::from_iter(operation); // Randomly add, remove or mutate selection sets. @@ -3378,7 +3390,7 @@ mod tests { .collect::>(); let set_id = replica_selection_sets.choose(rng); if set_id.is_some() && rng.gen_bool(1.0 / 6.0) { - let op = self.remove_selection_set(*set_id.unwrap(), None).unwrap(); + let op = self.remove_selection_set(*set_id.unwrap(), cx).unwrap(); operations.push(op); } else { let mut ranges = Vec::new(); @@ -3388,9 +3400,9 @@ mod tests { let new_selections = self.selections_from_ranges(ranges).unwrap(); let op = if set_id.is_none() || rng.gen_bool(1.0 / 5.0) { - self.add_selection_set(new_selections, None).1 + self.add_selection_set(new_selections, cx).1 } else { - self.update_selection_set(*set_id.unwrap(), new_selections, None) + self.update_selection_set(*set_id.unwrap(), new_selections, cx) .unwrap() }; operations.push(op); diff --git a/zed/src/editor/display_map.rs b/zed/src/editor/display_map.rs index ccb8b7c4080ca8fcb25bc2d1f7c055e4bcb3ad00..e2713b16eb117958c877e61993e767d87605051b 100644 --- a/zed/src/editor/display_map.rs +++ b/zed/src/editor/display_map.rs @@ -475,7 +475,7 @@ mod tests { Point::new(2, 1)..Point::new(2, 1), ], "\t", - Some(cx), + cx, ) }) .unwrap(); diff --git a/zed/src/editor/display_map/fold_map.rs b/zed/src/editor/display_map/fold_map.rs index 39b7b91b13230546126396480d601270da8c1ba8..056c8410d8c611f77713326e5d4f026ea6a43620 100644 --- a/zed/src/editor/display_map/fold_map.rs +++ b/zed/src/editor/display_map/fold_map.rs @@ -880,7 +880,7 @@ mod tests { Point::new(2, 3)..Point::new(2, 3), ], "123", - Some(cx), + cx, ) .unwrap(); }); @@ -889,7 +889,7 @@ mod tests { buffer.update(cx, |buffer, cx| { let start_version = buffer.version.clone(); buffer - .edit(Some(Point::new(2, 6)..Point::new(4, 3)), "456", Some(cx)) + .edit(Some(Point::new(2, 6)..Point::new(4, 3)), "456", cx) .unwrap(); buffer.edits_since(start_version).collect::>() }); @@ -932,7 +932,7 @@ mod tests { // Edit within one of the folds. 
         buffer.update(cx, |buffer, cx| {
             let version = buffer.version();
-            buffer.edit(vec![0..1], "12345", Some(cx)).unwrap();
+            buffer.edit(vec![0..1], "12345", cx).unwrap();
             buffer.edits_since(version).collect::<Vec<_>>()
         });
         map.check_invariants(cx.as_ref());
@@ -972,7 +972,7 @@ mod tests {
 
         buffer.update(cx, |buffer, cx| {
             buffer
-                .edit(Some(Point::new(2, 2)..Point::new(3, 1)), "", Some(cx))
+                .edit(Some(Point::new(2, 2)..Point::new(3, 1)), "", cx)
                 .unwrap();
         });
         assert_eq!(map.text(cx.as_ref()), "aa…eeeee");
@@ -1067,7 +1067,7 @@ mod tests {
             let edits = buffer.update(cx, |buffer, cx| {
                 let start_version = buffer.version.clone();
                 let edit_count = rng.gen_range(1..=5);
-                buffer.randomly_edit(&mut rng, edit_count, Some(cx));
+                buffer.randomly_edit(&mut rng, edit_count, cx);
                 buffer.edits_since(start_version).collect::<Vec<_>>()
             });
             log::info!("editing {:?}", edits);
diff --git a/zed/src/rpc.rs b/zed/src/rpc.rs
index 06695e1f90df1a96bbe6cd06b68ed5404cf593e8..ee3f58f0055eafd09e25b93a5e89a7293e261ff3 100644
--- a/zed/src/rpc.rs
+++ b/zed/src/rpc.rs
@@ -1,4 +1,4 @@
-use crate::worktree::{FileHandle, Worktree};
+use crate::worktree::{File, Worktree};
 
 use super::util::SurfResultExt as _;
 use anyhow::{anyhow, Context, Result};
@@ -32,7 +32,7 @@ pub struct Client {
 pub struct ClientState {
     connection_id: Option,
     pub shared_worktrees: HashMap>,
-    pub shared_files: HashMap>,
+    pub shared_files: HashMap>,
 }
 
 impl Client {
diff --git a/zed/src/workspace.rs b/zed/src/workspace.rs
index c228e50122d422d8f5d7861bd326ef88df634c4a..7c8f63ae199099f65f780f20833c875ccebfc65a 100644
--- a/zed/src/workspace.rs
+++ b/zed/src/workspace.rs
@@ -7,7 +7,7 @@ use crate::{
     rpc,
     settings::Settings,
     time::ReplicaId,
-    worktree::{FileHandle, Worktree, WorktreeHandle},
+    worktree::{File, Worktree, WorktreeHandle},
     AppState,
 };
 use anyhow::{anyhow, Result};
@@ -116,31 +116,31 @@ mod remote {
         rpc: &rpc::Client,
         cx: &mut AsyncAppContext,
     ) -> anyhow::Result<()> {
-        let message = &request.payload;
-        let peer_id = request
-            .original_sender_id
-            .ok_or_else(|| anyhow!("missing original sender id"))?;
-
-        let mut state = rpc.state.lock().await;
-        let worktree = state
-            .shared_worktrees
-            .get(&message.worktree_id)
-            .ok_or_else(|| anyhow!("worktree {} not found", message.worktree_id))?
-            .clone();
-
-        let file = cx.update(|cx| worktree.file(&message.path, cx)).await?;
-        let id = file.id() as u64;
-        let mtime = file.mtime().as_secs();
-
-        *state
-            .shared_files
-            .entry(file)
-            .or_insert(Default::default())
-            .entry(peer_id)
-            .or_insert(0) += 1;
-
-        rpc.respond(request.receipt(), proto::OpenFileResponse { id, mtime })
-            .await?;
+        // let message = &request.payload;
+        // let peer_id = request
+        //     .original_sender_id
+        //     .ok_or_else(|| anyhow!("missing original sender id"))?;
+
+        // let mut state = rpc.state.lock().await;
+        // let worktree = state
+        //     .shared_worktrees
+        //     .get(&message.worktree_id)
+        //     .ok_or_else(|| anyhow!("worktree {} not found", message.worktree_id))?
+        //     .clone();
+
+        // let file = worktree.file(&message.path);
+        // let id = file.id() as u64;
+        // let mtime = file.mtime().as_secs();
+
+        // *state
+        //     .shared_files
+        //     .entry(file)
+        //     .or_insert(Default::default())
+        //     .entry(peer_id)
+        //     .or_insert(0) += 1;
+
+        // rpc.respond(request.receipt(), proto::OpenFileResponse { id, mtime })
+        //     .await?;
 
         Ok(())
     }
@@ -154,19 +154,19 @@ mod remote {
             .original_sender_id
             .ok_or_else(|| anyhow!("missing original sender id"))?;
 
-        let mut state = rpc.state.lock().await;
-        if let Some((_, ref_counts)) = state
-            .shared_files
-            .iter_mut()
-            .find(|(file, _)| file.id() == message.id)
-        {
-            if let Some(count) = ref_counts.get_mut(&peer_id) {
-                *count -= 1;
-                if *count == 0 {
-                    ref_counts.remove(&peer_id);
-                }
-            }
-        }
+        // let mut state = rpc.state.lock().await;
+        // if let Some((_, ref_counts)) = state
+        //     .shared_files
+        //     .iter_mut()
+        //     .find(|(file, _)| file.id() == message.id)
+        // {
+        //     if let Some(count) = ref_counts.get_mut(&peer_id) {
+        //         *count -= 1;
+        //         if *count == 0 {
+        //             ref_counts.remove(&peer_id);
+        //         }
+        //     }
+        // }
 
         Ok(())
     }
@@ -176,24 +176,24 @@ mod remote {
         rpc: &rpc::Client,
         cx: &mut AsyncAppContext,
     ) -> anyhow::Result<()> {
-        let message = &request.payload;
-        let handle = {
-            let state = rpc.state.lock().await;
-            let mut files = state.shared_files.keys();
-            files.find(|file| file.id() == message.id).cloned()
-        };
-        let buffer = if let Some(handle) = handle {
-            let history = cx.read(|cx| handle.load_history(cx)).await?;
-            Some(proto::Buffer {
-                content: history.base_text.to_string(),
-                history: Vec::new(),
-            })
-        } else {
-            None
-        };
-
-        rpc.respond(request.receipt(), proto::OpenBufferResponse { buffer })
-            .await?;
+        // let message = &request.payload;
+        // let handle = {
+        //     let state = rpc.state.lock().await;
+        //     let mut files = state.shared_files.keys();
+        //     files.find(|file| file.id() == message.id).cloned()
+        // };
+        // let buffer = if let Some(handle) = handle {
+        //     let history = cx.read(|cx| handle.load_history(cx)).await?;
+        //     Some(proto::Buffer {
+        //         content: history.base_text.to_string(),
+        //         history: Vec::new(),
+        //     })
+        // } else {
+        //     None
+        // };
+
+        // rpc.respond(request.receipt(), proto::OpenBufferResponse { buffer })
+        //     .await?;
 
         Ok(())
     }
@@ -208,7 +208,7 @@ pub trait Item: Entity + Sized {
         cx: &mut ViewContext,
     ) -> Self::View;
 
-    fn file(&self) -> Option<&FileHandle>;
+    fn file(&self) -> Option<&ModelHandle<File>>;
 }
 
 pub trait ItemView: View {
@@ -228,7 +228,7 @@ pub trait ItemView: View {
     }
     fn save(
         &mut self,
-        _: Option<FileHandle>,
+        _: Option<ModelHandle<File>>,
         _: &mut ViewContext,
     ) -> Task>;
     fn should_activate_item_on_event(_: &Self::Event) -> bool {
@@ -245,7 +245,7 @@ pub trait ItemHandle: Send + Sync {
 }
 
 pub trait WeakItemHandle: Send + Sync {
-    fn file<'a>(&'a self, cx: &'a AppContext) -> Option<&'a FileHandle>;
+    fn file<'a>(&'a self, cx: &'a AppContext) -> Option<&'a ModelHandle<File>>;
     fn add_view(
         &self,
         window_id: usize,
@@ -267,7 +267,7 @@ pub trait ItemViewHandle: Send + Sync {
     fn has_conflict(&self, cx: &AppContext) -> bool;
     fn save(
         &self,
-        file: Option<FileHandle>,
+        file: Option<ModelHandle<File>>,
         cx: &mut MutableAppContext,
     ) -> Task>;
 }
@@ -283,7 +283,7 @@ impl ItemHandle for ModelHandle {
 }
 
 impl WeakItemHandle for WeakModelHandle {
-    fn file<'a>(&'a self, cx: &'a AppContext) -> Option<&'a FileHandle> {
+    fn file<'a>(&'a self, cx: &'a AppContext) -> Option<&'a ModelHandle<File>> {
         self.upgrade(cx).and_then(|h| h.read(cx).file())
     }
 
@@ -345,7 +345,7 @@ impl ItemViewHandle for ViewHandle {
     fn save(
         &self,
-        file: Option<FileHandle>,
+        file: Option<ModelHandle<File>>,
         cx: 
&mut MutableAppContext, ) -> Task> { self.update(cx, |item, cx| item.save(file, cx)) @@ -484,13 +484,13 @@ impl Workspace { .map(|(abs_path, file)| { let is_file = bg.spawn(async move { abs_path.is_file() }); cx.spawn(|this, mut cx| async move { - if let Ok(file) = file.await { - if is_file.await { - return this - .update(&mut cx, |this, cx| this.open_entry(file.entry_id(), cx)); - } + if is_file.await { + return this.update(&mut cx, |this, cx| { + this.open_entry(file.read(cx).entry_id(), cx) + }); + } else { + None } - None }) }) .collect::>(); @@ -503,11 +503,7 @@ impl Workspace { } } - fn file_for_path( - &mut self, - abs_path: &Path, - cx: &mut ViewContext, - ) -> Task> { + fn file_for_path(&mut self, abs_path: &Path, cx: &mut ViewContext) -> ModelHandle { for tree in self.worktrees.iter() { if let Some(relative_path) = tree .read(cx) @@ -592,7 +588,7 @@ impl Workspace { if view_for_existing_item.is_none() && item .file(cx.as_ref()) - .map_or(false, |f| f.entry_id() == entry) + .map_or(false, |file| file.read(cx).entry_id() == entry) { view_for_existing_item = Some( item.add_view(cx.window_id(), settings.clone(), cx.as_mut()) @@ -629,8 +625,7 @@ impl Workspace { cx.as_mut() .spawn(|mut cx| async move { let buffer = async move { - let file = file.await?; - let history = cx.read(|cx| file.load_history(cx)); + let history = cx.read(|cx| file.read(cx).load_history(cx)); let history = cx.background_executor().spawn(history).await?; let buffer = cx.add_model(|cx| { let language = language_registry.select_language(path); @@ -696,9 +691,8 @@ impl Workspace { if let Some(path) = path { cx.spawn(|mut cx| async move { let result = async move { - let file = handle - .update(&mut cx, |me, cx| me.file_for_path(&path, cx)) - .await?; + let file = + handle.update(&mut cx, |me, cx| me.file_for_path(&path, cx)); cx.update(|cx| item.save(Some(file), cx)).await } .await; diff --git a/zed/src/worktree.rs b/zed/src/worktree.rs index 0ddf3a1a8137134313b8c4804ae91f9944e757ce..6ec6bff56dfe635b04c581beeccd4fcc1b653fe9 100644 --- a/zed/src/worktree.rs +++ b/zed/src/worktree.rs @@ -10,7 +10,7 @@ use crate::{ util::Bias, }; use ::ignore::gitignore::Gitignore; -use anyhow::{anyhow, Context, Result}; +use anyhow::{Context, Result}; pub use fuzzy::{match_paths, PathMatch}; use gpui::{scoped_pool, AppContext, Entity, ModelContext, ModelHandle, MutableAppContext, Task}; use lazy_static::lazy_static; @@ -19,22 +19,18 @@ use postage::{ prelude::{Sink, Stream}, watch, }; -use smol::{channel::Sender, lock::Mutex as AsyncMutex}; +use smol::channel::Sender; use std::{ cmp, collections::{HashMap, HashSet}, ffi::{OsStr, OsString}, fmt, fs, future::Future, - hash::Hash, io::{self, Read, Write}, ops::Deref, os::unix::fs::MetadataExt, path::{Path, PathBuf}, - sync::{ - atomic::{AtomicU64, Ordering::SeqCst}, - Arc, Weak, - }, + sync::{atomic::AtomicU64, Arc}, time::{Duration, SystemTime, UNIX_EPOCH}, }; @@ -123,7 +119,6 @@ impl Deref for Worktree { pub struct LocalWorktree { snapshot: Snapshot, background_snapshot: Arc>, - handles: Arc, Weak>>>>, next_handle_id: AtomicU64, scan_state: (watch::Sender, watch::Receiver), _event_stream_handle: fsevent::Handle, @@ -131,40 +126,6 @@ pub struct LocalWorktree { rpc: Option, } -#[derive(Clone)] -pub struct FileHandle { - worktree: ModelHandle, - state: Arc>, -} - -#[derive(Clone)] -struct FileHandleState { - path: Arc, - is_deleted: bool, - mtime: Duration, - worktree_id: usize, - id: u64, - rpc: Option<(ConnectionId, rpc::Client)>, -} - -impl Drop for FileHandleState { - fn drop(&mut 
self) { - if let Some((connection_id, rpc)) = self.rpc.take() { - let id = self.id; - let worktree_id = self.worktree_id as u64; - smol::spawn(async move { - if let Err(error) = rpc - .send(connection_id, proto::CloseFile { worktree_id, id }) - .await - { - log::warn!("error closing file {}: {}", id, error); - } - }) - .detach(); - } - } -} - impl LocalWorktree { fn new(path: impl Into>, cx: &mut ModelContext) -> Self { let abs_path = path.into(); @@ -182,12 +143,10 @@ impl LocalWorktree { fsevent::EventStream::new(&[snapshot.abs_path.as_ref()], Duration::from_millis(100)); let background_snapshot = Arc::new(Mutex::new(snapshot.clone())); - let handles = Arc::new(Mutex::new(Default::default())); let tree = Self { snapshot, background_snapshot: background_snapshot.clone(), - handles: handles.clone(), next_handle_id: Default::default(), scan_state: watch::channel_with(ScanState::Scanning), _event_stream_handle: event_stream_handle, @@ -196,7 +155,7 @@ impl LocalWorktree { }; std::thread::spawn(move || { - let scanner = BackgroundScanner::new(background_snapshot, handles, scan_state_tx, id); + let scanner = BackgroundScanner::new(background_snapshot, scan_state_tx, id); scanner.run(event_stream) }); @@ -297,7 +256,6 @@ impl LocalWorktree { } pub fn save(&self, path: &Path, content: Rope, cx: &AppContext) -> Task> { - let handles = self.handles.clone(); let path = path.to_path_buf(); let abs_path = self.absolutize(&path); cx.background_executor().spawn(async move { @@ -308,13 +266,6 @@ impl LocalWorktree { writer.write(chunk.as_bytes())?; } writer.flush()?; - - if let Some(handle) = handles.lock().get(&*path).and_then(Weak::upgrade) { - let mut handle = handle.lock(); - handle.mtime = file.metadata()?.modified()?.duration_since(UNIX_EPOCH)?; - handle.is_deleted = false; - } - Ok(()) }) } @@ -387,7 +338,6 @@ impl fmt::Debug for LocalWorktree { pub struct RemoteWorktree { remote_id: usize, snapshot: Snapshot, - handles: Arc, Arc>>>>>>, rpc: rpc::Client, connection_id: ConnectionId, } @@ -442,7 +392,6 @@ impl RemoteWorktree { Self { remote_id, snapshot, - handles: Default::default(), rpc, connection_id, } @@ -695,44 +644,74 @@ pub struct Diff { pub modified: HashSet>, } -impl FileHandle { - pub fn id(&self) -> u64 { - self.state.lock().id +#[derive(Clone, PartialEq)] +pub struct File { + worktree: ModelHandle, + path: Arc, +} + +impl Entity for File { + type Event = (); +} + +impl File { + pub fn new( + worktree: ModelHandle, + path: Arc, + cx: &mut ModelContext, + ) -> Self { + cx.subscribe(&worktree, Self::handle_worktree_update); + Self { worktree, path } + } + + fn handle_worktree_update(&mut self, diff: &Diff, cx: &mut ModelContext) { + if let Some(new_path) = diff.moved.get(&self.path) { + self.path = new_path.clone(); + cx.notify(); + } else if diff.added.contains(&self.path) + || diff.removed.contains(&self.path) + || diff.modified.contains(&self.path) + { + cx.notify(); + } } /// Returns this file's path relative to the root of its worktree. pub fn path(&self) -> Arc { - self.state.lock().path.clone() + self.path.clone() } /// Returns the last component of this handle's absolute path. If this handle refers to the root /// of its worktree, then this method will return the name of the worktree itself. 
pub fn file_name<'a>(&'a self, cx: &'a AppContext) -> Option { - self.state - .lock() - .path + self.path .file_name() .or_else(|| Some(OsStr::new(self.worktree.read(cx).root_name()))) .map(Into::into) } - pub fn is_deleted(&self) -> bool { - self.state.lock().is_deleted + pub fn is_deleted(&self, cx: &AppContext) -> bool { + let snapshot = self.worktree.read(cx).snapshot(); + snapshot.entry_for_path(&self.path).is_none() && !snapshot.path_is_pending(&self.path) } - pub fn mtime(&self) -> Duration { - self.state.lock().mtime + pub fn exists(&self, cx: &AppContext) -> bool { + !self.is_deleted(cx) } - pub fn exists(&self) -> bool { - !self.is_deleted() + pub fn mtime(&self, cx: &AppContext) -> Duration { + let snapshot = self.worktree.read(cx).snapshot(); + snapshot + .entry_for_path(&self.path) + .map_or(Duration::ZERO, |entry| { + entry.mtime.duration_since(UNIX_EPOCH).unwrap() + }) } pub fn load_history(&self, cx: &AppContext) -> Task> { match self.worktree.read(cx) { Worktree::Local(worktree) => { - let path = self.state.lock().path.to_path_buf(); - let abs_path = worktree.absolutize(&path); + let abs_path = worktree.absolutize(&self.path); cx.background_executor().spawn(async move { let mut file = fs::File::open(&abs_path)?; let mut base_text = String::new(); @@ -741,20 +720,21 @@ impl FileHandle { }) } Worktree::Remote(worktree) => { - let state = self.state.lock(); - let id = state.id; - let worktree_id = worktree.remote_id as u64; - let (connection_id, rpc) = state.rpc.clone().unwrap(); - cx.background_executor().spawn(async move { - let response = rpc - .request(connection_id, proto::OpenBuffer { worktree_id, id }) - .await?; - let buffer = response - .buffer - .ok_or_else(|| anyhow!("buffer must be present"))?; - let history = History::new(buffer.content.into()); - Ok(history) - }) + todo!() + // let state = self.state.lock(); + // let id = state.id; + // let worktree_id = worktree.remote_id as u64; + // let (connection_id, rpc) = state.rpc.clone().unwrap(); + // cx.background_executor().spawn(async move { + // let response = rpc + // .request(connection_id, proto::OpenBuffer { worktree_id, id }) + // .await?; + // let buffer = response + // .buffer + // .ok_or_else(|| anyhow!("buffer must be present"))?; + // let history = History::new(buffer.content.into()); + // Ok(history) + // }) } } } @@ -771,55 +751,6 @@ impl FileHandle { pub fn entry_id(&self) -> (usize, Arc) { (self.worktree.id(), self.path()) } - - pub fn observe_from_model( - &self, - cx: &mut ModelContext, - mut callback: impl FnMut(&mut T, FileHandle, &mut ModelContext) + 'static, - ) { - let mut prev_state = self.state.lock().clone(); - let cur_state = Arc::downgrade(&self.state); - cx.observe(&self.worktree, move |observer, worktree, cx| { - if let Some(cur_state) = cur_state.upgrade() { - let cur_state_unlocked = cur_state.lock(); - if cur_state_unlocked.mtime != prev_state.mtime - || cur_state_unlocked.path != prev_state.path - { - prev_state = cur_state_unlocked.clone(); - drop(cur_state_unlocked); - callback( - observer, - FileHandle { - worktree, - state: cur_state, - }, - cx, - ); - } - } - }); - } -} - -impl PartialEq for FileHandle { - fn eq(&self, other: &Self) -> bool { - if Arc::ptr_eq(&self.state, &other.state) { - true - } else { - let self_state = self.state.lock(); - let other_state = other.state.lock(); - self_state.worktree_id == other_state.worktree_id && self_state.id == other_state.id - } - } -} - -impl Eq for FileHandle {} - -impl Hash for FileHandle { - fn hash(&self, state: &mut H) { - 
self.state.lock().id.hash(state); - self.worktree.hash(state); - } } #[derive(Clone, Debug)] @@ -998,23 +929,16 @@ impl<'a> sum_tree::Dimension<'a, EntrySummary> for VisibleFileCount { struct BackgroundScanner { snapshot: Arc>, notify: Sender, - handles: Arc, Weak>>>>, thread_pool: scoped_pool::Pool, root_char_bag: CharBag, } impl BackgroundScanner { - fn new( - snapshot: Arc>, - handles: Arc, Weak>>>>, - notify: Sender, - worktree_id: usize, - ) -> Self { + fn new(snapshot: Arc>, notify: Sender, worktree_id: usize) -> Self { Self { root_char_bag: Default::default(), snapshot, notify, - handles, thread_pool: scoped_pool::Pool::new(16, format!("worktree-{}-scanner", worktree_id)), } } @@ -1125,7 +1049,6 @@ impl BackgroundScanner { }); } - self.mark_deleted_file_handles(); Ok(()) } @@ -1231,73 +1154,6 @@ impl BackgroundScanner { return false; }; - let mut renamed_paths: HashMap = HashMap::new(); - let mut handles = self.handles.lock(); - let mut updated_handles = HashMap::new(); - for event in &events { - let path = if let Ok(path) = event.path.strip_prefix(&root_abs_path) { - path - } else { - continue; - }; - - let metadata = fs::metadata(&event.path); - if event.flags.contains(fsevent::StreamFlags::ITEM_RENAMED) { - if let Some(inode) = snapshot.inode_for_path(path) { - renamed_paths.insert(inode, path.to_path_buf()); - } else if let Ok(metadata) = &metadata { - let new_path = path; - if let Some(old_path) = renamed_paths.get(&metadata.ino()) { - handles.retain(|handle_path, handle_state| { - if let Ok(path_suffix) = handle_path.strip_prefix(&old_path) { - let new_handle_path: Arc = - if path_suffix.file_name().is_some() { - new_path.join(path_suffix) - } else { - new_path.to_path_buf() - } - .into(); - if let Some(handle_state) = Weak::upgrade(&handle_state) { - let mut state = handle_state.lock(); - state.path = new_handle_path.clone(); - updated_handles - .insert(new_handle_path, Arc::downgrade(&handle_state)); - } - false - } else { - true - } - }); - handles.extend(updated_handles.drain()); - } - } - } - - for state in handles.values_mut() { - if let Some(state) = Weak::upgrade(&state) { - let mut state = state.lock(); - if state.path.as_ref() == path { - if let Ok(metadata) = &metadata { - state.mtime = metadata - .modified() - .unwrap() - .duration_since(UNIX_EPOCH) - .unwrap(); - } - } else if state.path.starts_with(path) { - if let Ok(metadata) = fs::metadata(state.path.as_ref()) { - state.mtime = metadata - .modified() - .unwrap() - .duration_since(UNIX_EPOCH) - .unwrap(); - } - } - } - } - } - drop(handles); - events.sort_unstable_by(|a, b| a.path.cmp(&b.path)); let mut abs_paths = events.into_iter().map(|e| e.path).peekable(); let (scan_queue_tx, scan_queue_rx) = crossbeam_channel::unbounded(); @@ -1363,7 +1219,6 @@ impl BackgroundScanner { }); self.update_ignore_statuses(); - self.mark_deleted_file_handles(); true } @@ -1453,20 +1308,6 @@ impl BackgroundScanner { self.snapshot.lock().entries.edit(edits, &()); } - fn mark_deleted_file_handles(&self) { - let mut handles = self.handles.lock(); - let snapshot = self.snapshot.lock(); - handles.retain(|path, handle_state| { - if let Some(handle_state) = Weak::upgrade(&handle_state) { - let mut handle_state = handle_state.lock(); - handle_state.is_deleted = snapshot.entry_for_path(&path).is_none(); - true - } else { - false - } - }); - } - fn fs_entry_for_path(&self, path: Arc, abs_path: &Path) -> Result> { let metadata = match fs::metadata(&abs_path) { Err(err) => { @@ -1526,7 +1367,7 @@ struct UpdateIgnoreStatusJob { } pub trait 
@@ -1526,7 +1367,7 @@ struct UpdateIgnoreStatusJob {
 }
 
 pub trait WorktreeHandle {
-    fn file(&self, path: impl AsRef<Path>, cx: &mut MutableAppContext) -> Task<Result<FileHandle>>;
+    fn file(&self, path: impl AsRef<Path>, cx: &mut MutableAppContext) -> ModelHandle<File>;
 
     #[cfg(test)]
     fn flush_fs_events<'a>(
@@ -1536,108 +1377,10 @@ pub trait WorktreeHandle {
 }
 
 impl WorktreeHandle for ModelHandle<Worktree> {
-    fn file(&self, path: impl AsRef<Path>, cx: &mut MutableAppContext) -> Task<Result<FileHandle>> {
+    fn file(&self, path: impl AsRef<Path>, cx: &mut MutableAppContext) -> ModelHandle<File> {
         let path = Arc::from(path.as_ref());
         let handle = self.clone();
-        let tree = self.read(cx);
-        match tree {
-            Worktree::Local(tree) => {
-                let worktree_id = handle.id();
-                let abs_path = tree.absolutize(&path);
-                cx.spawn(|cx| async move {
-                    let mtime = cx
-                        .background_executor()
-                        .spawn(async move { fs::metadata(&abs_path) })
-                        .await?
-                        .modified()?
-                        .duration_since(UNIX_EPOCH)?;
-                    let state = handle.read_with(&cx, |tree, _| {
-                        let mut handles = tree.as_local().unwrap().handles.lock();
-                        handles
-                            .get(&path)
-                            .and_then(Weak::upgrade)
-                            .unwrap_or_else(|| {
-                                let id =
-                                    tree.as_local().unwrap().next_handle_id.fetch_add(1, SeqCst);
-                                let handle_state = if let Some(entry) = tree.entry_for_path(&path) {
-                                    FileHandleState {
-                                        path: entry.path().clone(),
-                                        is_deleted: false,
-                                        mtime,
-                                        worktree_id,
-                                        id,
-                                        rpc: None,
-                                    }
-                                } else {
-                                    FileHandleState {
-                                        path: path.clone(),
-                                        is_deleted: !tree.path_is_pending(&path),
-                                        mtime,
-                                        worktree_id,
-                                        id,
-                                        rpc: None,
-                                    }
-                                };
-
-                                let state = Arc::new(Mutex::new(handle_state.clone()));
-                                handles.insert(path, Arc::downgrade(&state));
-                                state
-                            })
-                    });
-                    Ok(FileHandle {
-                        worktree: handle.clone(),
-                        state,
-                    })
-                })
-            }
-            Worktree::Remote(tree) => {
-                let remote_worktree_id = tree.remote_id;
-                let connection_id = tree.connection_id;
-                let rpc = tree.rpc.clone();
-                let handles = tree.handles.clone();
-                cx.spawn(|cx| async move {
-                    let state = handles
-                        .lock()
-                        .entry(path.clone())
-                        .or_insert_with(|| Arc::new(AsyncMutex::new(Weak::new())))
-                        .clone();
-
-                    let mut state = state.lock().await;
-                    if let Some(state) = Weak::upgrade(&state) {
-                        Ok(FileHandle {
-                            worktree: handle,
-                            state,
-                        })
-                    } else {
-                        let response = rpc
-                            .request(
-                                connection_id,
-                                proto::OpenFile {
-                                    worktree_id: remote_worktree_id as u64,
-                                    path: path.to_string_lossy().to_string(),
-                                },
-                            )
-                            .await?;
-                        let is_deleted = handle.read_with(&cx, |tree, _| {
-                            tree.entry_for_path(&path).is_none() && !tree.path_is_pending(&path)
-                        });
-                        let new_state = Arc::new(Mutex::new(FileHandleState {
-                            path,
-                            is_deleted,
-                            mtime: Duration::from_secs(response.mtime),
-                            worktree_id: remote_worktree_id,
-                            id: response.id,
-                            rpc: Some((connection_id, rpc)),
-                        }));
-                        *state = Arc::downgrade(&new_state);
-                        Ok(FileHandle {
-                            worktree: handle,
-                            state: new_state,
-                        })
-                    }
-                })
-            }
-        }
+        cx.add_model(|cx| File::new(handle, path, cx))
     }
 
     // When the worktree's FS event stream sometimes delivers "redundant" events for FS changes that
@@ -1850,8 +1593,10 @@ mod tests {
             path
         });
 
-        let file = cx.update(|cx| tree.file(&path, cx)).await.unwrap();
-        let history = cx.read(|cx| file.load_history(cx)).await.unwrap();
+        let history = cx
+            .update(|cx| tree.file(&path, cx).read(cx).load_history(cx.as_ref()))
+            .await
+            .unwrap();
         cx.read(|cx| {
             assert_eq!(history.base_text.as_ref(), buffer.read(cx).text());
         });
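Both the removed remote branch above and the new File::is_deleted decide deletion the same way: a path counts as deleted only when the snapshot has no entry for it and the path is not still pending an initial scan. A toy, dependency-free model of that rule follows; ToySnapshot and its fields are invented for illustration and are not the crate's real Snapshot type.

use std::collections::HashSet;
use std::path::{Path, PathBuf};

struct ToySnapshot {
    entries: HashSet<PathBuf>,
    pending: HashSet<PathBuf>,
}

impl ToySnapshot {
    /// Mirrors the check above: deleted = no entry AND not pending a scan.
    fn is_deleted(&self, path: &Path) -> bool {
        !self.entries.contains(path) && !self.pending.contains(path)
    }
}

fn main() {
    let mut entries = HashSet::new();
    entries.insert(PathBuf::from("a/file2"));
    let mut pending = HashSet::new();
    pending.insert(PathBuf::from("a/file_new"));
    let snapshot = ToySnapshot { entries, pending };

    assert!(!snapshot.is_deleted(Path::new("a/file2"))); // scanned entry exists
    assert!(!snapshot.is_deleted(Path::new("a/file_new"))); // not scanned yet, so not "deleted"
    assert!(snapshot.is_deleted(Path::new("a/file_x"))); // neither entry nor pending
}

Treating pending paths as not-yet-deleted is why the tests below wait for scan_complete() before asserting that non_existent_file is deleted.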
@@ -1869,14 +1614,17 @@ mod tests {
         cx.read(|cx| assert_eq!(tree.read(cx).file_count(), 1));
 
         let buffer = cx.add_model(|cx| Buffer::new(1, "a line of text.\n".repeat(10 * 1024), cx));
+        let file = cx.update(|cx| tree.file("", cx));
 
-        let file = cx.update(|cx| tree.file("", cx)).await.unwrap();
-        cx.update(|cx| {
-            assert_eq!(file.path().file_name(), None);
-            smol::block_on(file.save(buffer.read(cx).snapshot().text(), cx.as_ref())).unwrap();
-        });
+        let history = file
+            .read_with(&cx, |file, cx| {
+                assert_eq!(file.path().file_name(), None);
+                smol::block_on(file.save(buffer.read(cx).snapshot().text(), cx.as_ref())).unwrap();
+                file.load_history(cx)
+            })
+            .await
+            .unwrap();
 
-        let history = cx.read(|cx| file.load_history(cx)).await.unwrap();
         cx.read(|cx| assert_eq!(history.base_text.as_ref(), buffer.read(cx).text()));
     }
 
@@ -1897,20 +1645,23 @@ mod tests {
         }));
 
         let tree = cx.add_model(|cx| Worktree::local(dir.path(), cx));
-        let file2 = cx.update(|cx| tree.file("a/file2", cx)).await.unwrap();
-        let file3 = cx.update(|cx| tree.file("a/file3", cx)).await.unwrap();
-        let file4 = cx.update(|cx| tree.file("b/c/file4", cx)).await.unwrap();
-        let file5 = cx.update(|cx| tree.file("b/c/file5", cx)).await.unwrap();
-        let non_existent_file = cx.update(|cx| tree.file("a/file_x", cx)).await.unwrap();
+        let file2 = cx.update(|cx| tree.file("a/file2", cx));
+        let file3 = cx.update(|cx| tree.file("a/file3", cx));
+        let file4 = cx.update(|cx| tree.file("b/c/file4", cx));
+        let file5 = cx.update(|cx| tree.file("b/c/file5", cx));
+        let non_existent_file = cx.update(|cx| tree.file("a/file_x", cx));
 
         // After scanning, the worktree knows which files exist and which don't.
         cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
             .await;
-        assert!(!file2.is_deleted());
-        assert!(!file3.is_deleted());
-        assert!(!file4.is_deleted());
-        assert!(!file5.is_deleted());
-        assert!(non_existent_file.is_deleted());
+
+        cx.read(|cx| {
+            assert!(!file2.read(cx).is_deleted(cx));
+            assert!(!file3.read(cx).is_deleted(cx));
+            assert!(!file4.read(cx).is_deleted(cx));
+            assert!(!file5.read(cx).is_deleted(cx));
+            assert!(non_existent_file.read(cx).is_deleted(cx));
+        });
 
         tree.flush_fs_events(&cx).await;
         std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
@@ -1936,18 +1687,14 @@ mod tests {
                 ]
             );
 
-            assert_eq!(file2.path().to_str().unwrap(), "a/file2.new");
-            assert_eq!(file4.path().as_ref(), Path::new("d/file4"));
-            assert_eq!(file5.path().as_ref(), Path::new("d/file5"));
-            assert!(!file2.is_deleted());
-            assert!(!file4.is_deleted());
-            assert!(file5.is_deleted());
-
-            // Right now, this rename isn't detected because the target path
-            // no longer exists on the file system by the time we process the
-            // rename event.
-            assert_eq!(file3.path().as_ref(), Path::new("a/file3"));
-            assert!(file3.is_deleted());
+            assert_eq!(file2.read(cx).path().as_ref(), Path::new("a/file2.new"));
+            assert_eq!(file3.read(cx).path().as_ref(), Path::new("d/file3"));
+            assert_eq!(file4.read(cx).path().as_ref(), Path::new("d/file4"));
+            assert_eq!(file5.read(cx).path().as_ref(), Path::new("b/c/file5"));
+            assert!(!file2.read(cx).is_deleted(cx));
+            assert!(!file3.read(cx).is_deleted(cx));
+            assert!(!file4.read(cx).is_deleted(cx));
+            assert!(file5.read(cx).is_deleted(cx));
         });
     }
 
@@ -2158,7 +1905,6 @@ mod tests {
                 ignores: Default::default(),
                 root_name: Default::default(),
             })),
-            Arc::new(Mutex::new(Default::default())),
             notify_tx,
             0,
         );
@@ -2193,7 +1939,6 @@ mod tests {
                 ignores: Default::default(),
                 root_name: Default::default(),
             })),
-            Arc::new(Mutex::new(Default::default())),
             notify_tx,
             1,
        );
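For context on the two test hunks above: with the handle map gone, BackgroundScanner::new takes just a shared snapshot, a notify channel, and an id. The dependency-free sketch below mirrors that shape, with a background scanner filling an Arc<Mutex<..>> snapshot and reporting over a channel; SharedSnapshot, the two-variant ScanState, and the use of std::sync::mpsc in place of the crate's channel are all invented stand-ins, so this illustrates the design rather than the real types.

use std::collections::BTreeSet;
use std::path::PathBuf;
use std::sync::{mpsc, Arc, Mutex};
use std::thread;

enum ScanState {
    Scanning,
    Idle,
}

#[derive(Default)]
struct SharedSnapshot {
    entries: BTreeSet<PathBuf>,
}

fn main() {
    let snapshot = Arc::new(Mutex::new(SharedSnapshot::default()));
    let (notify_tx, notify_rx) = mpsc::channel();

    // The scanner side owns a clone of the snapshot and the sending half of the channel.
    let scanner_snapshot = Arc::clone(&snapshot);
    let scanner = thread::spawn(move || {
        notify_tx.send(ScanState::Scanning).unwrap();
        for path in ["a/file2", "b/c/file4", "b/c/file5"] {
            scanner_snapshot
                .lock()
                .unwrap()
                .entries
                .insert(PathBuf::from(path));
        }
        notify_tx.send(ScanState::Idle).unwrap();
    });

    // The caller blocks until the scanner reports it is idle, then reads the shared
    // snapshot, which is the rendezvous the tests above perform via scan_complete().
    while let Ok(state) = notify_rx.recv() {
        if matches!(state, ScanState::Idle) {
            break;
        }
    }
    scanner.join().unwrap();
    assert_eq!(snapshot.lock().unwrap().entries.len(), 3);
}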