From 30e2e2014d904be86de8da3b39e00d6829917660 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Tue, 19 Oct 2021 19:17:52 +0200 Subject: [PATCH 1/7] Extract a TextBuffer from Buffer, which has no tree or file Co-Authored-By: Antonio Scandurra Co-Authored-By: Nathan Sobo --- crates/buffer/src/lib.rs | 2999 +++++++++++++++-------------- crates/buffer/src/tests/buffer.rs | 1079 +++++------ crates/buffer/src/tests/syntax.rs | 13 + crates/editor/src/display_map.rs | 2 +- 4 files changed, 2090 insertions(+), 2003 deletions(-) diff --git a/crates/buffer/src/lib.rs b/crates/buffer/src/lib.rs index 3817b7131a09c58dec463088ecaafd5465393fa1..ad3a3194219039deee82b4945d47e0f60f2dcaff 100644 --- a/crates/buffer/src/lib.rs +++ b/crates/buffer/src/lib.rs @@ -158,17 +158,29 @@ impl Drop for QueryCursorHandle { } } -pub struct Buffer { +#[derive(Clone)] +pub struct TextBuffer { fragments: SumTree, visible_text: Rope, deleted_text: Rope, pub version: clock::Global, - saved_version: clock::Global, - saved_mtime: SystemTime, last_edit: clock::Local, undo_map: UndoMap, history: History, + selections: HashMap, + deferred_ops: OperationQueue, + deferred_replicas: HashSet, + replica_id: ReplicaId, + remote_id: u64, + local_clock: clock::Local, + lamport_clock: clock::Lamport, +} + +pub struct Buffer { + buffer: TextBuffer, file: Option>, + saved_version: clock::Global, + saved_mtime: SystemTime, language: Option>, autoindent_requests: Vec>, pending_autoindent: Option>, @@ -176,13 +188,6 @@ pub struct Buffer { syntax_tree: Mutex>, parsing_in_background: bool, parse_count: usize, - selections: HashMap, - deferred_ops: OperationQueue, - deferred_replicas: HashSet, - replica_id: ReplicaId, - remote_id: u64, - local_clock: clock::Local, - lamport_clock: clock::Lamport, #[cfg(test)] operations: Vec, } @@ -208,10 +213,9 @@ struct AutoindentRequest { } #[derive(Clone, Debug)] -struct Transaction { +pub struct Transaction { start: clock::Global, end: clock::Global, - buffer_was_dirty: bool, edits: Vec, ranges: Vec>, selections_before: HashMap>, @@ -221,6 +225,10 @@ struct Transaction { } impl Transaction { + pub fn starting_selection_set_ids<'a>(&'a self) -> impl Iterator + 'a { + self.selections_before.keys().copied() + } + fn push_edit(&mut self, edit: &EditOperation) { self.edits.push(edit.timestamp.local()); self.end.observe(edit.timestamp.local()); @@ -298,7 +306,6 @@ impl History { fn start_transaction( &mut self, start: clock::Global, - buffer_was_dirty: bool, selections_before: HashMap>, now: Instant, ) { @@ -307,7 +314,6 @@ impl History { self.undo_stack.push(Transaction { start: start.clone(), end: start, - buffer_was_dirty, edits: Vec::new(), ranges: Vec::new(), selections_before, @@ -574,54 +580,16 @@ pub struct UndoOperation { version: clock::Global, } -impl Buffer { - pub fn new>>( - replica_id: ReplicaId, - base_text: T, - cx: &mut ModelContext, - ) -> Self { - Self::build( - replica_id, - History::new(base_text.into()), - None, - cx.model_id() as u64, - None, - cx, - ) - } +impl Deref for Buffer { + type Target = TextBuffer; - pub fn from_history( - replica_id: ReplicaId, - history: History, - file: Option>, - language: Option>, - cx: &mut ModelContext, - ) -> Self { - Self::build( - replica_id, - history, - file, - cx.model_id() as u64, - language, - cx, - ) + fn deref(&self) -> &Self::Target { + &self.buffer } +} - fn build( - replica_id: ReplicaId, - history: History, - file: Option>, - remote_id: u64, - language: Option>, - cx: &mut ModelContext, - ) -> Self { - let saved_mtime; - if let Some(file) = 
file.as_ref() { - saved_mtime = file.mtime(); - } else { - saved_mtime = UNIX_EPOCH; - } - +impl TextBuffer { + pub fn new(replica_id: u16, remote_id: u64, history: History) -> TextBuffer { let mut fragments = SumTree::new(); let visible_text = Rope::from(history.base_text.as_ref()); @@ -638,24 +606,14 @@ impl Buffer { ); } - let mut result = Self { + TextBuffer { visible_text, deleted_text: Rope::new(), fragments, version: clock::Global::new(), - saved_version: clock::Global::new(), last_edit: clock::Local::default(), undo_map: Default::default(), history, - file, - syntax_tree: Mutex::new(None), - parsing_in_background: false, - parse_count: 0, - sync_parse_timeout: Duration::from_millis(1), - autoindent_requests: Default::default(), - pending_autoindent: Default::default(), - language, - saved_mtime, selections: HashMap::default(), deferred_ops: OperationQueue::new(), deferred_replicas: HashSet::default(), @@ -663,741 +621,637 @@ impl Buffer { remote_id, local_clock: clock::Local::new(replica_id), lamport_clock: clock::Lamport::new(replica_id), + } + } - #[cfg(test)] - operations: Default::default(), - }; - result.reparse(cx); - result + pub fn version(&self) -> clock::Global { + self.version.clone() + } + + fn content<'a>(&'a self) -> Content<'a> { + self.into() + } + + pub fn as_rope(&self) -> &Rope { + &self.visible_text + } + + pub fn text_summary_for_range(&self, range: Range) -> TextSummary { + self.content().text_summary_for_range(range) + } + + pub fn anchor_before(&self, position: T) -> Anchor { + self.anchor_at(position, Bias::Left) + } + + pub fn anchor_after(&self, position: T) -> Anchor { + self.anchor_at(position, Bias::Right) + } + + pub fn anchor_at(&self, position: T, bias: Bias) -> Anchor { + self.content().anchor_at(position, bias) + } + + pub fn point_for_offset(&self, offset: usize) -> Result { + self.content().point_for_offset(offset) + } + + pub fn clip_point(&self, point: Point, bias: Bias) -> Point { + self.visible_text.clip_point(point, bias) + } + + pub fn clip_offset(&self, offset: usize, bias: Bias) -> usize { + self.visible_text.clip_offset(offset, bias) } pub fn replica_id(&self) -> ReplicaId { self.local_clock.replica_id } - pub fn snapshot(&self) -> Snapshot { - Snapshot { - visible_text: self.visible_text.clone(), - fragments: self.fragments.clone(), - version: self.version.clone(), - tree: self.syntax_tree(), - is_parsing: self.parsing_in_background, - language: self.language.clone(), - query_cursor: QueryCursorHandle::new(), - } + pub fn remote_id(&self) -> u64 { + self.remote_id } - pub fn from_proto( - replica_id: ReplicaId, - message: proto::Buffer, - file: Option>, - language: Option>, - cx: &mut ModelContext, - ) -> Result { - let mut buffer = Buffer::build( - replica_id, - History::new(message.content.into()), - file, - message.id, - language, - cx, - ); - let ops = message - .history - .into_iter() - .map(|op| Operation::Edit(op.into())); - buffer.apply_ops(ops, cx)?; - buffer.selections = message - .selections - .into_iter() - .map(|set| { - let set_id = clock::Lamport { - replica_id: set.replica_id as ReplicaId, - value: set.local_timestamp, - }; - let selections: Vec = set - .selections - .into_iter() - .map(TryFrom::try_from) - .collect::>()?; - let set = SelectionSet { - selections: Arc::from(selections), - active: set.is_active, - }; - Result::<_, anyhow::Error>::Ok((set_id, set)) - }) - .collect::>()?; - Ok(buffer) + pub fn text_summary(&self) -> TextSummary { + self.visible_text.summary() } - pub fn to_proto(&self, cx: &mut 
ModelContext) -> proto::Buffer { - let ops = self.history.ops.values().map(Into::into).collect(); - proto::Buffer { - id: cx.model_id() as u64, - content: self.history.base_text.to_string(), - history: ops, - selections: self - .selections - .iter() - .map(|(set_id, set)| proto::SelectionSetSnapshot { - replica_id: set_id.replica_id as u32, - local_timestamp: set_id.value, - selections: set.selections.iter().map(Into::into).collect(), - is_active: set.active, - }) - .collect(), - } + pub fn len(&self) -> usize { + self.content().len() } - pub fn file(&self) -> Option<&dyn File> { - self.file.as_deref() + pub fn line_len(&self, row: u32) -> u32 { + self.content().line_len(row) } - pub fn file_mut(&mut self) -> Option<&mut dyn File> { - self.file.as_mut().map(|f| f.deref_mut() as &mut dyn File) + pub fn max_point(&self) -> Point { + self.visible_text.max_point() } - pub fn save( - &mut self, - cx: &mut ModelContext, - ) -> Result>> { - let file = self - .file - .as_ref() - .ok_or_else(|| anyhow!("buffer has no file"))?; - let text = self.visible_text.clone(); - let version = self.version.clone(); - let save = file.save(self.remote_id, text, version, cx.as_mut()); - Ok(cx.spawn(|this, mut cx| async move { - let (version, mtime) = save.await?; - this.update(&mut cx, |this, cx| { - this.did_save(version.clone(), mtime, None, cx); - }); - Ok((version, mtime)) - })) + pub fn row_count(&self) -> u32 { + self.max_point().row + 1 } - pub fn as_rope(&self) -> &Rope { - &self.visible_text + pub fn text(&self) -> String { + self.text_for_range(0..self.len()).collect() } - pub fn set_language(&mut self, language: Option>, cx: &mut ModelContext) { - self.language = language; - self.reparse(cx); + pub fn text_for_range<'a, T: ToOffset>(&'a self, range: Range) -> Chunks<'a> { + self.content().text_for_range(range) } - pub fn did_save( - &mut self, - version: clock::Global, - mtime: SystemTime, - new_file: Option>, - cx: &mut ModelContext, - ) { - self.saved_mtime = mtime; - self.saved_version = version; - if let Some(new_file) = new_file { - self.file = Some(new_file); - } - cx.emit(Event::Saved); + pub fn chars(&self) -> impl Iterator + '_ { + self.chars_at(0) } - pub fn file_updated( - &mut self, - path: Arc, - mtime: SystemTime, - new_text: Option, - cx: &mut ModelContext, - ) { - let file = self.file.as_mut().unwrap(); - let mut changed = false; - if path != *file.path() { - file.set_path(path); - changed = true; - } - - if mtime != file.mtime() { - file.set_mtime(mtime); - changed = true; - if let Some(new_text) = new_text { - if self.version == self.saved_version { - cx.spawn(|this, mut cx| async move { - let diff = this - .read_with(&cx, |this, cx| this.diff(new_text.into(), cx)) - .await; - this.update(&mut cx, |this, cx| { - if this.apply_diff(diff, cx) { - this.saved_version = this.version.clone(); - this.saved_mtime = mtime; - cx.emit(Event::Reloaded); - } - }); - }) - .detach(); - } - } - } + pub fn chars_at<'a, T: 'a + ToOffset>( + &'a self, + position: T, + ) -> impl Iterator + 'a { + self.content().chars_at(position) + } - if changed { - cx.emit(Event::FileHandleChanged); - } + pub fn reversed_chars_at<'a, T: 'a + ToOffset>( + &'a self, + position: T, + ) -> impl Iterator + 'a { + self.content().reversed_chars_at(position) } - pub fn file_deleted(&mut self, cx: &mut ModelContext) { - if self.version == self.saved_version { - cx.emit(Event::Dirtied); - } - cx.emit(Event::FileHandleChanged); + pub fn chars_for_range(&self, range: Range) -> impl Iterator + '_ { + 
self.text_for_range(range).flat_map(str::chars) } - pub fn close(&mut self, cx: &mut ModelContext) { - cx.emit(Event::Closed); + pub fn bytes_at(&self, position: T) -> impl Iterator + '_ { + let offset = position.to_offset(self); + self.visible_text.bytes_at(offset) } - pub fn language(&self) -> Option<&Arc> { - self.language.as_ref() + pub fn contains_str_at(&self, position: T, needle: &str) -> bool + where + T: ToOffset, + { + let position = position.to_offset(self); + position == self.clip_offset(position, Bias::Left) + && self + .bytes_at(position) + .take(needle.len()) + .eq(needle.bytes()) } - pub fn parse_count(&self) -> usize { - self.parse_count + pub fn deferred_ops_len(&self) -> usize { + self.deferred_ops.len() } - fn syntax_tree(&self) -> Option { - if let Some(syntax_tree) = self.syntax_tree.lock().as_mut() { - self.interpolate_tree(syntax_tree); - Some(syntax_tree.tree.clone()) + pub fn edit(&mut self, ranges: R, new_text: T) -> EditOperation + where + R: IntoIterator, + I: ExactSizeIterator>, + S: ToOffset, + T: Into, + { + let new_text = new_text.into(); + let new_text_len = new_text.len(); + let new_text = if new_text_len > 0 { + Some(new_text) } else { None - } - } + }; - #[cfg(any(test, feature = "test-support"))] - pub fn is_parsing(&self) -> bool { - self.parsing_in_background - } + self.start_transaction(None).unwrap(); + let timestamp = InsertionTimestamp { + replica_id: self.replica_id, + local: self.local_clock.tick().value, + lamport: self.lamport_clock.tick().value, + }; + let edit = self.apply_local_edit(ranges.into_iter(), new_text, timestamp); - #[cfg(test)] - pub fn set_sync_parse_timeout(&mut self, timeout: Duration) { - self.sync_parse_timeout = timeout; + self.history.push(edit.clone()); + self.history.push_undo(edit.timestamp.local()); + self.last_edit = edit.timestamp.local(); + self.version.observe(edit.timestamp.local()); + self.end_transaction(None); + edit } - fn reparse(&mut self, cx: &mut ModelContext) -> bool { - if self.parsing_in_background { - return false; - } + fn apply_local_edit( + &mut self, + ranges: impl ExactSizeIterator>, + new_text: Option, + timestamp: InsertionTimestamp, + ) -> EditOperation { + let mut edit = EditOperation { + timestamp, + version: self.version(), + ranges: Vec::with_capacity(ranges.len()), + new_text: None, + }; - if let Some(language) = self.language.clone() { - let old_tree = self.syntax_tree(); - let text = self.visible_text.clone(); - let parsed_version = self.version(); - let parse_task = cx.background().spawn({ - let language = language.clone(); - async move { Self::parse_text(&text, old_tree, &language) } - }); + let mut ranges = ranges + .map(|range| range.start.to_offset(&*self)..range.end.to_offset(&*self)) + .peekable(); - match cx - .background() - .block_with_timeout(self.sync_parse_timeout, parse_task) - { - Ok(new_tree) => { - self.did_finish_parsing(new_tree, parsed_version, cx); - return true; - } - Err(parse_task) => { - self.parsing_in_background = true; - cx.spawn(move |this, mut cx| async move { - let new_tree = parse_task.await; - this.update(&mut cx, move |this, cx| { - let language_changed = - this.language.as_ref().map_or(true, |curr_language| { - !Arc::ptr_eq(curr_language, &language) - }); - let parse_again = this.version > parsed_version || language_changed; - this.parsing_in_background = false; - this.did_finish_parsing(new_tree, parsed_version, cx); + let mut new_ropes = + RopeBuilder::new(self.visible_text.cursor(0), self.deleted_text.cursor(0)); + let mut old_fragments = 
self.fragments.cursor::(); + let mut new_fragments = + old_fragments.slice(&ranges.peek().unwrap().start, Bias::Right, &None); + new_ropes.push_tree(new_fragments.summary().text); - if parse_again && this.reparse(cx) { - return; - } - }); - }) - .detach(); + let mut fragment_start = old_fragments.start().visible; + for range in ranges { + let fragment_end = old_fragments.end(&None).visible; + + // If the current fragment ends before this range, then jump ahead to the first fragment + // that extends past the start of this range, reusing any intervening fragments. + if fragment_end < range.start { + // If the current fragment has been partially consumed, then consume the rest of it + // and advance to the next fragment before slicing. + if fragment_start > old_fragments.start().visible { + if fragment_end > fragment_start { + let mut suffix = old_fragments.item().unwrap().clone(); + suffix.len = fragment_end - fragment_start; + new_ropes.push_fragment(&suffix, suffix.visible); + new_fragments.push(suffix, &None); + } + old_fragments.next(&None); } - } - } - false - } - fn parse_text(text: &Rope, old_tree: Option, language: &Language) -> Tree { - PARSER.with(|parser| { - let mut parser = parser.borrow_mut(); - parser - .set_language(language.grammar) - .expect("incompatible grammar"); - let mut chunks = text.chunks_in_range(0..text.len()); - let tree = parser - .parse_with( - &mut move |offset, _| { - chunks.seek(offset); - chunks.next().unwrap_or("").as_bytes() - }, - old_tree.as_ref(), - ) - .unwrap(); - tree - }) - } + let slice = old_fragments.slice(&range.start, Bias::Right, &None); + new_ropes.push_tree(slice.summary().text); + new_fragments.push_tree(slice, &None); + fragment_start = old_fragments.start().visible; + } - fn interpolate_tree(&self, tree: &mut SyntaxTree) { - let mut delta = 0_isize; - for edit in self.edits_since(tree.version.clone()) { - let start_offset = (edit.old_bytes.start as isize + delta) as usize; - let start_point = self.visible_text.to_point(start_offset); - tree.tree.edit(&InputEdit { - start_byte: start_offset, - old_end_byte: start_offset + edit.deleted_bytes(), - new_end_byte: start_offset + edit.inserted_bytes(), - start_position: start_point.into(), - old_end_position: (start_point + edit.deleted_lines()).into(), - new_end_position: self - .visible_text - .to_point(start_offset + edit.inserted_bytes()) - .into(), - }); - delta += edit.inserted_bytes() as isize - edit.deleted_bytes() as isize; - } - tree.version = self.version(); - } + let full_range_start = range.start + old_fragments.start().deleted; - fn did_finish_parsing( - &mut self, - tree: Tree, - version: clock::Global, - cx: &mut ModelContext, - ) { - self.parse_count += 1; - *self.syntax_tree.lock() = Some(SyntaxTree { tree, version }); - self.request_autoindent(cx); - cx.emit(Event::Reparsed); - cx.notify(); - } + // Preserve any portion of the current fragment that precedes this range. 
+ if fragment_start < range.start { + let mut prefix = old_fragments.item().unwrap().clone(); + prefix.len = range.start - fragment_start; + new_ropes.push_fragment(&prefix, prefix.visible); + new_fragments.push(prefix, &None); + fragment_start = range.start; + } - fn request_autoindent(&mut self, cx: &mut ModelContext) { - if let Some(indent_columns) = self.compute_autoindents() { - let indent_columns = cx.background().spawn(indent_columns); - match cx - .background() - .block_with_timeout(Duration::from_micros(500), indent_columns) - { - Ok(indent_columns) => self.apply_autoindents(indent_columns, cx), - Err(indent_columns) => { - self.pending_autoindent = Some(cx.spawn(|this, mut cx| async move { - let indent_columns = indent_columns.await; - this.update(&mut cx, |this, cx| { - this.apply_autoindents(indent_columns, cx); - }); - })); + // Insert the new text before any existing fragments within the range. + if let Some(new_text) = new_text.as_deref() { + new_ropes.push_str(new_text); + new_fragments.push( + Fragment { + timestamp, + len: new_text.len(), + deletions: Default::default(), + max_undos: Default::default(), + visible: true, + }, + &None, + ); + } + + // Advance through every fragment that intersects this range, marking the intersecting + // portions as deleted. + while fragment_start < range.end { + let fragment = old_fragments.item().unwrap(); + let fragment_end = old_fragments.end(&None).visible; + let mut intersection = fragment.clone(); + let intersection_end = cmp::min(range.end, fragment_end); + if fragment.visible { + intersection.len = intersection_end - fragment_start; + intersection.deletions.insert(timestamp.local()); + intersection.visible = false; + } + if intersection.len > 0 { + new_ropes.push_fragment(&intersection, fragment.visible); + new_fragments.push(intersection, &None); + fragment_start = intersection_end; + } + if fragment_end <= range.end { + old_fragments.next(&None); } } + + let full_range_end = range.end + old_fragments.start().deleted; + edit.ranges.push(full_range_start..full_range_end); } - } - fn compute_autoindents(&self) -> Option>> { - let max_rows_between_yields = 100; - let snapshot = self.snapshot(); - if snapshot.language.is_none() - || snapshot.tree.is_none() - || self.autoindent_requests.is_empty() - { - return None; + // If the current fragment has been partially consumed, then consume the rest of it + // and advance to the next fragment before slicing. 
+ if fragment_start > old_fragments.start().visible { + let fragment_end = old_fragments.end(&None).visible; + if fragment_end > fragment_start { + let mut suffix = old_fragments.item().unwrap().clone(); + suffix.len = fragment_end - fragment_start; + new_ropes.push_fragment(&suffix, suffix.visible); + new_fragments.push(suffix, &None); + } + old_fragments.next(&None); } - let autoindent_requests = self.autoindent_requests.clone(); - Some(async move { - let mut indent_columns = BTreeMap::new(); - for request in autoindent_requests { - let old_to_new_rows = request - .edited - .to_points(&request.before_edit) - .map(|point| point.row) - .zip(request.edited.to_points(&snapshot).map(|point| point.row)) - .collect::>(); + let suffix = old_fragments.suffix(&None); + new_ropes.push_tree(suffix.summary().text); + new_fragments.push_tree(suffix, &None); + let (visible_text, deleted_text) = new_ropes.finish(); + drop(old_fragments); - let mut old_suggestions = HashMap::default(); - let old_edited_ranges = - contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields); - for old_edited_range in old_edited_ranges { - let suggestions = request - .before_edit - .suggest_autoindents(old_edited_range.clone()) - .into_iter() - .flatten(); - for (old_row, suggestion) in old_edited_range.zip(suggestions) { - let indentation_basis = old_to_new_rows - .get(&suggestion.basis_row) - .and_then(|from_row| old_suggestions.get(from_row).copied()) - .unwrap_or_else(|| { - request - .before_edit - .indent_column_for_line(suggestion.basis_row) - }); - let delta = if suggestion.indent { INDENT_SIZE } else { 0 }; - old_suggestions.insert( - *old_to_new_rows.get(&old_row).unwrap(), - indentation_basis + delta, - ); - } - yield_now().await; - } + self.fragments = new_fragments; + self.visible_text = visible_text; + self.deleted_text = deleted_text; + edit.new_text = new_text; + edit + } - // At this point, old_suggestions contains the suggested indentation for all edited lines with respect to the state of the - // buffer before the edit, but keyed by the row for these lines after the edits were applied. 
- let new_edited_row_ranges = - contiguous_ranges(old_to_new_rows.values().copied(), max_rows_between_yields); - for new_edited_row_range in new_edited_row_ranges { - let suggestions = snapshot - .suggest_autoindents(new_edited_row_range.clone()) - .into_iter() - .flatten(); - for (new_row, suggestion) in new_edited_row_range.zip(suggestions) { - let delta = if suggestion.indent { INDENT_SIZE } else { 0 }; - let new_indentation = indent_columns - .get(&suggestion.basis_row) - .copied() - .unwrap_or_else(|| { - snapshot.indent_column_for_line(suggestion.basis_row) - }) - + delta; - if old_suggestions - .get(&new_row) - .map_or(true, |old_indentation| new_indentation != *old_indentation) - { - indent_columns.insert(new_row, new_indentation); - } - } - yield_now().await; - } + pub fn apply_ops>(&mut self, ops: I) -> Result<()> { + let mut deferred_ops = Vec::new(); + for op in ops { + if self.can_apply_op(&op) { + self.apply_op(op)?; + } else { + self.deferred_replicas.insert(op.replica_id()); + deferred_ops.push(op); + } + } + self.deferred_ops.insert(deferred_ops); + self.flush_deferred_ops()?; + Ok(()) + } - if let Some(inserted) = request.inserted.as_ref() { - let inserted_row_ranges = contiguous_ranges( - inserted - .to_point_ranges(&snapshot) - .flat_map(|range| range.start.row..range.end.row + 1), - max_rows_between_yields, + fn apply_op(&mut self, op: Operation) -> Result<()> { + match op { + Operation::Edit(edit) => { + if !self.version.observed(edit.timestamp.local()) { + self.apply_remote_edit( + &edit.version, + &edit.ranges, + edit.new_text.as_deref(), + edit.timestamp, ); - for inserted_row_range in inserted_row_ranges { - let suggestions = snapshot - .suggest_autoindents(inserted_row_range.clone()) - .into_iter() - .flatten(); - for (row, suggestion) in inserted_row_range.zip(suggestions) { - let delta = if suggestion.indent { INDENT_SIZE } else { 0 }; - let new_indentation = indent_columns - .get(&suggestion.basis_row) - .copied() - .unwrap_or_else(|| { - snapshot.indent_column_for_line(suggestion.basis_row) - }) - + delta; - indent_columns.insert(row, new_indentation); + self.version.observe(edit.timestamp.local()); + self.history.push(edit); + } + } + Operation::Undo { + undo, + lamport_timestamp, + } => { + if !self.version.observed(undo.id) { + self.apply_undo(&undo)?; + self.version.observe(undo.id); + self.lamport_clock.observe(lamport_timestamp); + } + } + Operation::UpdateSelections { + set_id, + selections, + lamport_timestamp, + } => { + if let Some(selections) = selections { + if let Some(set) = self.selections.get_mut(&set_id) { + set.selections = selections; + } else { + self.selections.insert( + set_id, + SelectionSet { + selections, + active: false, + }, + ); + } + } else { + self.selections.remove(&set_id); + } + self.lamport_clock.observe(lamport_timestamp); + } + Operation::SetActiveSelections { + set_id, + lamport_timestamp, + } => { + for (id, set) in &mut self.selections { + if id.replica_id == lamport_timestamp.replica_id { + if Some(*id) == set_id { + set.active = true; + } else { + set.active = false; } - yield_now().await; } } + self.lamport_clock.observe(lamport_timestamp); } - indent_columns - }) + #[cfg(test)] + Operation::Test(_) => {} + } + Ok(()) } - fn apply_autoindents( + fn apply_remote_edit( &mut self, - indent_columns: BTreeMap, - cx: &mut ModelContext, + version: &clock::Global, + ranges: &[Range], + new_text: Option<&str>, + timestamp: InsertionTimestamp, ) { - let selection_set_ids = self - .autoindent_requests - .drain(..) 
- .flat_map(|req| req.selection_set_ids.clone()) - .collect::>(); - - self.start_transaction(selection_set_ids.iter().copied()) - .unwrap(); - for (row, indent_column) in &indent_columns { - self.set_indent_column_for_line(*row, *indent_column, cx); + if ranges.is_empty() { + return; } - for selection_set_id in &selection_set_ids { - if let Some(set) = self.selections.get(selection_set_id) { - let new_selections = set - .selections - .iter() - .map(|selection| { - let start_point = selection.start.to_point(&*self); - if start_point.column == 0 { - let end_point = selection.end.to_point(&*self); - let delta = Point::new( - 0, - indent_columns.get(&start_point.row).copied().unwrap_or(0), - ); - if delta.column > 0 { - return Selection { - id: selection.id, - goal: selection.goal, - reversed: selection.reversed, - start: self - .anchor_at(start_point + delta, selection.start.bias), - end: self.anchor_at(end_point + delta, selection.end.bias), - }; - } - } - selection.clone() - }) - .collect::>(); - self.update_selection_set(*selection_set_id, new_selections, cx) - .unwrap(); - } - } - - self.end_transaction(selection_set_ids.iter().copied(), cx) - .unwrap(); - } + let cx = Some(version.clone()); + let mut new_ropes = + RopeBuilder::new(self.visible_text.cursor(0), self.deleted_text.cursor(0)); + let mut old_fragments = self.fragments.cursor::(); + let mut new_fragments = + old_fragments.slice(&VersionedOffset::Offset(ranges[0].start), Bias::Left, &cx); + new_ropes.push_tree(new_fragments.summary().text); - pub fn indent_column_for_line(&self, row: u32) -> u32 { - self.content().indent_column_for_line(row) - } + let mut fragment_start = old_fragments.start().offset(); + for range in ranges { + let fragment_end = old_fragments.end(&cx).offset(); - fn set_indent_column_for_line(&mut self, row: u32, column: u32, cx: &mut ModelContext) { - let current_column = self.indent_column_for_line(row); - if column > current_column { - let offset = self.visible_text.to_offset(Point::new(row, 0)); - self.edit( - [offset..offset], - " ".repeat((column - current_column) as usize), - cx, - ); - } else if column < current_column { - self.edit( - [Point::new(row, 0)..Point::new(row, current_column - column)], - "", - cx, - ); - } - } + // If the current fragment ends before this range, then jump ahead to the first fragment + // that extends past the start of this range, reusing any intervening fragments. + if fragment_end < range.start { + // If the current fragment has been partially consumed, then consume the rest of it + // and advance to the next fragment before slicing. 
+ if fragment_start > old_fragments.start().offset() { + if fragment_end > fragment_start { + let mut suffix = old_fragments.item().unwrap().clone(); + suffix.len = fragment_end - fragment_start; + new_ropes.push_fragment(&suffix, suffix.visible); + new_fragments.push(suffix, &None); + } + old_fragments.next(&cx); + } - pub fn range_for_syntax_ancestor(&self, range: Range) -> Option> { - if let Some(tree) = self.syntax_tree() { - let root = tree.root_node(); - let range = range.start.to_offset(self)..range.end.to_offset(self); - let mut node = root.descendant_for_byte_range(range.start, range.end); - while node.map_or(false, |n| n.byte_range() == range) { - node = node.unwrap().parent(); + let slice = + old_fragments.slice(&VersionedOffset::Offset(range.start), Bias::Left, &cx); + new_ropes.push_tree(slice.summary().text); + new_fragments.push_tree(slice, &None); + fragment_start = old_fragments.start().offset(); } - node.map(|n| n.byte_range()) - } else { - None - } - } - - pub fn enclosing_bracket_ranges( - &self, - range: Range, - ) -> Option<(Range, Range)> { - let (lang, tree) = self.language.as_ref().zip(self.syntax_tree())?; - let open_capture_ix = lang.brackets_query.capture_index_for_name("open")?; - let close_capture_ix = lang.brackets_query.capture_index_for_name("close")?; - // Find bracket pairs that *inclusively* contain the given range. - let range = range.start.to_offset(self).saturating_sub(1)..range.end.to_offset(self) + 1; - let mut cursor = QueryCursorHandle::new(); - let matches = cursor.set_byte_range(range).matches( - &lang.brackets_query, - tree.root_node(), - TextProvider(&self.visible_text), - ); + // If we are at the end of a non-concurrent fragment, advance to the next one. + let fragment_end = old_fragments.end(&cx).offset(); + if fragment_end == range.start && fragment_end > fragment_start { + let mut fragment = old_fragments.item().unwrap().clone(); + fragment.len = fragment_end - fragment_start; + new_ropes.push_fragment(&fragment, fragment.visible); + new_fragments.push(fragment, &None); + old_fragments.next(&cx); + fragment_start = old_fragments.start().offset(); + } - // Get the ranges of the innermost pair of brackets. - matches - .filter_map(|mat| { - let open = mat.nodes_for_capture_index(open_capture_ix).next()?; - let close = mat.nodes_for_capture_index(close_capture_ix).next()?; - Some((open.byte_range(), close.byte_range())) - }) - .min_by_key(|(open_range, close_range)| close_range.end - open_range.start) - } + // Skip over insertions that are concurrent to this edit, but have a lower lamport + // timestamp. + while let Some(fragment) = old_fragments.item() { + if fragment_start == range.start + && fragment.timestamp.lamport() > timestamp.lamport() + { + new_ropes.push_fragment(fragment, fragment.visible); + new_fragments.push(fragment.clone(), &None); + old_fragments.next(&cx); + debug_assert_eq!(fragment_start, range.start); + } else { + break; + } + } + debug_assert!(fragment_start <= range.start); - fn diff(&self, new_text: Arc, cx: &AppContext) -> Task { - // TODO: it would be nice to not allocate here. - let old_text = self.text(); - let base_version = self.version(); - cx.background().spawn(async move { - let changes = TextDiff::from_lines(old_text.as_str(), new_text.as_ref()) - .iter_all_changes() - .map(|c| (c.tag(), c.value().len())) - .collect::>(); - Diff { - base_version, - new_text, - changes, + // Preserve any portion of the current fragment that precedes this range. 
+ if fragment_start < range.start { + let mut prefix = old_fragments.item().unwrap().clone(); + prefix.len = range.start - fragment_start; + fragment_start = range.start; + new_ropes.push_fragment(&prefix, prefix.visible); + new_fragments.push(prefix, &None); } - }) - } - pub fn set_text_from_disk(&self, new_text: Arc, cx: &mut ModelContext) -> Task<()> { - cx.spawn(|this, mut cx| async move { - let diff = this - .read_with(&cx, |this, cx| this.diff(new_text, cx)) - .await; + // Insert the new text before any existing fragments within the range. + if let Some(new_text) = new_text { + new_ropes.push_str(new_text); + new_fragments.push( + Fragment { + timestamp, + len: new_text.len(), + deletions: Default::default(), + max_undos: Default::default(), + visible: true, + }, + &None, + ); + } - this.update(&mut cx, |this, cx| { - if this.apply_diff(diff, cx) { - this.saved_version = this.version.clone(); + // Advance through every fragment that intersects this range, marking the intersecting + // portions as deleted. + while fragment_start < range.end { + let fragment = old_fragments.item().unwrap(); + let fragment_end = old_fragments.end(&cx).offset(); + let mut intersection = fragment.clone(); + let intersection_end = cmp::min(range.end, fragment_end); + if fragment.was_visible(version, &self.undo_map) { + intersection.len = intersection_end - fragment_start; + intersection.deletions.insert(timestamp.local()); + intersection.visible = false; } - }); - }) - } - - fn apply_diff(&mut self, diff: Diff, cx: &mut ModelContext) -> bool { - if self.version == diff.base_version { - self.start_transaction(None).unwrap(); - let mut offset = 0; - for (tag, len) in diff.changes { - let range = offset..(offset + len); - match tag { - ChangeTag::Equal => offset += len, - ChangeTag::Delete => self.edit(Some(range), "", cx), - ChangeTag::Insert => { - self.edit(Some(offset..offset), &diff.new_text[range], cx); - offset += len; - } + if intersection.len > 0 { + new_ropes.push_fragment(&intersection, fragment.visible); + new_fragments.push(intersection, &None); + fragment_start = intersection_end; + } + if fragment_end <= range.end { + old_fragments.next(&cx); } } - self.end_transaction(None, cx).unwrap(); - true - } else { - false } - } - pub fn is_dirty(&self) -> bool { - self.version > self.saved_version - || self.file.as_ref().map_or(false, |file| file.is_deleted()) - } + // If the current fragment has been partially consumed, then consume the rest of it + // and advance to the next fragment before slicing. 
+ if fragment_start > old_fragments.start().offset() { + let fragment_end = old_fragments.end(&cx).offset(); + if fragment_end > fragment_start { + let mut suffix = old_fragments.item().unwrap().clone(); + suffix.len = fragment_end - fragment_start; + new_ropes.push_fragment(&suffix, suffix.visible); + new_fragments.push(suffix, &None); + } + old_fragments.next(&cx); + } - pub fn has_conflict(&self) -> bool { - self.version > self.saved_version - && self - .file - .as_ref() - .map_or(false, |file| file.mtime() > self.saved_mtime) - } + let suffix = old_fragments.suffix(&cx); + new_ropes.push_tree(suffix.summary().text); + new_fragments.push_tree(suffix, &None); + let (visible_text, deleted_text) = new_ropes.finish(); + drop(old_fragments); - pub fn remote_id(&self) -> u64 { - self.remote_id - } - - pub fn version(&self) -> clock::Global { - self.version.clone() - } - - pub fn text_summary(&self) -> TextSummary { - self.visible_text.summary() - } - - pub fn len(&self) -> usize { - self.content().len() + self.fragments = new_fragments; + self.visible_text = visible_text; + self.deleted_text = deleted_text; + self.local_clock.observe(timestamp.local()); + self.lamport_clock.observe(timestamp.lamport()); } - pub fn line_len(&self, row: u32) -> u32 { - self.content().line_len(row) - } + fn apply_undo(&mut self, undo: &UndoOperation) -> Result<()> { + self.undo_map.insert(undo); - pub fn max_point(&self) -> Point { - self.visible_text.max_point() - } + let mut cx = undo.version.clone(); + for edit_id in undo.counts.keys().copied() { + cx.observe(edit_id); + } + let cx = Some(cx); - pub fn row_count(&self) -> u32 { - self.max_point().row + 1 - } + let mut old_fragments = self.fragments.cursor::(); + let mut new_fragments = old_fragments.slice( + &VersionedOffset::Offset(undo.ranges[0].start), + Bias::Right, + &cx, + ); + let mut new_ropes = + RopeBuilder::new(self.visible_text.cursor(0), self.deleted_text.cursor(0)); + new_ropes.push_tree(new_fragments.summary().text); - pub fn text(&self) -> String { - self.text_for_range(0..self.len()).collect() - } + for range in &undo.ranges { + let mut end_offset = old_fragments.end(&cx).offset(); - pub fn text_for_range<'a, T: ToOffset>(&'a self, range: Range) -> Chunks<'a> { - self.content().text_for_range(range) - } + if end_offset < range.start { + let preceding_fragments = + old_fragments.slice(&VersionedOffset::Offset(range.start), Bias::Right, &cx); + new_ropes.push_tree(preceding_fragments.summary().text); + new_fragments.push_tree(preceding_fragments, &None); + } - pub fn chars(&self) -> impl Iterator + '_ { - self.chars_at(0) - } + while end_offset <= range.end { + if let Some(fragment) = old_fragments.item() { + let mut fragment = fragment.clone(); + let fragment_was_visible = fragment.visible; - pub fn chars_at<'a, T: 'a + ToOffset>( - &'a self, - position: T, - ) -> impl Iterator + 'a { - self.content().chars_at(position) - } + if fragment.was_visible(&undo.version, &self.undo_map) + || undo.counts.contains_key(&fragment.timestamp.local()) + { + fragment.visible = fragment.is_visible(&self.undo_map); + fragment.max_undos.observe(undo.id); + } + new_ropes.push_fragment(&fragment, fragment_was_visible); + new_fragments.push(fragment, &None); - pub fn reversed_chars_at<'a, T: 'a + ToOffset>( - &'a self, - position: T, - ) -> impl Iterator + 'a { - self.content().reversed_chars_at(position) - } + old_fragments.next(&cx); + if end_offset == old_fragments.end(&cx).offset() { + let unseen_fragments = old_fragments.slice( + 
&VersionedOffset::Offset(end_offset), + Bias::Right, + &cx, + ); + new_ropes.push_tree(unseen_fragments.summary().text); + new_fragments.push_tree(unseen_fragments, &None); + } + end_offset = old_fragments.end(&cx).offset(); + } else { + break; + } + } + } - pub fn chars_for_range(&self, range: Range) -> impl Iterator + '_ { - self.text_for_range(range).flat_map(str::chars) - } + let suffix = old_fragments.suffix(&cx); + new_ropes.push_tree(suffix.summary().text); + new_fragments.push_tree(suffix, &None); - pub fn bytes_at(&self, position: T) -> impl Iterator + '_ { - let offset = position.to_offset(self); - self.visible_text.bytes_at(offset) + drop(old_fragments); + let (visible_text, deleted_text) = new_ropes.finish(); + self.fragments = new_fragments; + self.visible_text = visible_text; + self.deleted_text = deleted_text; + Ok(()) } - pub fn contains_str_at(&self, position: T, needle: &str) -> bool - where - T: ToOffset, - { - let position = position.to_offset(self); - position == self.clip_offset(position, Bias::Left) - && self - .bytes_at(position) - .take(needle.len()) - .eq(needle.bytes()) + fn flush_deferred_ops(&mut self) -> Result<()> { + self.deferred_replicas.clear(); + let mut deferred_ops = Vec::new(); + for op in self.deferred_ops.drain().cursor().cloned() { + if self.can_apply_op(&op) { + self.apply_op(op)?; + } else { + self.deferred_replicas.insert(op.replica_id()); + deferred_ops.push(op); + } + } + self.deferred_ops.insert(deferred_ops); + Ok(()) } - pub fn edits_since<'a>(&'a self, since: clock::Global) -> impl 'a + Iterator { - let since_2 = since.clone(); - let cursor = if since == self.version { - None + fn can_apply_op(&self, op: &Operation) -> bool { + if self.deferred_replicas.contains(&op.replica_id()) { + false } else { - Some(self.fragments.filter( - move |summary| summary.max_version.changed_since(&since_2), - &None, - )) - }; - - Edits { - visible_text: &self.visible_text, - deleted_text: &self.deleted_text, - cursor, - undos: &self.undo_map, - since, - old_offset: 0, - new_offset: 0, - old_point: Point::zero(), - new_point: Point::zero(), + match op { + Operation::Edit(edit) => self.version >= edit.version, + Operation::Undo { undo, .. } => self.version >= undo.version, + Operation::UpdateSelections { selections, .. } => { + if let Some(selections) = selections { + selections.iter().all(|selection| { + let contains_start = self.version >= selection.start.version; + let contains_end = self.version >= selection.end.version; + contains_start && contains_end + }) + } else { + true + } + } + Operation::SetActiveSelections { set_id, .. 
} => { + set_id.map_or(true, |set_id| self.selections.contains_key(&set_id)) + } + #[cfg(test)] + Operation::Test(_) => true, + } } } - pub fn deferred_ops_len(&self) -> usize { - self.deferred_ops.len() + pub fn peek_undo_stack(&self) -> Option<&Transaction> { + self.history.undo_stack.last() } pub fn start_transaction( &mut self, selection_set_ids: impl IntoIterator, ) -> Result<()> { - self.start_transaction_at(selection_set_ids, Instant::now()) + self.start_transaction_at(selection_set_ids, Instant::now())?; + Ok(()) } fn start_transaction_at( @@ -1416,24 +1270,19 @@ impl Buffer { }) .collect(); self.history - .start_transaction(self.version.clone(), self.is_dirty(), selections, now); + .start_transaction(self.version.clone(), selections, now); Ok(()) } - pub fn end_transaction( - &mut self, - selection_set_ids: impl IntoIterator, - cx: &mut ModelContext, - ) -> Result<()> { - self.end_transaction_at(selection_set_ids, Instant::now(), cx) + fn end_transaction(&mut self, selection_set_ids: impl IntoIterator) { + self.end_transaction_at(selection_set_ids, Instant::now()); } fn end_transaction_at( &mut self, selection_set_ids: impl IntoIterator, now: Instant, - cx: &mut ModelContext, - ) -> Result<()> { + ) -> Option { let selections = selection_set_ids .into_iter() .map(|set_id| { @@ -1447,209 +1296,112 @@ impl Buffer { if let Some(transaction) = self.history.end_transaction(selections, now) { let since = transaction.start.clone(); - let was_dirty = transaction.buffer_was_dirty; self.history.group(); + Some(since) + } else { + None + } + } - cx.notify(); - if self.edits_since(since).next().is_some() { - self.did_edit(was_dirty, cx); - self.reparse(cx); + fn remove_peer(&mut self, replica_id: ReplicaId) { + self.selections + .retain(|set_id, _| set_id.replica_id != replica_id) + } + + fn undo(&mut self) -> Vec { + let mut ops = Vec::new(); + if let Some(transaction) = self.history.pop_undo().cloned() { + let selections = transaction.selections_before.clone(); + ops.push(self.undo_or_redo(transaction).unwrap()); + for (set_id, selections) in selections { + ops.extend(self.update_selection_set(set_id, selections)); } } + ops + } - Ok(()) + fn redo(&mut self) -> Vec { + let mut ops = Vec::new(); + if let Some(transaction) = self.history.pop_redo().cloned() { + let selections = transaction.selections_after.clone(); + ops.push(self.undo_or_redo(transaction).unwrap()); + for (set_id, selections) in selections { + ops.extend(self.update_selection_set(set_id, selections)); + } + } + ops } - pub fn edit(&mut self, ranges_iter: I, new_text: T, cx: &mut ModelContext) - where - I: IntoIterator>, - S: ToOffset, - T: Into, - { - self.edit_internal(ranges_iter, new_text, false, cx) + fn undo_or_redo(&mut self, transaction: Transaction) -> Result { + let mut counts = HashMap::default(); + for edit_id in transaction.edits { + counts.insert(edit_id, self.undo_map.undo_count(edit_id) + 1); + } + + let undo = UndoOperation { + id: self.local_clock.tick(), + counts, + ranges: transaction.ranges, + version: transaction.start.clone(), + }; + self.apply_undo(&undo)?; + self.version.observe(undo.id); + + Ok(Operation::Undo { + undo, + lamport_timestamp: self.lamport_clock.tick(), + }) } - pub fn edit_with_autoindent( - &mut self, - ranges_iter: I, - new_text: T, - cx: &mut ModelContext, - ) where - I: IntoIterator>, - S: ToOffset, - T: Into, - { - self.edit_internal(ranges_iter, new_text, true, cx) + pub fn selection_set(&self, set_id: SelectionSetId) -> Result<&SelectionSet> { + self.selections + .get(&set_id) + 
.ok_or_else(|| anyhow!("invalid selection set id {:?}", set_id)) } - pub fn edit_internal( - &mut self, - ranges_iter: I, - new_text: T, - autoindent: bool, - cx: &mut ModelContext, - ) where - I: IntoIterator>, - S: ToOffset, - T: Into, - { - let new_text = new_text.into(); - - // Skip invalid ranges and coalesce contiguous ones. - let mut ranges: Vec> = Vec::new(); - for range in ranges_iter { - let range = range.start.to_offset(&*self)..range.end.to_offset(&*self); - if !new_text.is_empty() || !range.is_empty() { - if let Some(prev_range) = ranges.last_mut() { - if prev_range.end >= range.start { - prev_range.end = cmp::max(prev_range.end, range.end); - } else { - ranges.push(range); - } - } else { - ranges.push(range); - } - } - } - if ranges.is_empty() { - return; - } - - self.pending_autoindent.take(); - let autoindent_request = if autoindent && self.language.is_some() { - let before_edit = self.snapshot(); - let edited = self.content().anchor_set(ranges.iter().filter_map(|range| { - let start = range.start.to_point(&*self); - if new_text.starts_with('\n') && start.column == self.line_len(start.row) { - None - } else { - Some((range.start, Bias::Left)) - } - })); - Some((before_edit, edited)) - } else { - None - }; - - let first_newline_ix = new_text.find('\n'); - let new_text_len = new_text.len(); - let new_text = if new_text_len > 0 { - Some(new_text) - } else { - None - }; - - self.start_transaction(None).unwrap(); - let timestamp = InsertionTimestamp { - replica_id: self.replica_id, - local: self.local_clock.tick().value, - lamport: self.lamport_clock.tick().value, - }; - let edit = self.apply_local_edit(&ranges, new_text, timestamp); - - self.history.push(edit.clone()); - self.history.push_undo(edit.timestamp.local()); - self.last_edit = edit.timestamp.local(); - self.version.observe(edit.timestamp.local()); - - if let Some((before_edit, edited)) = autoindent_request { - let mut inserted = None; - if let Some(first_newline_ix) = first_newline_ix { - let mut delta = 0isize; - inserted = Some(self.content().anchor_range_set(ranges.iter().map(|range| { - let start = (delta + range.start as isize) as usize + first_newline_ix + 1; - let end = (delta + range.start as isize) as usize + new_text_len; - delta += (range.end as isize - range.start as isize) + new_text_len as isize; - (start, Bias::Left)..(end, Bias::Right) - }))); - } - - let selection_set_ids = self - .history - .undo_stack - .last() - .unwrap() - .selections_before - .keys() - .copied() - .collect(); - self.autoindent_requests.push(Arc::new(AutoindentRequest { - selection_set_ids, - before_edit, - edited, - inserted, - })); - } - - self.end_transaction(None, cx).unwrap(); - self.send_operation(Operation::Edit(edit), cx); - } - - fn did_edit(&self, was_dirty: bool, cx: &mut ModelContext) { - cx.emit(Event::Edited); - if !was_dirty { - cx.emit(Event::Dirtied); - } - } - - pub fn add_selection_set( - &mut self, - selections: impl Into>, - cx: &mut ModelContext, - ) -> SelectionSetId { - let selections = selections.into(); - let lamport_timestamp = self.lamport_clock.tick(); - self.selections.insert( - lamport_timestamp, - SelectionSet { - selections: selections.clone(), - active: false, - }, - ); - cx.notify(); - - self.send_operation( - Operation::UpdateSelections { - set_id: lamport_timestamp, - selections: Some(selections), - lamport_timestamp, - }, - cx, - ); - - lamport_timestamp - } - - pub fn update_selection_set( + pub fn selection_sets(&self) -> impl Iterator { + self.selections.iter() + } + + pub fn 
update_selection_set( &mut self, set_id: SelectionSetId, selections: impl Into>, - cx: &mut ModelContext, - ) -> Result<()> { + ) -> Result { let selections = selections.into(); let set = self .selections .get_mut(&set_id) .ok_or_else(|| anyhow!("invalid selection set id {:?}", set_id))?; set.selections = selections.clone(); + Ok(Operation::UpdateSelections { + set_id, + selections: Some(selections), + lamport_timestamp: self.lamport_clock.tick(), + }) + } + + pub fn add_selection_set(&mut self, selections: impl Into>) -> Operation { + let selections = selections.into(); let lamport_timestamp = self.lamport_clock.tick(); - cx.notify(); - self.send_operation( - Operation::UpdateSelections { - set_id, - selections: Some(selections), - lamport_timestamp, + self.selections.insert( + lamport_timestamp, + SelectionSet { + selections: selections.clone(), + active: false, }, - cx, ); - Ok(()) + Operation::UpdateSelections { + set_id: lamport_timestamp, + selections: Some(selections), + lamport_timestamp, + } } pub fn set_active_selection_set( &mut self, set_id: Option, - cx: &mut ModelContext, - ) -> Result<()> { + ) -> Result { if let Some(set_id) = set_id { assert_eq!(set_id.replica_id, self.replica_id()); } @@ -1664,646 +1416,1030 @@ impl Buffer { } } - let lamport_timestamp = self.lamport_clock.tick(); - self.send_operation( - Operation::SetActiveSelections { - set_id, - lamport_timestamp, - }, - cx, - ); - Ok(()) + Ok(Operation::SetActiveSelections { + set_id, + lamport_timestamp: self.lamport_clock.tick(), + }) } - pub fn remove_selection_set( - &mut self, - set_id: SelectionSetId, - cx: &mut ModelContext, - ) -> Result<()> { + pub fn remove_selection_set(&mut self, set_id: SelectionSetId) -> Result { self.selections .remove(&set_id) .ok_or_else(|| anyhow!("invalid selection set id {:?}", set_id))?; - let lamport_timestamp = self.lamport_clock.tick(); - cx.notify(); - self.send_operation( - Operation::UpdateSelections { - set_id, - selections: None, - lamport_timestamp, - }, - cx, - ); - Ok(()) + Ok(Operation::UpdateSelections { + set_id, + selections: None, + lamport_timestamp: self.lamport_clock.tick(), + }) } - pub fn selection_set(&self, set_id: SelectionSetId) -> Result<&SelectionSet> { - self.selections - .get(&set_id) - .ok_or_else(|| anyhow!("invalid selection set id {:?}", set_id)) + pub fn edits_since<'a>(&'a self, since: clock::Global) -> impl 'a + Iterator { + let since_2 = since.clone(); + let cursor = if since == self.version { + None + } else { + Some(self.fragments.filter( + move |summary| summary.max_version.changed_since(&since_2), + &None, + )) + }; + + Edits { + visible_text: &self.visible_text, + deleted_text: &self.deleted_text, + cursor, + undos: &self.undo_map, + since, + old_offset: 0, + new_offset: 0, + old_point: Point::zero(), + new_point: Point::zero(), + } } +} - pub fn selection_sets(&self) -> impl Iterator { - self.selections.iter() +impl Buffer { + pub fn new>>( + replica_id: ReplicaId, + base_text: T, + cx: &mut ModelContext, + ) -> Self { + Self::build( + replica_id, + History::new(base_text.into()), + None, + cx.model_id() as u64, + None, + cx, + ) } - pub fn apply_ops>( - &mut self, - ops: I, + pub fn from_history( + replica_id: ReplicaId, + history: History, + file: Option>, + language: Option>, cx: &mut ModelContext, - ) -> Result<()> { - self.pending_autoindent.take(); + ) -> Self { + Self::build( + replica_id, + history, + file, + cx.model_id() as u64, + language, + cx, + ) + } - let was_dirty = self.is_dirty(); - let old_version = 
self.version.clone(); + fn build( + replica_id: ReplicaId, + history: History, + file: Option>, + remote_id: u64, + language: Option>, + cx: &mut ModelContext, + ) -> Self { + let saved_mtime; + if let Some(file) = file.as_ref() { + saved_mtime = file.mtime(); + } else { + saved_mtime = UNIX_EPOCH; + } - let mut deferred_ops = Vec::new(); - for op in ops { - if self.can_apply_op(&op) { - self.apply_op(op)?; - } else { - self.deferred_replicas.insert(op.replica_id()); - deferred_ops.push(op); - } + let mut result = Self { + buffer: TextBuffer::new(replica_id, remote_id, history), + saved_mtime, + saved_version: clock::Global::new(), + file, + syntax_tree: Mutex::new(None), + parsing_in_background: false, + parse_count: 0, + sync_parse_timeout: Duration::from_millis(1), + autoindent_requests: Default::default(), + pending_autoindent: Default::default(), + language, + + #[cfg(test)] + operations: Default::default(), + }; + result.reparse(cx); + result + } + + pub fn snapshot(&self) -> Snapshot { + Snapshot { + visible_text: self.visible_text.clone(), + fragments: self.fragments.clone(), + version: self.version.clone(), + tree: self.syntax_tree(), + is_parsing: self.parsing_in_background, + language: self.language.clone(), + query_cursor: QueryCursorHandle::new(), } - self.deferred_ops.insert(deferred_ops); - self.flush_deferred_ops()?; + } - cx.notify(); - if self.edits_since(old_version).next().is_some() { - self.did_edit(was_dirty, cx); - self.reparse(cx); + pub fn from_proto( + replica_id: ReplicaId, + message: proto::Buffer, + file: Option>, + language: Option>, + cx: &mut ModelContext, + ) -> Result { + let mut buffer = Buffer::build( + replica_id, + History::new(message.content.into()), + file, + message.id, + language, + cx, + ); + let ops = message + .history + .into_iter() + .map(|op| Operation::Edit(op.into())); + buffer.apply_ops(ops, cx)?; + buffer.buffer.selections = message + .selections + .into_iter() + .map(|set| { + let set_id = clock::Lamport { + replica_id: set.replica_id as ReplicaId, + value: set.local_timestamp, + }; + let selections: Vec = set + .selections + .into_iter() + .map(TryFrom::try_from) + .collect::>()?; + let set = SelectionSet { + selections: Arc::from(selections), + active: set.is_active, + }; + Result::<_, anyhow::Error>::Ok((set_id, set)) + }) + .collect::>()?; + Ok(buffer) + } + + pub fn to_proto(&self, cx: &mut ModelContext) -> proto::Buffer { + let ops = self.history.ops.values().map(Into::into).collect(); + proto::Buffer { + id: cx.model_id() as u64, + content: self.history.base_text.to_string(), + history: ops, + selections: self + .selections + .iter() + .map(|(set_id, set)| proto::SelectionSetSnapshot { + replica_id: set_id.replica_id as u32, + local_timestamp: set_id.value, + selections: set.selections.iter().map(Into::into).collect(), + is_active: set.active, + }) + .collect(), } + } - Ok(()) + pub fn file(&self) -> Option<&dyn File> { + self.file.as_deref() } - fn apply_op(&mut self, op: Operation) -> Result<()> { - match op { - Operation::Edit(edit) => { - if !self.version.observed(edit.timestamp.local()) { - self.apply_remote_edit( - &edit.version, - &edit.ranges, - edit.new_text.as_deref(), - edit.timestamp, - ); - self.version.observe(edit.timestamp.local()); - self.history.push(edit); - } - } - Operation::Undo { - undo, - lamport_timestamp, - } => { - if !self.version.observed(undo.id) { - self.apply_undo(&undo)?; - self.version.observe(undo.id); - self.lamport_clock.observe(lamport_timestamp); - } - } - Operation::UpdateSelections { - 
set_id, - selections, - lamport_timestamp, - } => { - if let Some(selections) = selections { - if let Some(set) = self.selections.get_mut(&set_id) { - set.selections = selections; - } else { - self.selections.insert( - set_id, - SelectionSet { - selections, - active: false, - }, - ); - } - } else { - self.selections.remove(&set_id); - } - self.lamport_clock.observe(lamport_timestamp); - } - Operation::SetActiveSelections { - set_id, - lamport_timestamp, - } => { - for (id, set) in &mut self.selections { - if id.replica_id == lamport_timestamp.replica_id { - if Some(*id) == set_id { - set.active = true; - } else { - set.active = false; - } - } + pub fn file_mut(&mut self) -> Option<&mut dyn File> { + self.file.as_mut().map(|f| f.deref_mut() as &mut dyn File) + } + + pub fn save( + &mut self, + cx: &mut ModelContext, + ) -> Result>> { + let file = self + .file + .as_ref() + .ok_or_else(|| anyhow!("buffer has no file"))?; + let text = self.visible_text.clone(); + let version = self.version.clone(); + let save = file.save(self.remote_id, text, version, cx.as_mut()); + Ok(cx.spawn(|this, mut cx| async move { + let (version, mtime) = save.await?; + this.update(&mut cx, |this, cx| { + this.did_save(version.clone(), mtime, None, cx); + }); + Ok((version, mtime)) + })) + } + + pub fn set_language(&mut self, language: Option>, cx: &mut ModelContext) { + self.language = language; + self.reparse(cx); + } + + pub fn did_save( + &mut self, + version: clock::Global, + mtime: SystemTime, + new_file: Option>, + cx: &mut ModelContext, + ) { + self.saved_mtime = mtime; + self.saved_version = version; + if let Some(new_file) = new_file { + self.file = Some(new_file); + } + cx.emit(Event::Saved); + } + + pub fn file_updated( + &mut self, + path: Arc, + mtime: SystemTime, + new_text: Option, + cx: &mut ModelContext, + ) { + let file = self.file.as_mut().unwrap(); + let mut changed = false; + if path != *file.path() { + file.set_path(path); + changed = true; + } + + if mtime != file.mtime() { + file.set_mtime(mtime); + changed = true; + if let Some(new_text) = new_text { + if self.version == self.saved_version { + cx.spawn(|this, mut cx| async move { + let diff = this + .read_with(&cx, |this, cx| this.diff(new_text.into(), cx)) + .await; + this.update(&mut cx, |this, cx| { + if this.apply_diff(diff, cx) { + this.saved_version = this.version.clone(); + this.saved_mtime = mtime; + cx.emit(Event::Reloaded); + } + }); + }) + .detach(); } - self.lamport_clock.observe(lamport_timestamp); } - #[cfg(test)] - Operation::Test(_) => {} } - Ok(()) + + if changed { + cx.emit(Event::FileHandleChanged); + } + } + + pub fn file_deleted(&mut self, cx: &mut ModelContext) { + if self.version == self.saved_version { + cx.emit(Event::Dirtied); + } + cx.emit(Event::FileHandleChanged); + } + + pub fn close(&mut self, cx: &mut ModelContext) { + cx.emit(Event::Closed); + } + + pub fn language(&self) -> Option<&Arc> { + self.language.as_ref() + } + + pub fn parse_count(&self) -> usize { + self.parse_count + } + + fn syntax_tree(&self) -> Option { + if let Some(syntax_tree) = self.syntax_tree.lock().as_mut() { + self.interpolate_tree(syntax_tree); + Some(syntax_tree.tree.clone()) + } else { + None + } } - fn apply_remote_edit( - &mut self, - version: &clock::Global, - ranges: &[Range], - new_text: Option<&str>, - timestamp: InsertionTimestamp, - ) { - if ranges.is_empty() { - return; - } + #[cfg(any(test, feature = "test-support"))] + pub fn is_parsing(&self) -> bool { + self.parsing_in_background + } - let cx = Some(version.clone()); - 
let mut new_ropes = - RopeBuilder::new(self.visible_text.cursor(0), self.deleted_text.cursor(0)); - let mut old_fragments = self.fragments.cursor::(); - let mut new_fragments = - old_fragments.slice(&VersionedOffset::Offset(ranges[0].start), Bias::Left, &cx); - new_ropes.push_tree(new_fragments.summary().text); + #[cfg(test)] + pub fn set_sync_parse_timeout(&mut self, timeout: Duration) { + self.sync_parse_timeout = timeout; + } - let mut fragment_start = old_fragments.start().offset(); - for range in ranges { - let fragment_end = old_fragments.end(&cx).offset(); + fn reparse(&mut self, cx: &mut ModelContext) -> bool { + if self.parsing_in_background { + return false; + } - // If the current fragment ends before this range, then jump ahead to the first fragment - // that extends past the start of this range, reusing any intervening fragments. - if fragment_end < range.start { - // If the current fragment has been partially consumed, then consume the rest of it - // and advance to the next fragment before slicing. - if fragment_start > old_fragments.start().offset() { - if fragment_end > fragment_start { - let mut suffix = old_fragments.item().unwrap().clone(); - suffix.len = fragment_end - fragment_start; - new_ropes.push_fragment(&suffix, suffix.visible); - new_fragments.push(suffix, &None); - } - old_fragments.next(&cx); + if let Some(language) = self.language.clone() { + let old_tree = self.syntax_tree(); + let text = self.visible_text.clone(); + let parsed_version = self.version(); + let parse_task = cx.background().spawn({ + let language = language.clone(); + async move { Self::parse_text(&text, old_tree, &language) } + }); + + match cx + .background() + .block_with_timeout(self.sync_parse_timeout, parse_task) + { + Ok(new_tree) => { + self.did_finish_parsing(new_tree, parsed_version, cx); + return true; } + Err(parse_task) => { + self.parsing_in_background = true; + cx.spawn(move |this, mut cx| async move { + let new_tree = parse_task.await; + this.update(&mut cx, move |this, cx| { + let language_changed = + this.language.as_ref().map_or(true, |curr_language| { + !Arc::ptr_eq(curr_language, &language) + }); + let parse_again = this.version > parsed_version || language_changed; + this.parsing_in_background = false; + this.did_finish_parsing(new_tree, parsed_version, cx); - let slice = - old_fragments.slice(&VersionedOffset::Offset(range.start), Bias::Left, &cx); - new_ropes.push_tree(slice.summary().text); - new_fragments.push_tree(slice, &None); - fragment_start = old_fragments.start().offset(); + if parse_again && this.reparse(cx) { + return; + } + }); + }) + .detach(); + } } + } + false + } - // If we are at the end of a non-concurrent fragment, advance to the next one. 
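// Roughly: when the fragment under the cursor ends exactly where this edit begins, its
// remaining portion is pushed through unchanged and the cursor advances, so the loop
// further below only has to order this edit against concurrent insertions that start at
// the same offset, by comparing Lamport timestamps.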
- let fragment_end = old_fragments.end(&cx).offset(); - if fragment_end == range.start && fragment_end > fragment_start { - let mut fragment = old_fragments.item().unwrap().clone(); - fragment.len = fragment_end - fragment_start; - new_ropes.push_fragment(&fragment, fragment.visible); - new_fragments.push(fragment, &None); - old_fragments.next(&cx); - fragment_start = old_fragments.start().offset(); - } + fn parse_text(text: &Rope, old_tree: Option, language: &Language) -> Tree { + PARSER.with(|parser| { + let mut parser = parser.borrow_mut(); + parser + .set_language(language.grammar) + .expect("incompatible grammar"); + let mut chunks = text.chunks_in_range(0..text.len()); + let tree = parser + .parse_with( + &mut move |offset, _| { + chunks.seek(offset); + chunks.next().unwrap_or("").as_bytes() + }, + old_tree.as_ref(), + ) + .unwrap(); + tree + }) + } - // Skip over insertions that are concurrent to this edit, but have a lower lamport - // timestamp. - while let Some(fragment) = old_fragments.item() { - if fragment_start == range.start - && fragment.timestamp.lamport() > timestamp.lamport() - { - new_ropes.push_fragment(fragment, fragment.visible); - new_fragments.push(fragment.clone(), &None); - old_fragments.next(&cx); - debug_assert_eq!(fragment_start, range.start); - } else { - break; + fn interpolate_tree(&self, tree: &mut SyntaxTree) { + let mut delta = 0_isize; + for edit in self.edits_since(tree.version.clone()) { + let start_offset = (edit.old_bytes.start as isize + delta) as usize; + let start_point = self.visible_text.to_point(start_offset); + tree.tree.edit(&InputEdit { + start_byte: start_offset, + old_end_byte: start_offset + edit.deleted_bytes(), + new_end_byte: start_offset + edit.inserted_bytes(), + start_position: start_point.into(), + old_end_position: (start_point + edit.deleted_lines()).into(), + new_end_position: self + .visible_text + .to_point(start_offset + edit.inserted_bytes()) + .into(), + }); + delta += edit.inserted_bytes() as isize - edit.deleted_bytes() as isize; + } + tree.version = self.version(); + } + + fn did_finish_parsing( + &mut self, + tree: Tree, + version: clock::Global, + cx: &mut ModelContext, + ) { + self.parse_count += 1; + *self.syntax_tree.lock() = Some(SyntaxTree { tree, version }); + self.request_autoindent(cx); + cx.emit(Event::Reparsed); + cx.notify(); + } + + fn request_autoindent(&mut self, cx: &mut ModelContext) { + if let Some(indent_columns) = self.compute_autoindents() { + let indent_columns = cx.background().spawn(indent_columns); + match cx + .background() + .block_with_timeout(Duration::from_micros(500), indent_columns) + { + Ok(indent_columns) => self.apply_autoindents(indent_columns, cx), + Err(indent_columns) => { + self.pending_autoindent = Some(cx.spawn(|this, mut cx| async move { + let indent_columns = indent_columns.await; + this.update(&mut cx, |this, cx| { + this.apply_autoindents(indent_columns, cx); + }); + })); } } - debug_assert!(fragment_start <= range.start); + } + } - // Preserve any portion of the current fragment that precedes this range. 
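// In other words, when the edit starts partway through an existing fragment, the
// untouched prefix is split off and re-pushed as-is before the new insertion and any
// deletions are recorded against the remainder of that fragment.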
- if fragment_start < range.start { - let mut prefix = old_fragments.item().unwrap().clone(); - prefix.len = range.start - fragment_start; - fragment_start = range.start; - new_ropes.push_fragment(&prefix, prefix.visible); - new_fragments.push(prefix, &None); - } + fn compute_autoindents(&self) -> Option>> { + let max_rows_between_yields = 100; + let snapshot = self.snapshot(); + if snapshot.language.is_none() + || snapshot.tree.is_none() + || self.autoindent_requests.is_empty() + { + return None; + } - // Insert the new text before any existing fragments within the range. - if let Some(new_text) = new_text { - new_ropes.push_str(new_text); - new_fragments.push( - Fragment { - timestamp, - len: new_text.len(), - deletions: Default::default(), - max_undos: Default::default(), - visible: true, - }, - &None, - ); - } + let autoindent_requests = self.autoindent_requests.clone(); + Some(async move { + let mut indent_columns = BTreeMap::new(); + for request in autoindent_requests { + let old_to_new_rows = request + .edited + .to_points(&request.before_edit) + .map(|point| point.row) + .zip(request.edited.to_points(&snapshot).map(|point| point.row)) + .collect::>(); - // Advance through every fragment that intersects this range, marking the intersecting - // portions as deleted. - while fragment_start < range.end { - let fragment = old_fragments.item().unwrap(); - let fragment_end = old_fragments.end(&cx).offset(); - let mut intersection = fragment.clone(); - let intersection_end = cmp::min(range.end, fragment_end); - if fragment.was_visible(version, &self.undo_map) { - intersection.len = intersection_end - fragment_start; - intersection.deletions.insert(timestamp.local()); - intersection.visible = false; - } - if intersection.len > 0 { - new_ropes.push_fragment(&intersection, fragment.visible); - new_fragments.push(intersection, &None); - fragment_start = intersection_end; + let mut old_suggestions = HashMap::default(); + let old_edited_ranges = + contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields); + for old_edited_range in old_edited_ranges { + let suggestions = request + .before_edit + .suggest_autoindents(old_edited_range.clone()) + .into_iter() + .flatten(); + for (old_row, suggestion) in old_edited_range.zip(suggestions) { + let indentation_basis = old_to_new_rows + .get(&suggestion.basis_row) + .and_then(|from_row| old_suggestions.get(from_row).copied()) + .unwrap_or_else(|| { + request + .before_edit + .indent_column_for_line(suggestion.basis_row) + }); + let delta = if suggestion.indent { INDENT_SIZE } else { 0 }; + old_suggestions.insert( + *old_to_new_rows.get(&old_row).unwrap(), + indentation_basis + delta, + ); + } + yield_now().await; } - if fragment_end <= range.end { - old_fragments.next(&cx); + + // At this point, old_suggestions contains the suggested indentation for all edited lines with respect to the state of the + // buffer before the edit, but keyed by the row for these lines after the edits were applied. 
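// As a hypothetical example: if row 3 before the edit became row 5 afterwards and its
// suggested indent was 8 columns, `old_suggestions` would contain `5 -> 8`. The pass
// below recomputes suggestions against the edited snapshot and records a row in
// `indent_columns` only when its new suggestion differs from (or is missing from) the
// pre-edit one, so lines whose suggestion did not change are left untouched.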
+ let new_edited_row_ranges = + contiguous_ranges(old_to_new_rows.values().copied(), max_rows_between_yields); + for new_edited_row_range in new_edited_row_ranges { + let suggestions = snapshot + .suggest_autoindents(new_edited_row_range.clone()) + .into_iter() + .flatten(); + for (new_row, suggestion) in new_edited_row_range.zip(suggestions) { + let delta = if suggestion.indent { INDENT_SIZE } else { 0 }; + let new_indentation = indent_columns + .get(&suggestion.basis_row) + .copied() + .unwrap_or_else(|| { + snapshot.indent_column_for_line(suggestion.basis_row) + }) + + delta; + if old_suggestions + .get(&new_row) + .map_or(true, |old_indentation| new_indentation != *old_indentation) + { + indent_columns.insert(new_row, new_indentation); + } + } + yield_now().await; } - } - } - // If the current fragment has been partially consumed, then consume the rest of it - // and advance to the next fragment before slicing. - if fragment_start > old_fragments.start().offset() { - let fragment_end = old_fragments.end(&cx).offset(); - if fragment_end > fragment_start { - let mut suffix = old_fragments.item().unwrap().clone(); - suffix.len = fragment_end - fragment_start; - new_ropes.push_fragment(&suffix, suffix.visible); - new_fragments.push(suffix, &None); + if let Some(inserted) = request.inserted.as_ref() { + let inserted_row_ranges = contiguous_ranges( + inserted + .to_point_ranges(&snapshot) + .flat_map(|range| range.start.row..range.end.row + 1), + max_rows_between_yields, + ); + for inserted_row_range in inserted_row_ranges { + let suggestions = snapshot + .suggest_autoindents(inserted_row_range.clone()) + .into_iter() + .flatten(); + for (row, suggestion) in inserted_row_range.zip(suggestions) { + let delta = if suggestion.indent { INDENT_SIZE } else { 0 }; + let new_indentation = indent_columns + .get(&suggestion.basis_row) + .copied() + .unwrap_or_else(|| { + snapshot.indent_column_for_line(suggestion.basis_row) + }) + + delta; + indent_columns.insert(row, new_indentation); + } + yield_now().await; + } + } } - old_fragments.next(&cx); - } - - let suffix = old_fragments.suffix(&cx); - new_ropes.push_tree(suffix.summary().text); - new_fragments.push_tree(suffix, &None); - let (visible_text, deleted_text) = new_ropes.finish(); - drop(old_fragments); - - self.fragments = new_fragments; - self.visible_text = visible_text; - self.deleted_text = deleted_text; - self.local_clock.observe(timestamp.local()); - self.lamport_clock.observe(timestamp.lamport()); - } - - #[cfg(not(test))] - pub fn send_operation(&mut self, operation: Operation, cx: &mut ModelContext) { - if let Some(file) = &self.file { - file.buffer_updated(self.remote_id, operation, cx.as_mut()); - } - } - - #[cfg(test)] - pub fn send_operation(&mut self, operation: Operation, _: &mut ModelContext) { - self.operations.push(operation); + indent_columns + }) } - pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut ModelContext) { - self.selections - .retain(|set_id, _| set_id.replica_id != replica_id); - cx.notify(); - } + fn apply_autoindents( + &mut self, + indent_columns: BTreeMap, + cx: &mut ModelContext, + ) { + let selection_set_ids = self + .autoindent_requests + .drain(..) 
+ .flat_map(|req| req.selection_set_ids.clone()) + .collect::>(); - pub fn undo(&mut self, cx: &mut ModelContext) { - let was_dirty = self.is_dirty(); - let old_version = self.version.clone(); + self.start_transaction(selection_set_ids.iter().copied()) + .unwrap(); + for (row, indent_column) in &indent_columns { + self.set_indent_column_for_line(*row, *indent_column, cx); + } - if let Some(transaction) = self.history.pop_undo().cloned() { - let selections = transaction.selections_before.clone(); - self.undo_or_redo(transaction, cx).unwrap(); - for (set_id, selections) in selections { - let _ = self.update_selection_set(set_id, selections, cx); + for selection_set_id in &selection_set_ids { + if let Some(set) = self.selections.get(selection_set_id) { + let new_selections = set + .selections + .iter() + .map(|selection| { + let start_point = selection.start.to_point(&self.buffer); + if start_point.column == 0 { + let end_point = selection.end.to_point(&self.buffer); + let delta = Point::new( + 0, + indent_columns.get(&start_point.row).copied().unwrap_or(0), + ); + if delta.column > 0 { + return Selection { + id: selection.id, + goal: selection.goal, + reversed: selection.reversed, + start: self + .anchor_at(start_point + delta, selection.start.bias), + end: self.anchor_at(end_point + delta, selection.end.bias), + }; + } + } + selection.clone() + }) + .collect::>(); + self.update_selection_set(*selection_set_id, new_selections, cx) + .unwrap(); } } - cx.notify(); - if self.edits_since(old_version).next().is_some() { - self.did_edit(was_dirty, cx); - self.reparse(cx); - } + self.end_transaction(selection_set_ids.iter().copied(), cx) + .unwrap(); } - pub fn redo(&mut self, cx: &mut ModelContext) { - let was_dirty = self.is_dirty(); - let old_version = self.version.clone(); - - if let Some(transaction) = self.history.pop_redo().cloned() { - let selections = transaction.selections_after.clone(); - self.undo_or_redo(transaction, cx).unwrap(); - for (set_id, selections) in selections { - let _ = self.update_selection_set(set_id, selections, cx); - } - } + pub fn indent_column_for_line(&self, row: u32) -> u32 { + self.content().indent_column_for_line(row) + } - cx.notify(); - if self.edits_since(old_version).next().is_some() { - self.did_edit(was_dirty, cx); - self.reparse(cx); + fn set_indent_column_for_line(&mut self, row: u32, column: u32, cx: &mut ModelContext) { + let current_column = self.indent_column_for_line(row); + if column > current_column { + let offset = self.visible_text.to_offset(Point::new(row, 0)); + self.edit( + [offset..offset], + " ".repeat((column - current_column) as usize), + cx, + ); + } else if column < current_column { + self.edit( + [Point::new(row, 0)..Point::new(row, current_column - column)], + "", + cx, + ); } } - fn undo_or_redo( - &mut self, - transaction: Transaction, - cx: &mut ModelContext, - ) -> Result<()> { - let mut counts = HashMap::default(); - for edit_id in transaction.edits { - counts.insert(edit_id, self.undo_map.undo_count(edit_id) + 1); + pub fn range_for_syntax_ancestor(&self, range: Range) -> Option> { + if let Some(tree) = self.syntax_tree() { + let root = tree.root_node(); + let range = range.start.to_offset(self)..range.end.to_offset(self); + let mut node = root.descendant_for_byte_range(range.start, range.end); + while node.map_or(false, |n| n.byte_range() == range) { + node = node.unwrap().parent(); + } + node.map(|n| n.byte_range()) + } else { + None } - - let undo = UndoOperation { - id: self.local_clock.tick(), - counts, - ranges: 
transaction.ranges, - version: transaction.start.clone(), - }; - self.apply_undo(&undo)?; - self.version.observe(undo.id); - - let operation = Operation::Undo { - undo, - lamport_timestamp: self.lamport_clock.tick(), - }; - self.send_operation(operation, cx); - - Ok(()) } - fn apply_undo(&mut self, undo: &UndoOperation) -> Result<()> { - self.undo_map.insert(undo); - - let mut cx = undo.version.clone(); - for edit_id in undo.counts.keys().copied() { - cx.observe(edit_id); - } - let cx = Some(cx); + pub fn enclosing_bracket_ranges( + &self, + range: Range, + ) -> Option<(Range, Range)> { + let (lang, tree) = self.language.as_ref().zip(self.syntax_tree())?; + let open_capture_ix = lang.brackets_query.capture_index_for_name("open")?; + let close_capture_ix = lang.brackets_query.capture_index_for_name("close")?; - let mut old_fragments = self.fragments.cursor::(); - let mut new_fragments = old_fragments.slice( - &VersionedOffset::Offset(undo.ranges[0].start), - Bias::Right, - &cx, + // Find bracket pairs that *inclusively* contain the given range. + let range = range.start.to_offset(self).saturating_sub(1)..range.end.to_offset(self) + 1; + let mut cursor = QueryCursorHandle::new(); + let matches = cursor.set_byte_range(range).matches( + &lang.brackets_query, + tree.root_node(), + TextProvider(&self.visible_text), ); - let mut new_ropes = - RopeBuilder::new(self.visible_text.cursor(0), self.deleted_text.cursor(0)); - new_ropes.push_tree(new_fragments.summary().text); - for range in &undo.ranges { - let mut end_offset = old_fragments.end(&cx).offset(); + // Get the ranges of the innermost pair of brackets. + matches + .filter_map(|mat| { + let open = mat.nodes_for_capture_index(open_capture_ix).next()?; + let close = mat.nodes_for_capture_index(close_capture_ix).next()?; + Some((open.byte_range(), close.byte_range())) + }) + .min_by_key(|(open_range, close_range)| close_range.end - open_range.start) + } - if end_offset < range.start { - let preceding_fragments = - old_fragments.slice(&VersionedOffset::Offset(range.start), Bias::Right, &cx); - new_ropes.push_tree(preceding_fragments.summary().text); - new_fragments.push_tree(preceding_fragments, &None); + fn diff(&self, new_text: Arc, cx: &AppContext) -> Task { + // TODO: it would be nice to not allocate here. 
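// `self.text()` materializes the visible rope into an owned `String` so the line diff
// can borrow plain `&str` slices. Each entry in `changes` is a `(ChangeTag, length in
// bytes)` pair covering roughly one line; with made-up lengths, a file whose middle
// line was rewritten might yield `[(Equal, 12), (Delete, 7), (Insert, 9), (Equal, 30)]`,
// which `apply_diff` below replays against the buffer only while its version still
// matches `base_version`.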
+ let old_text = self.text(); + let base_version = self.version(); + cx.background().spawn(async move { + let changes = TextDiff::from_lines(old_text.as_str(), new_text.as_ref()) + .iter_all_changes() + .map(|c| (c.tag(), c.value().len())) + .collect::>(); + Diff { + base_version, + new_text, + changes, } + }) + } - while end_offset <= range.end { - if let Some(fragment) = old_fragments.item() { - let mut fragment = fragment.clone(); - let fragment_was_visible = fragment.visible; + pub fn set_text_from_disk(&self, new_text: Arc, cx: &mut ModelContext) -> Task<()> { + cx.spawn(|this, mut cx| async move { + let diff = this + .read_with(&cx, |this, cx| this.diff(new_text, cx)) + .await; - if fragment.was_visible(&undo.version, &self.undo_map) - || undo.counts.contains_key(&fragment.timestamp.local()) - { - fragment.visible = fragment.is_visible(&self.undo_map); - fragment.max_undos.observe(undo.id); - } - new_ropes.push_fragment(&fragment, fragment_was_visible); - new_fragments.push(fragment, &None); + this.update(&mut cx, |this, cx| { + if this.apply_diff(diff, cx) { + this.saved_version = this.version.clone(); + } + }); + }) + } - old_fragments.next(&cx); - if end_offset == old_fragments.end(&cx).offset() { - let unseen_fragments = old_fragments.slice( - &VersionedOffset::Offset(end_offset), - Bias::Right, - &cx, - ); - new_ropes.push_tree(unseen_fragments.summary().text); - new_fragments.push_tree(unseen_fragments, &None); + fn apply_diff(&mut self, diff: Diff, cx: &mut ModelContext) -> bool { + if self.version == diff.base_version { + self.start_transaction(None).unwrap(); + let mut offset = 0; + for (tag, len) in diff.changes { + let range = offset..(offset + len); + match tag { + ChangeTag::Equal => offset += len, + ChangeTag::Delete => self.edit(Some(range), "", cx), + ChangeTag::Insert => { + self.edit(Some(offset..offset), &diff.new_text[range], cx); + offset += len; } - end_offset = old_fragments.end(&cx).offset(); - } else { - break; } } + self.end_transaction(None, cx).unwrap(); + true + } else { + false } + } - let suffix = old_fragments.suffix(&cx); - new_ropes.push_tree(suffix.summary().text); - new_fragments.push_tree(suffix, &None); + pub fn is_dirty(&self) -> bool { + self.version > self.saved_version + || self.file.as_ref().map_or(false, |file| file.is_deleted()) + } - drop(old_fragments); - let (visible_text, deleted_text) = new_ropes.finish(); - self.fragments = new_fragments; - self.visible_text = visible_text; - self.deleted_text = deleted_text; + pub fn has_conflict(&self) -> bool { + self.version > self.saved_version + && self + .file + .as_ref() + .map_or(false, |file| file.mtime() > self.saved_mtime) + } + + pub fn start_transaction( + &mut self, + selection_set_ids: impl IntoIterator, + ) -> Result<()> { + self.start_transaction_at(selection_set_ids, Instant::now())?; Ok(()) } - fn flush_deferred_ops(&mut self) -> Result<()> { - self.deferred_replicas.clear(); - let mut deferred_ops = Vec::new(); - for op in self.deferred_ops.drain().cursor().cloned() { - if self.can_apply_op(&op) { - self.apply_op(op)?; - } else { - self.deferred_replicas.insert(op.replica_id()); - deferred_ops.push(op); - } - } - self.deferred_ops.insert(deferred_ops); + fn start_transaction_at( + &mut self, + selection_set_ids: impl IntoIterator, + now: Instant, + ) -> Result<()> { + self.buffer.start_transaction_at(selection_set_ids, now)?; Ok(()) } - fn can_apply_op(&self, op: &Operation) -> bool { - if self.deferred_replicas.contains(&op.replica_id()) { - false - } else { - match op { - 
Operation::Edit(edit) => self.version >= edit.version, - Operation::Undo { undo, .. } => self.version >= undo.version, - Operation::UpdateSelections { selections, .. } => { - if let Some(selections) = selections { - selections.iter().all(|selection| { - let contains_start = self.version >= selection.start.version; - let contains_end = self.version >= selection.end.version; - contains_start && contains_end - }) - } else { - true - } - } - Operation::SetActiveSelections { set_id, .. } => { - set_id.map_or(true, |set_id| self.selections.contains_key(&set_id)) - } - #[cfg(test)] - Operation::Test(_) => true, + pub fn end_transaction( + &mut self, + selection_set_ids: impl IntoIterator, + cx: &mut ModelContext, + ) -> Result<()> { + self.end_transaction_at(selection_set_ids, Instant::now(), cx) + } + + fn end_transaction_at( + &mut self, + selection_set_ids: impl IntoIterator, + now: Instant, + cx: &mut ModelContext, + ) -> Result<()> { + if let Some(start_version) = self.buffer.end_transaction_at(selection_set_ids, now) { + cx.notify(); + let was_dirty = start_version != self.saved_version; + let edited = self.edits_since(start_version).next().is_some(); + if edited { + self.did_edit(was_dirty, cx); + self.reparse(cx); } } + Ok(()) } - fn apply_local_edit( - &mut self, - ranges: &[Range], - new_text: Option, - timestamp: InsertionTimestamp, - ) -> EditOperation { - let mut edit = EditOperation { - timestamp, - version: self.version(), - ranges: Vec::with_capacity(ranges.len()), - new_text: None, - }; + pub fn edit(&mut self, ranges_iter: I, new_text: T, cx: &mut ModelContext) + where + I: IntoIterator>, + S: ToOffset, + T: Into, + { + self.edit_internal(ranges_iter, new_text, false, cx) + } - let mut new_ropes = - RopeBuilder::new(self.visible_text.cursor(0), self.deleted_text.cursor(0)); - let mut old_fragments = self.fragments.cursor::(); - let mut new_fragments = old_fragments.slice(&ranges[0].start, Bias::Right, &None); - new_ropes.push_tree(new_fragments.summary().text); + pub fn edit_with_autoindent( + &mut self, + ranges_iter: I, + new_text: T, + cx: &mut ModelContext, + ) where + I: IntoIterator>, + S: ToOffset, + T: Into, + { + self.edit_internal(ranges_iter, new_text, true, cx) + } - let mut fragment_start = old_fragments.start().visible; - for range in ranges { - let fragment_end = old_fragments.end(&None).visible; + pub fn edit_internal( + &mut self, + ranges_iter: I, + new_text: T, + autoindent: bool, + cx: &mut ModelContext, + ) where + I: IntoIterator>, + S: ToOffset, + T: Into, + { + let new_text = new_text.into(); - // If the current fragment ends before this range, then jump ahead to the first fragment - // that extends past the start of this range, reusing any intervening fragments. - if fragment_end < range.start { - // If the current fragment has been partially consumed, then consume the rest of it - // and advance to the next fragment before slicing. - if fragment_start > old_fragments.start().visible { - if fragment_end > fragment_start { - let mut suffix = old_fragments.item().unwrap().clone(); - suffix.len = fragment_end - fragment_start; - new_ropes.push_fragment(&suffix, suffix.visible); - new_fragments.push(suffix, &None); + // Skip invalid ranges and coalesce contiguous ones. 
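// For example (hypothetical input): with non-empty `new_text`, the ranges
// `[1..3, 2..5, 8..8]` coalesce to `[1..5, 8..8]`, since `2..5` overlaps the previous
// range while `8..8` does not; when `new_text` is empty, zero-length ranges such as
// `8..8` are dropped entirely because they would edit nothing.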
+ let mut ranges: Vec> = Vec::new(); + for range in ranges_iter { + let range = range.start.to_offset(&*self)..range.end.to_offset(&*self); + if !new_text.is_empty() || !range.is_empty() { + if let Some(prev_range) = ranges.last_mut() { + if prev_range.end >= range.start { + prev_range.end = cmp::max(prev_range.end, range.end); + } else { + ranges.push(range); } - old_fragments.next(&None); + } else { + ranges.push(range); + } + } + } + if ranges.is_empty() { + return; + } + + self.start_transaction(None).unwrap(); + self.pending_autoindent.take(); + let autoindent_request = if autoindent && self.language.is_some() { + let before_edit = self.snapshot(); + let edited = self.content().anchor_set(ranges.iter().filter_map(|range| { + let start = range.start.to_point(&*self); + if new_text.starts_with('\n') && start.column == self.line_len(start.row) { + None + } else { + Some((range.start, Bias::Left)) } + })); + Some((before_edit, edited)) + } else { + None + }; - let slice = old_fragments.slice(&range.start, Bias::Right, &None); - new_ropes.push_tree(slice.summary().text); - new_fragments.push_tree(slice, &None); - fragment_start = old_fragments.start().visible; - } + let first_newline_ix = new_text.find('\n'); + let new_text_len = new_text.len(); - let full_range_start = range.start + old_fragments.start().deleted; + let edit = self.buffer.edit(ranges.iter().cloned(), new_text); - // Preserve any portion of the current fragment that precedes this range. - if fragment_start < range.start { - let mut prefix = old_fragments.item().unwrap().clone(); - prefix.len = range.start - fragment_start; - new_ropes.push_fragment(&prefix, prefix.visible); - new_fragments.push(prefix, &None); - fragment_start = range.start; + if let Some((before_edit, edited)) = autoindent_request { + let mut inserted = None; + if let Some(first_newline_ix) = first_newline_ix { + let mut delta = 0isize; + inserted = Some(self.content().anchor_range_set(ranges.iter().map(|range| { + let start = (delta + range.start as isize) as usize + first_newline_ix + 1; + let end = (delta + range.start as isize) as usize + new_text_len; + delta += (range.end as isize - range.start as isize) + new_text_len as isize; + (start, Bias::Left)..(end, Bias::Right) + }))); } - // Insert the new text before any existing fragments within the range. - if let Some(new_text) = new_text.as_deref() { - new_ropes.push_str(new_text); - new_fragments.push( - Fragment { - timestamp, - len: new_text.len(), - deletions: Default::default(), - max_undos: Default::default(), - visible: true, - }, - &None, - ); - } + let selection_set_ids = self + .buffer + .peek_undo_stack() + .unwrap() + .starting_selection_set_ids() + .collect(); + self.autoindent_requests.push(Arc::new(AutoindentRequest { + selection_set_ids, + before_edit, + edited, + inserted, + })); + } - // Advance through every fragment that intersects this range, marking the intersecting - // portions as deleted. 
- while fragment_start < range.end { - let fragment = old_fragments.item().unwrap(); - let fragment_end = old_fragments.end(&None).visible; - let mut intersection = fragment.clone(); - let intersection_end = cmp::min(range.end, fragment_end); - if fragment.visible { - intersection.len = intersection_end - fragment_start; - intersection.deletions.insert(timestamp.local()); - intersection.visible = false; - } - if intersection.len > 0 { - new_ropes.push_fragment(&intersection, fragment.visible); - new_fragments.push(intersection, &None); - fragment_start = intersection_end; - } - if fragment_end <= range.end { - old_fragments.next(&None); - } - } + self.end_transaction(None, cx).unwrap(); + self.send_operation(Operation::Edit(edit), cx); + } - let full_range_end = range.end + old_fragments.start().deleted; - edit.ranges.push(full_range_start..full_range_end); + fn did_edit(&self, was_dirty: bool, cx: &mut ModelContext) { + cx.emit(Event::Edited); + if !was_dirty { + cx.emit(Event::Dirtied); } + } - // If the current fragment has been partially consumed, then consume the rest of it - // and advance to the next fragment before slicing. - if fragment_start > old_fragments.start().visible { - let fragment_end = old_fragments.end(&None).visible; - if fragment_end > fragment_start { - let mut suffix = old_fragments.item().unwrap().clone(); - suffix.len = fragment_end - fragment_start; - new_ropes.push_fragment(&suffix, suffix.visible); - new_fragments.push(suffix, &None); - } - old_fragments.next(&None); + pub fn add_selection_set( + &mut self, + selections: impl Into>, + cx: &mut ModelContext, + ) -> SelectionSetId { + let operation = self.buffer.add_selection_set(selections); + if let Operation::UpdateSelections { set_id, .. } = &operation { + let set_id = *set_id; + cx.notify(); + self.send_operation(operation, cx); + set_id + } else { + unreachable!() } + } - let suffix = old_fragments.suffix(&None); - new_ropes.push_tree(suffix.summary().text); - new_fragments.push_tree(suffix, &None); - let (visible_text, deleted_text) = new_ropes.finish(); - drop(old_fragments); - - self.fragments = new_fragments; - self.visible_text = visible_text; - self.deleted_text = deleted_text; - edit.new_text = new_text; - edit + pub fn update_selection_set( + &mut self, + set_id: SelectionSetId, + selections: impl Into>, + cx: &mut ModelContext, + ) -> Result<()> { + let operation = self.buffer.update_selection_set(set_id, selections)?; + cx.notify(); + self.send_operation(operation, cx); + Ok(()) } - fn content<'a>(&'a self) -> Content<'a> { - self.into() + pub fn set_active_selection_set( + &mut self, + set_id: Option, + cx: &mut ModelContext, + ) -> Result<()> { + let operation = self.buffer.set_active_selection_set(set_id)?; + self.send_operation(operation, cx); + Ok(()) } - pub fn text_summary_for_range(&self, range: Range) -> TextSummary { - self.content().text_summary_for_range(range) + pub fn remove_selection_set( + &mut self, + set_id: SelectionSetId, + cx: &mut ModelContext, + ) -> Result<()> { + let operation = self.buffer.remove_selection_set(set_id)?; + cx.notify(); + self.send_operation(operation, cx); + Ok(()) } - pub fn anchor_before(&self, position: T) -> Anchor { - self.anchor_at(position, Bias::Left) + pub fn apply_ops>( + &mut self, + ops: I, + cx: &mut ModelContext, + ) -> Result<()> { + self.pending_autoindent.take(); + + let was_dirty = self.is_dirty(); + let old_version = self.version.clone(); + + self.buffer.apply_ops(ops)?; + + cx.notify(); + if 
self.edits_since(old_version).next().is_some() { + self.did_edit(was_dirty, cx); + self.reparse(cx); + } + + Ok(()) } - pub fn anchor_after(&self, position: T) -> Anchor { - self.anchor_at(position, Bias::Right) + #[cfg(not(test))] + pub fn send_operation(&mut self, operation: Operation, cx: &mut ModelContext) { + if let Some(file) = &self.file { + file.buffer_updated(self.remote_id, operation, cx.as_mut()); + } } - pub fn anchor_at(&self, position: T, bias: Bias) -> Anchor { - self.content().anchor_at(position, bias) + #[cfg(test)] + pub fn send_operation(&mut self, operation: Operation, _: &mut ModelContext) { + self.operations.push(operation); } - pub fn point_for_offset(&self, offset: usize) -> Result { - self.content().point_for_offset(offset) + pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut ModelContext) { + self.buffer.remove_peer(replica_id); + cx.notify(); } - pub fn clip_point(&self, point: Point, bias: Bias) -> Point { - self.visible_text.clip_point(point, bias) + pub fn undo(&mut self, cx: &mut ModelContext) { + let was_dirty = self.is_dirty(); + let old_version = self.version.clone(); + + for operation in self.buffer.undo() { + self.send_operation(operation, cx); + } + + cx.notify(); + if self.edits_since(old_version).next().is_some() { + self.did_edit(was_dirty, cx); + self.reparse(cx); + } } - pub fn clip_offset(&self, offset: usize, bias: Bias) -> usize { - self.visible_text.clip_offset(offset, bias) + pub fn redo(&mut self, cx: &mut ModelContext) { + let was_dirty = self.is_dirty(); + let old_version = self.version.clone(); + + for operation in self.buffer.redo() { + self.send_operation(operation, cx); + } + + cx.notify(); + if self.edits_since(old_version).next().is_some() { + self.did_edit(was_dirty, cx); + self.reparse(cx); + } } } #[cfg(any(test, feature = "test-support"))] impl Buffer { + pub fn randomly_edit( + &mut self, + rng: &mut T, + old_range_count: usize, + _: &mut ModelContext, + ) -> (Vec>, String) + where + T: rand::Rng, + { + self.buffer.randomly_edit(rng, old_range_count) + } + + pub fn randomly_mutate( + &mut self, + rng: &mut T, + _: &mut ModelContext, + ) -> (Vec>, String) + where + T: rand::Rng, + { + self.buffer.randomly_mutate(rng) + } +} + +#[cfg(any(test, feature = "test-support"))] +impl TextBuffer { fn random_byte_range(&mut self, start_offset: usize, rng: &mut impl rand::Rng) -> Range { let end = self.clip_offset(rng.gen_range(start_offset..=self.len()), Bias::Right); let start = self.clip_offset(rng.gen_range(start_offset..=end), Bias::Right); @@ -2314,7 +2450,6 @@ impl Buffer { &mut self, rng: &mut T, old_range_count: usize, - cx: &mut ModelContext, ) -> (Vec>, String) where T: rand::Rng, @@ -2337,21 +2472,17 @@ impl Buffer { old_ranges, new_text ); - self.edit(old_ranges.iter().cloned(), new_text.as_str(), cx); + self.edit(old_ranges.iter().cloned(), new_text.as_str()); (old_ranges, new_text) } - pub fn randomly_mutate( - &mut self, - rng: &mut T, - cx: &mut ModelContext, - ) -> (Vec>, String) + pub fn randomly_mutate(&mut self, rng: &mut T) -> (Vec>, String) where T: rand::Rng, { use rand::prelude::*; - let (old_ranges, new_text) = self.randomly_edit(rng, 5, cx); + let (old_ranges, new_text) = self.randomly_edit(rng, 5); // Randomly add, remove or mutate selection sets. 
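// Roughly one time in six an existing selection set for this replica is removed;
// otherwise five random ranges are generated and either added as a brand-new set
// (always when none exists, otherwise about one time in five) or used to overwrite the
// randomly chosen existing set.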
let replica_selection_sets = &self @@ -2361,7 +2492,7 @@ impl Buffer { .collect::>(); let set_id = replica_selection_sets.choose(rng); if set_id.is_some() && rng.gen_bool(1.0 / 6.0) { - self.remove_selection_set(*set_id.unwrap(), cx).unwrap(); + self.remove_selection_set(*set_id.unwrap()).unwrap(); } else { let mut ranges = Vec::new(); for _ in 0..5 { @@ -2370,9 +2501,9 @@ impl Buffer { let new_selections = self.selections_from_ranges(ranges).unwrap(); if set_id.is_none() || rng.gen_bool(1.0 / 5.0) { - self.add_selection_set(new_selections, cx); + self.add_selection_set(new_selections); } else { - self.update_selection_set(*set_id.unwrap(), new_selections, cx) + self.update_selection_set(*set_id.unwrap(), new_selections) .unwrap(); } } @@ -2380,7 +2511,7 @@ impl Buffer { (old_ranges, new_text) } - pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut ModelContext) { + pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng) { use rand::prelude::*; for _ in 0..rng.gen_range(1..=5) { @@ -2390,7 +2521,7 @@ impl Buffer { self.replica_id, transaction ); - self.undo_or_redo(transaction, cx).unwrap(); + self.undo_or_redo(transaction).unwrap(); } } } @@ -2453,33 +2584,14 @@ impl Buffer { .keys() .map(move |set_id| (*set_id, self.selection_ranges(*set_id).unwrap())) } - - pub fn enclosing_bracket_point_ranges( - &self, - range: Range, - ) -> Option<(Range, Range)> { - self.enclosing_bracket_ranges(range).map(|(start, end)| { - let point_start = start.start.to_point(self)..start.end.to_point(self); - let point_end = end.start.to_point(self)..end.end.to_point(self); - (point_start, point_end) - }) - } } impl Clone for Buffer { fn clone(&self) -> Self { Self { - fragments: self.fragments.clone(), - visible_text: self.visible_text.clone(), - deleted_text: self.deleted_text.clone(), - version: self.version.clone(), + buffer: self.buffer.clone(), saved_version: self.saved_version.clone(), saved_mtime: self.saved_mtime, - last_edit: self.last_edit.clone(), - undo_map: self.undo_map.clone(), - history: self.history.clone(), - selections: self.selections.clone(), - deferred_ops: self.deferred_ops.clone(), file: self.file.as_ref().map(|f| f.boxed_clone()), language: self.language.clone(), syntax_tree: Mutex::new(self.syntax_tree.lock().clone()), @@ -2488,11 +2600,6 @@ impl Clone for Buffer { parse_count: self.parse_count, autoindent_requests: Default::default(), pending_autoindent: Default::default(), - deferred_replicas: self.deferred_replicas.clone(), - replica_id: self.replica_id, - remote_id: self.remote_id.clone(), - local_clock: self.local_clock.clone(), - lamport_clock: self.lamport_clock.clone(), #[cfg(test)] operations: self.operations.clone(), @@ -2777,6 +2884,26 @@ impl<'a> From<&'a mut Buffer> for Content<'a> { } } +impl<'a> From<&'a TextBuffer> for Content<'a> { + fn from(buffer: &'a TextBuffer) -> Self { + Self { + visible_text: &buffer.visible_text, + fragments: &buffer.fragments, + version: &buffer.version, + } + } +} + +impl<'a> From<&'a mut TextBuffer> for Content<'a> { + fn from(buffer: &'a mut TextBuffer) -> Self { + Self { + visible_text: &buffer.visible_text, + fragments: &buffer.fragments, + version: &buffer.version, + } + } +} + impl<'a> From<&'a Content<'a>> for Content<'a> { fn from(content: &'a Content) -> Self { Self { diff --git a/crates/buffer/src/tests/buffer.rs b/crates/buffer/src/tests/buffer.rs index 7c627a45eddd544444ba9ee5973abb2cc4c63e4e..74c27e99de72ff1c9ecb232d5e3c3cc6f038b3c7 100644 --- a/crates/buffer/src/tests/buffer.rs +++ 
b/crates/buffer/src/tests/buffer.rs @@ -11,23 +11,20 @@ use std::{ time::{Duration, Instant}, }; -#[gpui::test] -fn test_edit(cx: &mut gpui::MutableAppContext) { - cx.add_model(|cx| { - let mut buffer = Buffer::new(0, "abc", cx); - assert_eq!(buffer.text(), "abc"); - buffer.edit(vec![3..3], "def", cx); - assert_eq!(buffer.text(), "abcdef"); - buffer.edit(vec![0..0], "ghi", cx); - assert_eq!(buffer.text(), "ghiabcdef"); - buffer.edit(vec![5..5], "jkl", cx); - assert_eq!(buffer.text(), "ghiabjklcdef"); - buffer.edit(vec![6..7], "", cx); - assert_eq!(buffer.text(), "ghiabjlcdef"); - buffer.edit(vec![4..9], "mno", cx); - assert_eq!(buffer.text(), "ghiamnoef"); - buffer - }); +#[test] +fn test_edit() { + let mut buffer = TextBuffer::new(0, 0, History::new("abc".into())); + assert_eq!(buffer.text(), "abc"); + buffer.edit(vec![3..3], "def"); + assert_eq!(buffer.text(), "abcdef"); + buffer.edit(vec![0..0], "ghi"); + assert_eq!(buffer.text(), "ghiabcdef"); + buffer.edit(vec![5..5], "jkl"); + assert_eq!(buffer.text(), "ghiabjklcdef"); + buffer.edit(vec![6..7], ""); + assert_eq!(buffer.text(), "ghiabjlcdef"); + buffer.edit(vec![4..9], "mno"); + assert_eq!(buffer.text(), "ghiamnoef"); } #[gpui::test] @@ -88,7 +85,7 @@ fn test_edit_events(cx: &mut gpui::MutableAppContext) { } #[gpui::test(iterations = 100)] -fn test_random_edits(cx: &mut gpui::MutableAppContext, mut rng: StdRng) { +fn test_random_edits(mut rng: StdRng) { let operations = env::var("OPERATIONS") .map(|i| i.parse().expect("invalid `OPERATIONS` variable")) .unwrap_or(10); @@ -97,360 +94,336 @@ fn test_random_edits(cx: &mut gpui::MutableAppContext, mut rng: StdRng) { let mut reference_string = RandomCharIter::new(&mut rng) .take(reference_string_len) .collect::(); - cx.add_model(|cx| { - let mut buffer = Buffer::new(0, reference_string.as_str(), cx); - buffer.history.group_interval = Duration::from_millis(rng.gen_range(0..=200)); - let mut buffer_versions = Vec::new(); + let mut buffer = TextBuffer::new(0, 0, History::new(reference_string.clone().into())); + buffer.history.group_interval = Duration::from_millis(rng.gen_range(0..=200)); + let mut buffer_versions = Vec::new(); + log::info!( + "buffer text {:?}, version: {:?}", + buffer.text(), + buffer.version() + ); + + for _i in 0..operations { + let (old_ranges, new_text) = buffer.randomly_mutate(&mut rng); + for old_range in old_ranges.iter().rev() { + reference_string.replace_range(old_range.clone(), &new_text); + } + assert_eq!(buffer.text(), reference_string); log::info!( "buffer text {:?}, version: {:?}", buffer.text(), buffer.version() ); - for _i in 0..operations { - let (old_ranges, new_text) = buffer.randomly_mutate(&mut rng, cx); - for old_range in old_ranges.iter().rev() { - reference_string.replace_range(old_range.clone(), &new_text); - } - assert_eq!(buffer.text(), reference_string); + if rng.gen_bool(0.25) { + buffer.randomly_undo_redo(&mut rng); + reference_string = buffer.text(); log::info!( "buffer text {:?}, version: {:?}", buffer.text(), buffer.version() ); + } - if rng.gen_bool(0.25) { - buffer.randomly_undo_redo(&mut rng, cx); - reference_string = buffer.text(); - log::info!( - "buffer text {:?}, version: {:?}", - buffer.text(), - buffer.version() - ); - } - - let range = buffer.random_byte_range(0, &mut rng); - assert_eq!( - buffer.text_summary_for_range(range.clone()), - TextSummary::from(&reference_string[range]) - ); + let range = buffer.random_byte_range(0, &mut rng); + assert_eq!( + buffer.text_summary_for_range(range.clone()), + 
TextSummary::from(&reference_string[range]) + ); - if rng.gen_bool(0.3) { - buffer_versions.push(buffer.clone()); - } + if rng.gen_bool(0.3) { + buffer_versions.push(buffer.clone()); } + } - for mut old_buffer in buffer_versions { - let edits = buffer - .edits_since(old_buffer.version.clone()) - .collect::>(); + for mut old_buffer in buffer_versions { + let edits = buffer + .edits_since(old_buffer.version.clone()) + .collect::>(); - log::info!( - "mutating old buffer version {:?}, text: {:?}, edits since: {:?}", - old_buffer.version(), - old_buffer.text(), - edits, - ); + log::info!( + "mutating old buffer version {:?}, text: {:?}, edits since: {:?}", + old_buffer.version(), + old_buffer.text(), + edits, + ); - let mut delta = 0_isize; - for edit in edits { - let old_start = (edit.old_bytes.start as isize + delta) as usize; - let new_text: String = buffer.text_for_range(edit.new_bytes.clone()).collect(); - old_buffer.edit( - Some(old_start..old_start + edit.deleted_bytes()), - new_text, - cx, - ); - delta += edit.delta(); - } - assert_eq!(old_buffer.text(), buffer.text()); + let mut delta = 0_isize; + for edit in edits { + let old_start = (edit.old_bytes.start as isize + delta) as usize; + let new_text: String = buffer.text_for_range(edit.new_bytes.clone()).collect(); + old_buffer.edit(Some(old_start..old_start + edit.deleted_bytes()), new_text); + delta += edit.delta(); } - - buffer - }); + assert_eq!(old_buffer.text(), buffer.text()); + } } -#[gpui::test] -fn test_line_len(cx: &mut gpui::MutableAppContext) { - cx.add_model(|cx| { - let mut buffer = Buffer::new(0, "", cx); - buffer.edit(vec![0..0], "abcd\nefg\nhij", cx); - buffer.edit(vec![12..12], "kl\nmno", cx); - buffer.edit(vec![18..18], "\npqrs\n", cx); - buffer.edit(vec![18..21], "\nPQ", cx); - - assert_eq!(buffer.line_len(0), 4); - assert_eq!(buffer.line_len(1), 3); - assert_eq!(buffer.line_len(2), 5); - assert_eq!(buffer.line_len(3), 3); - assert_eq!(buffer.line_len(4), 4); - assert_eq!(buffer.line_len(5), 0); - buffer - }); +#[test] +fn test_line_len() { + let mut buffer = TextBuffer::new(0, 0, History::new("".into())); + buffer.edit(vec![0..0], "abcd\nefg\nhij"); + buffer.edit(vec![12..12], "kl\nmno"); + buffer.edit(vec![18..18], "\npqrs\n"); + buffer.edit(vec![18..21], "\nPQ"); + + assert_eq!(buffer.line_len(0), 4); + assert_eq!(buffer.line_len(1), 3); + assert_eq!(buffer.line_len(2), 5); + assert_eq!(buffer.line_len(3), 3); + assert_eq!(buffer.line_len(4), 4); + assert_eq!(buffer.line_len(5), 0); } -#[gpui::test] -fn test_text_summary_for_range(cx: &mut gpui::MutableAppContext) { - cx.add_model(|cx| { - let buffer = Buffer::new(0, "ab\nefg\nhklm\nnopqrs\ntuvwxyz", cx); - assert_eq!( - buffer.text_summary_for_range(1..3), - TextSummary { - bytes: 2, - lines: Point::new(1, 0), - first_line_chars: 1, - last_line_chars: 0, - longest_row: 0, - longest_row_chars: 1, - } - ); - assert_eq!( - buffer.text_summary_for_range(1..12), - TextSummary { - bytes: 11, - lines: Point::new(3, 0), - first_line_chars: 1, - last_line_chars: 0, - longest_row: 2, - longest_row_chars: 4, - } - ); - assert_eq!( - buffer.text_summary_for_range(0..20), - TextSummary { - bytes: 20, - lines: Point::new(4, 1), - first_line_chars: 2, - last_line_chars: 1, - longest_row: 3, - longest_row_chars: 6, - } - ); - assert_eq!( - buffer.text_summary_for_range(0..22), - TextSummary { - bytes: 22, - lines: Point::new(4, 3), - first_line_chars: 2, - last_line_chars: 3, - longest_row: 3, - longest_row_chars: 6, - } - ); - assert_eq!( - 
buffer.text_summary_for_range(7..22), - TextSummary { - bytes: 15, - lines: Point::new(2, 3), - first_line_chars: 4, - last_line_chars: 3, - longest_row: 1, - longest_row_chars: 6, - } - ); - buffer - }); +#[test] +fn test_text_summary_for_range() { + let buffer = TextBuffer::new(0, 0, History::new("ab\nefg\nhklm\nnopqrs\ntuvwxyz".into())); + assert_eq!( + buffer.text_summary_for_range(1..3), + TextSummary { + bytes: 2, + lines: Point::new(1, 0), + first_line_chars: 1, + last_line_chars: 0, + longest_row: 0, + longest_row_chars: 1, + } + ); + assert_eq!( + buffer.text_summary_for_range(1..12), + TextSummary { + bytes: 11, + lines: Point::new(3, 0), + first_line_chars: 1, + last_line_chars: 0, + longest_row: 2, + longest_row_chars: 4, + } + ); + assert_eq!( + buffer.text_summary_for_range(0..20), + TextSummary { + bytes: 20, + lines: Point::new(4, 1), + first_line_chars: 2, + last_line_chars: 1, + longest_row: 3, + longest_row_chars: 6, + } + ); + assert_eq!( + buffer.text_summary_for_range(0..22), + TextSummary { + bytes: 22, + lines: Point::new(4, 3), + first_line_chars: 2, + last_line_chars: 3, + longest_row: 3, + longest_row_chars: 6, + } + ); + assert_eq!( + buffer.text_summary_for_range(7..22), + TextSummary { + bytes: 15, + lines: Point::new(2, 3), + first_line_chars: 4, + last_line_chars: 3, + longest_row: 1, + longest_row_chars: 6, + } + ); } -#[gpui::test] -fn test_chars_at(cx: &mut gpui::MutableAppContext) { - cx.add_model(|cx| { - let mut buffer = Buffer::new(0, "", cx); - buffer.edit(vec![0..0], "abcd\nefgh\nij", cx); - buffer.edit(vec![12..12], "kl\nmno", cx); - buffer.edit(vec![18..18], "\npqrs", cx); - buffer.edit(vec![18..21], "\nPQ", cx); - - let chars = buffer.chars_at(Point::new(0, 0)); - assert_eq!(chars.collect::(), "abcd\nefgh\nijkl\nmno\nPQrs"); +#[test] +fn test_chars_at() { + let mut buffer = TextBuffer::new(0, 0, History::new("".into())); + buffer.edit(vec![0..0], "abcd\nefgh\nij"); + buffer.edit(vec![12..12], "kl\nmno"); + buffer.edit(vec![18..18], "\npqrs"); + buffer.edit(vec![18..21], "\nPQ"); - let chars = buffer.chars_at(Point::new(1, 0)); - assert_eq!(chars.collect::(), "efgh\nijkl\nmno\nPQrs"); + let chars = buffer.chars_at(Point::new(0, 0)); + assert_eq!(chars.collect::(), "abcd\nefgh\nijkl\nmno\nPQrs"); - let chars = buffer.chars_at(Point::new(2, 0)); - assert_eq!(chars.collect::(), "ijkl\nmno\nPQrs"); + let chars = buffer.chars_at(Point::new(1, 0)); + assert_eq!(chars.collect::(), "efgh\nijkl\nmno\nPQrs"); - let chars = buffer.chars_at(Point::new(3, 0)); - assert_eq!(chars.collect::(), "mno\nPQrs"); + let chars = buffer.chars_at(Point::new(2, 0)); + assert_eq!(chars.collect::(), "ijkl\nmno\nPQrs"); - let chars = buffer.chars_at(Point::new(4, 0)); - assert_eq!(chars.collect::(), "PQrs"); + let chars = buffer.chars_at(Point::new(3, 0)); + assert_eq!(chars.collect::(), "mno\nPQrs"); - // Regression test: - let mut buffer = Buffer::new(0, "", cx); - buffer.edit(vec![0..0], "[workspace]\nmembers = [\n \"xray_core\",\n \"xray_server\",\n \"xray_cli\",\n \"xray_wasm\",\n]\n", cx); - buffer.edit(vec![60..60], "\n", cx); + let chars = buffer.chars_at(Point::new(4, 0)); + assert_eq!(chars.collect::(), "PQrs"); - let chars = buffer.chars_at(Point::new(6, 0)); - assert_eq!(chars.collect::(), " \"xray_wasm\",\n]\n"); + // Regression test: + let mut buffer = TextBuffer::new(0, 0, History::new("".into())); + buffer.edit(vec![0..0], "[workspace]\nmembers = [\n \"xray_core\",\n \"xray_server\",\n \"xray_cli\",\n \"xray_wasm\",\n]\n"); + buffer.edit(vec![60..60], "\n"); - 
buffer - }); + let chars = buffer.chars_at(Point::new(6, 0)); + assert_eq!(chars.collect::(), " \"xray_wasm\",\n]\n"); } -#[gpui::test] -fn test_anchors(cx: &mut gpui::MutableAppContext) { - cx.add_model(|cx| { - let mut buffer = Buffer::new(0, "", cx); - buffer.edit(vec![0..0], "abc", cx); - let left_anchor = buffer.anchor_before(2); - let right_anchor = buffer.anchor_after(2); - - buffer.edit(vec![1..1], "def\n", cx); - assert_eq!(buffer.text(), "adef\nbc"); - assert_eq!(left_anchor.to_offset(&buffer), 6); - assert_eq!(right_anchor.to_offset(&buffer), 6); - assert_eq!(left_anchor.to_point(&buffer), Point { row: 1, column: 1 }); - assert_eq!(right_anchor.to_point(&buffer), Point { row: 1, column: 1 }); - - buffer.edit(vec![2..3], "", cx); - assert_eq!(buffer.text(), "adf\nbc"); - assert_eq!(left_anchor.to_offset(&buffer), 5); - assert_eq!(right_anchor.to_offset(&buffer), 5); - assert_eq!(left_anchor.to_point(&buffer), Point { row: 1, column: 1 }); - assert_eq!(right_anchor.to_point(&buffer), Point { row: 1, column: 1 }); - - buffer.edit(vec![5..5], "ghi\n", cx); - assert_eq!(buffer.text(), "adf\nbghi\nc"); - assert_eq!(left_anchor.to_offset(&buffer), 5); - assert_eq!(right_anchor.to_offset(&buffer), 9); - assert_eq!(left_anchor.to_point(&buffer), Point { row: 1, column: 1 }); - assert_eq!(right_anchor.to_point(&buffer), Point { row: 2, column: 0 }); - - buffer.edit(vec![7..9], "", cx); - assert_eq!(buffer.text(), "adf\nbghc"); - assert_eq!(left_anchor.to_offset(&buffer), 5); - assert_eq!(right_anchor.to_offset(&buffer), 7); - assert_eq!(left_anchor.to_point(&buffer), Point { row: 1, column: 1 },); - assert_eq!(right_anchor.to_point(&buffer), Point { row: 1, column: 3 }); - - // Ensure anchoring to a point is equivalent to anchoring to an offset. - assert_eq!( - buffer.anchor_before(Point { row: 0, column: 0 }), - buffer.anchor_before(0) - ); - assert_eq!( - buffer.anchor_before(Point { row: 0, column: 1 }), - buffer.anchor_before(1) - ); - assert_eq!( - buffer.anchor_before(Point { row: 0, column: 2 }), - buffer.anchor_before(2) - ); - assert_eq!( - buffer.anchor_before(Point { row: 0, column: 3 }), - buffer.anchor_before(3) - ); - assert_eq!( - buffer.anchor_before(Point { row: 1, column: 0 }), - buffer.anchor_before(4) - ); - assert_eq!( - buffer.anchor_before(Point { row: 1, column: 1 }), - buffer.anchor_before(5) - ); - assert_eq!( - buffer.anchor_before(Point { row: 1, column: 2 }), - buffer.anchor_before(6) - ); - assert_eq!( - buffer.anchor_before(Point { row: 1, column: 3 }), - buffer.anchor_before(7) - ); - assert_eq!( - buffer.anchor_before(Point { row: 1, column: 4 }), - buffer.anchor_before(8) - ); +#[test] +fn test_anchors() { + let mut buffer = TextBuffer::new(0, 0, History::new("".into())); + buffer.edit(vec![0..0], "abc"); + let left_anchor = buffer.anchor_before(2); + let right_anchor = buffer.anchor_after(2); + + buffer.edit(vec![1..1], "def\n"); + assert_eq!(buffer.text(), "adef\nbc"); + assert_eq!(left_anchor.to_offset(&buffer), 6); + assert_eq!(right_anchor.to_offset(&buffer), 6); + assert_eq!(left_anchor.to_point(&buffer), Point { row: 1, column: 1 }); + assert_eq!(right_anchor.to_point(&buffer), Point { row: 1, column: 1 }); + + buffer.edit(vec![2..3], ""); + assert_eq!(buffer.text(), "adf\nbc"); + assert_eq!(left_anchor.to_offset(&buffer), 5); + assert_eq!(right_anchor.to_offset(&buffer), 5); + assert_eq!(left_anchor.to_point(&buffer), Point { row: 1, column: 1 }); + assert_eq!(right_anchor.to_point(&buffer), Point { row: 1, column: 1 }); + + 
buffer.edit(vec![5..5], "ghi\n"); + assert_eq!(buffer.text(), "adf\nbghi\nc"); + assert_eq!(left_anchor.to_offset(&buffer), 5); + assert_eq!(right_anchor.to_offset(&buffer), 9); + assert_eq!(left_anchor.to_point(&buffer), Point { row: 1, column: 1 }); + assert_eq!(right_anchor.to_point(&buffer), Point { row: 2, column: 0 }); + + buffer.edit(vec![7..9], ""); + assert_eq!(buffer.text(), "adf\nbghc"); + assert_eq!(left_anchor.to_offset(&buffer), 5); + assert_eq!(right_anchor.to_offset(&buffer), 7); + assert_eq!(left_anchor.to_point(&buffer), Point { row: 1, column: 1 },); + assert_eq!(right_anchor.to_point(&buffer), Point { row: 1, column: 3 }); + + // Ensure anchoring to a point is equivalent to anchoring to an offset. + assert_eq!( + buffer.anchor_before(Point { row: 0, column: 0 }), + buffer.anchor_before(0) + ); + assert_eq!( + buffer.anchor_before(Point { row: 0, column: 1 }), + buffer.anchor_before(1) + ); + assert_eq!( + buffer.anchor_before(Point { row: 0, column: 2 }), + buffer.anchor_before(2) + ); + assert_eq!( + buffer.anchor_before(Point { row: 0, column: 3 }), + buffer.anchor_before(3) + ); + assert_eq!( + buffer.anchor_before(Point { row: 1, column: 0 }), + buffer.anchor_before(4) + ); + assert_eq!( + buffer.anchor_before(Point { row: 1, column: 1 }), + buffer.anchor_before(5) + ); + assert_eq!( + buffer.anchor_before(Point { row: 1, column: 2 }), + buffer.anchor_before(6) + ); + assert_eq!( + buffer.anchor_before(Point { row: 1, column: 3 }), + buffer.anchor_before(7) + ); + assert_eq!( + buffer.anchor_before(Point { row: 1, column: 4 }), + buffer.anchor_before(8) + ); - // Comparison between anchors. - let anchor_at_offset_0 = buffer.anchor_before(0); - let anchor_at_offset_1 = buffer.anchor_before(1); - let anchor_at_offset_2 = buffer.anchor_before(2); + // Comparison between anchors. 
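// In this test, anchors compare according to the offsets they were created at:
// equal offsets compare as Equal, 0 < 1 < 2 compare as Less, and the reverse as Greater,
// which is what the assertions below check.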
+ let anchor_at_offset_0 = buffer.anchor_before(0); + let anchor_at_offset_1 = buffer.anchor_before(1); + let anchor_at_offset_2 = buffer.anchor_before(2); - assert_eq!( - anchor_at_offset_0 - .cmp(&anchor_at_offset_0, &buffer) - .unwrap(), - Ordering::Equal - ); - assert_eq!( - anchor_at_offset_1 - .cmp(&anchor_at_offset_1, &buffer) - .unwrap(), - Ordering::Equal - ); - assert_eq!( - anchor_at_offset_2 - .cmp(&anchor_at_offset_2, &buffer) - .unwrap(), - Ordering::Equal - ); + assert_eq!( + anchor_at_offset_0 + .cmp(&anchor_at_offset_0, &buffer) + .unwrap(), + Ordering::Equal + ); + assert_eq!( + anchor_at_offset_1 + .cmp(&anchor_at_offset_1, &buffer) + .unwrap(), + Ordering::Equal + ); + assert_eq!( + anchor_at_offset_2 + .cmp(&anchor_at_offset_2, &buffer) + .unwrap(), + Ordering::Equal + ); - assert_eq!( - anchor_at_offset_0 - .cmp(&anchor_at_offset_1, &buffer) - .unwrap(), - Ordering::Less - ); - assert_eq!( - anchor_at_offset_1 - .cmp(&anchor_at_offset_2, &buffer) - .unwrap(), - Ordering::Less - ); - assert_eq!( - anchor_at_offset_0 - .cmp(&anchor_at_offset_2, &buffer) - .unwrap(), - Ordering::Less - ); + assert_eq!( + anchor_at_offset_0 + .cmp(&anchor_at_offset_1, &buffer) + .unwrap(), + Ordering::Less + ); + assert_eq!( + anchor_at_offset_1 + .cmp(&anchor_at_offset_2, &buffer) + .unwrap(), + Ordering::Less + ); + assert_eq!( + anchor_at_offset_0 + .cmp(&anchor_at_offset_2, &buffer) + .unwrap(), + Ordering::Less + ); - assert_eq!( - anchor_at_offset_1 - .cmp(&anchor_at_offset_0, &buffer) - .unwrap(), - Ordering::Greater - ); - assert_eq!( - anchor_at_offset_2 - .cmp(&anchor_at_offset_1, &buffer) - .unwrap(), - Ordering::Greater - ); - assert_eq!( - anchor_at_offset_2 - .cmp(&anchor_at_offset_0, &buffer) - .unwrap(), - Ordering::Greater - ); - buffer - }); + assert_eq!( + anchor_at_offset_1 + .cmp(&anchor_at_offset_0, &buffer) + .unwrap(), + Ordering::Greater + ); + assert_eq!( + anchor_at_offset_2 + .cmp(&anchor_at_offset_1, &buffer) + .unwrap(), + Ordering::Greater + ); + assert_eq!( + anchor_at_offset_2 + .cmp(&anchor_at_offset_0, &buffer) + .unwrap(), + Ordering::Greater + ); } -#[gpui::test] -fn test_anchors_at_start_and_end(cx: &mut gpui::MutableAppContext) { - cx.add_model(|cx| { - let mut buffer = Buffer::new(0, "", cx); - let before_start_anchor = buffer.anchor_before(0); - let after_end_anchor = buffer.anchor_after(0); - - buffer.edit(vec![0..0], "abc", cx); - assert_eq!(buffer.text(), "abc"); - assert_eq!(before_start_anchor.to_offset(&buffer), 0); - assert_eq!(after_end_anchor.to_offset(&buffer), 3); - - let after_start_anchor = buffer.anchor_after(0); - let before_end_anchor = buffer.anchor_before(3); - - buffer.edit(vec![3..3], "def", cx); - buffer.edit(vec![0..0], "ghi", cx); - assert_eq!(buffer.text(), "ghiabcdef"); - assert_eq!(before_start_anchor.to_offset(&buffer), 0); - assert_eq!(after_start_anchor.to_offset(&buffer), 3); - assert_eq!(before_end_anchor.to_offset(&buffer), 6); - assert_eq!(after_end_anchor.to_offset(&buffer), 9); - buffer - }); +#[test] +fn test_anchors_at_start_and_end() { + let mut buffer = TextBuffer::new(0, 0, History::new("".into())); + let before_start_anchor = buffer.anchor_before(0); + let after_end_anchor = buffer.anchor_after(0); + + buffer.edit(vec![0..0], "abc"); + assert_eq!(buffer.text(), "abc"); + assert_eq!(before_start_anchor.to_offset(&buffer), 0); + assert_eq!(after_end_anchor.to_offset(&buffer), 3); + + let after_start_anchor = buffer.anchor_after(0); + let before_end_anchor = buffer.anchor_before(3); + + buffer.edit(vec![3..3], 
"def"); + buffer.edit(vec![0..0], "ghi"); + assert_eq!(buffer.text(), "ghiabcdef"); + assert_eq!(before_start_anchor.to_offset(&buffer), 0); + assert_eq!(after_start_anchor.to_offset(&buffer), 3); + assert_eq!(before_end_anchor.to_offset(&buffer), 6); + assert_eq!(after_end_anchor.to_offset(&buffer), 9); } #[gpui::test] @@ -469,247 +442,221 @@ async fn test_apply_diff(mut cx: gpui::TestAppContext) { cx.read(|cx| assert_eq!(buffer.read(cx).text(), text)); } -#[gpui::test] -fn test_undo_redo(cx: &mut gpui::MutableAppContext) { - cx.add_model(|cx| { - let mut buffer = Buffer::new(0, "1234", cx); - // Set group interval to zero so as to not group edits in the undo stack. - buffer.history.group_interval = Duration::from_secs(0); - - buffer.edit(vec![1..1], "abx", cx); - buffer.edit(vec![3..4], "yzef", cx); - buffer.edit(vec![3..5], "cd", cx); - assert_eq!(buffer.text(), "1abcdef234"); - - let transactions = buffer.history.undo_stack.clone(); - assert_eq!(transactions.len(), 3); - - buffer.undo_or_redo(transactions[0].clone(), cx).unwrap(); - assert_eq!(buffer.text(), "1cdef234"); - buffer.undo_or_redo(transactions[0].clone(), cx).unwrap(); - assert_eq!(buffer.text(), "1abcdef234"); - - buffer.undo_or_redo(transactions[1].clone(), cx).unwrap(); - assert_eq!(buffer.text(), "1abcdx234"); - buffer.undo_or_redo(transactions[2].clone(), cx).unwrap(); - assert_eq!(buffer.text(), "1abx234"); - buffer.undo_or_redo(transactions[1].clone(), cx).unwrap(); - assert_eq!(buffer.text(), "1abyzef234"); - buffer.undo_or_redo(transactions[2].clone(), cx).unwrap(); - assert_eq!(buffer.text(), "1abcdef234"); - - buffer.undo_or_redo(transactions[2].clone(), cx).unwrap(); - assert_eq!(buffer.text(), "1abyzef234"); - buffer.undo_or_redo(transactions[0].clone(), cx).unwrap(); - assert_eq!(buffer.text(), "1yzef234"); - buffer.undo_or_redo(transactions[1].clone(), cx).unwrap(); - assert_eq!(buffer.text(), "1234"); - - buffer - }); +#[test] +fn test_undo_redo() { + let mut buffer = TextBuffer::new(0, 0, History::new("1234".into())); + // Set group interval to zero so as to not group edits in the undo stack. 
+ buffer.history.group_interval = Duration::from_secs(0); + + buffer.edit(vec![1..1], "abx"); + buffer.edit(vec![3..4], "yzef"); + buffer.edit(vec![3..5], "cd"); + assert_eq!(buffer.text(), "1abcdef234"); + + let transactions = buffer.history.undo_stack.clone(); + assert_eq!(transactions.len(), 3); + + buffer.undo_or_redo(transactions[0].clone()).unwrap(); + assert_eq!(buffer.text(), "1cdef234"); + buffer.undo_or_redo(transactions[0].clone()).unwrap(); + assert_eq!(buffer.text(), "1abcdef234"); + + buffer.undo_or_redo(transactions[1].clone()).unwrap(); + assert_eq!(buffer.text(), "1abcdx234"); + buffer.undo_or_redo(transactions[2].clone()).unwrap(); + assert_eq!(buffer.text(), "1abx234"); + buffer.undo_or_redo(transactions[1].clone()).unwrap(); + assert_eq!(buffer.text(), "1abyzef234"); + buffer.undo_or_redo(transactions[2].clone()).unwrap(); + assert_eq!(buffer.text(), "1abcdef234"); + + buffer.undo_or_redo(transactions[2].clone()).unwrap(); + assert_eq!(buffer.text(), "1abyzef234"); + buffer.undo_or_redo(transactions[0].clone()).unwrap(); + assert_eq!(buffer.text(), "1yzef234"); + buffer.undo_or_redo(transactions[1].clone()).unwrap(); + assert_eq!(buffer.text(), "1234"); } -#[gpui::test] -fn test_history(cx: &mut gpui::MutableAppContext) { - cx.add_model(|cx| { - let mut now = Instant::now(); - let mut buffer = Buffer::new(0, "123456", cx); - - let set_id = - buffer.add_selection_set(buffer.selections_from_ranges(vec![4..4]).unwrap(), cx); - buffer.start_transaction_at(Some(set_id), now).unwrap(); - buffer.edit(vec![2..4], "cd", cx); - buffer.end_transaction_at(Some(set_id), now, cx).unwrap(); - assert_eq!(buffer.text(), "12cd56"); - assert_eq!(buffer.selection_ranges(set_id).unwrap(), vec![4..4]); - - buffer.start_transaction_at(Some(set_id), now).unwrap(); - buffer - .update_selection_set( - set_id, - buffer.selections_from_ranges(vec![1..3]).unwrap(), - cx, - ) - .unwrap(); - buffer.edit(vec![4..5], "e", cx); - buffer.end_transaction_at(Some(set_id), now, cx).unwrap(); - assert_eq!(buffer.text(), "12cde6"); - assert_eq!(buffer.selection_ranges(set_id).unwrap(), vec![1..3]); - - now += buffer.history.group_interval + Duration::from_millis(1); - buffer.start_transaction_at(Some(set_id), now).unwrap(); - buffer - .update_selection_set( - set_id, - buffer.selections_from_ranges(vec![2..2]).unwrap(), - cx, - ) - .unwrap(); - buffer.edit(vec![0..1], "a", cx); - buffer.edit(vec![1..1], "b", cx); - buffer.end_transaction_at(Some(set_id), now, cx).unwrap(); - assert_eq!(buffer.text(), "ab2cde6"); - assert_eq!(buffer.selection_ranges(set_id).unwrap(), vec![3..3]); - - // Last transaction happened past the group interval, undo it on its - // own. - buffer.undo(cx); - assert_eq!(buffer.text(), "12cde6"); - assert_eq!(buffer.selection_ranges(set_id).unwrap(), vec![1..3]); - - // First two transactions happened within the group interval, undo them - // together. - buffer.undo(cx); - assert_eq!(buffer.text(), "123456"); - assert_eq!(buffer.selection_ranges(set_id).unwrap(), vec![4..4]); - - // Redo the first two transactions together. - buffer.redo(cx); - assert_eq!(buffer.text(), "12cde6"); - assert_eq!(buffer.selection_ranges(set_id).unwrap(), vec![1..3]); - - // Redo the last transaction on its own. 
- buffer.redo(cx); - assert_eq!(buffer.text(), "ab2cde6"); - assert_eq!(buffer.selection_ranges(set_id).unwrap(), vec![3..3]); - - buffer.start_transaction_at(None, now).unwrap(); - buffer.end_transaction_at(None, now, cx).unwrap(); - buffer.undo(cx); - assert_eq!(buffer.text(), "12cde6"); - - buffer - }); +#[test] +fn test_history() { + let mut now = Instant::now(); + let mut buffer = TextBuffer::new(0, 0, History::new("123456".into())); + + let set_id = if let Operation::UpdateSelections { set_id, .. } = + buffer.add_selection_set(buffer.selections_from_ranges(vec![4..4]).unwrap()) + { + set_id + } else { + unreachable!() + }; + buffer.start_transaction_at(Some(set_id), now).unwrap(); + buffer.edit(vec![2..4], "cd"); + buffer.end_transaction_at(Some(set_id), now).unwrap(); + assert_eq!(buffer.text(), "12cd56"); + assert_eq!(buffer.selection_ranges(set_id).unwrap(), vec![4..4]); + + buffer.start_transaction_at(Some(set_id), now).unwrap(); + buffer + .update_selection_set(set_id, buffer.selections_from_ranges(vec![1..3]).unwrap()) + .unwrap(); + buffer.edit(vec![4..5], "e"); + buffer.end_transaction_at(Some(set_id), now).unwrap(); + assert_eq!(buffer.text(), "12cde6"); + assert_eq!(buffer.selection_ranges(set_id).unwrap(), vec![1..3]); + + now += buffer.history.group_interval + Duration::from_millis(1); + buffer.start_transaction_at(Some(set_id), now).unwrap(); + buffer + .update_selection_set(set_id, buffer.selections_from_ranges(vec![2..2]).unwrap()) + .unwrap(); + buffer.edit(vec![0..1], "a"); + buffer.edit(vec![1..1], "b"); + buffer.end_transaction_at(Some(set_id), now).unwrap(); + assert_eq!(buffer.text(), "ab2cde6"); + assert_eq!(buffer.selection_ranges(set_id).unwrap(), vec![3..3]); + + // Last transaction happened past the group interval, undo it on its + // own. + buffer.undo(); + assert_eq!(buffer.text(), "12cde6"); + assert_eq!(buffer.selection_ranges(set_id).unwrap(), vec![1..3]); + + // First two transactions happened within the group interval, undo them + // together. + buffer.undo(); + assert_eq!(buffer.text(), "123456"); + assert_eq!(buffer.selection_ranges(set_id).unwrap(), vec![4..4]); + + // Redo the first two transactions together. + buffer.redo(); + assert_eq!(buffer.text(), "12cde6"); + assert_eq!(buffer.selection_ranges(set_id).unwrap(), vec![1..3]); + + // Redo the last transaction on its own. 
+ buffer.redo(); + assert_eq!(buffer.text(), "ab2cde6"); + assert_eq!(buffer.selection_ranges(set_id).unwrap(), vec![3..3]); + + buffer.start_transaction_at(None, now).unwrap(); + assert!(buffer.end_transaction_at(None, now).is_none()); + buffer.undo(); + assert_eq!(buffer.text(), "12cde6"); } -#[gpui::test] -fn test_concurrent_edits(cx: &mut gpui::MutableAppContext) { +#[test] +fn test_concurrent_edits() { let text = "abcdef"; - let buffer1 = cx.add_model(|cx| Buffer::new(1, text, cx)); - let buffer2 = cx.add_model(|cx| Buffer::new(2, text, cx)); - let buffer3 = cx.add_model(|cx| Buffer::new(3, text, cx)); - - let buf1_op = buffer1.update(cx, |buffer, cx| { - buffer.edit(vec![1..2], "12", cx); - assert_eq!(buffer.text(), "a12cdef"); - buffer.operations.last().unwrap().clone() - }); - let buf2_op = buffer2.update(cx, |buffer, cx| { - buffer.edit(vec![3..4], "34", cx); - assert_eq!(buffer.text(), "abc34ef"); - buffer.operations.last().unwrap().clone() - }); - let buf3_op = buffer3.update(cx, |buffer, cx| { - buffer.edit(vec![5..6], "56", cx); - assert_eq!(buffer.text(), "abcde56"); - buffer.operations.last().unwrap().clone() - }); - - buffer1.update(cx, |buffer, _| { - buffer.apply_op(buf2_op.clone()).unwrap(); - buffer.apply_op(buf3_op.clone()).unwrap(); - }); - buffer2.update(cx, |buffer, _| { - buffer.apply_op(buf1_op.clone()).unwrap(); - buffer.apply_op(buf3_op.clone()).unwrap(); - }); - buffer3.update(cx, |buffer, _| { - buffer.apply_op(buf1_op.clone()).unwrap(); - buffer.apply_op(buf2_op.clone()).unwrap(); - }); - - assert_eq!(buffer1.read(cx).text(), "a12c34e56"); - assert_eq!(buffer2.read(cx).text(), "a12c34e56"); - assert_eq!(buffer3.read(cx).text(), "a12c34e56"); + let mut buffer1 = TextBuffer::new(1, 0, History::new(text.into())); + let mut buffer2 = TextBuffer::new(2, 0, History::new(text.into())); + let mut buffer3 = TextBuffer::new(3, 0, History::new(text.into())); + + let buf1_op = buffer1.edit(vec![1..2], "12"); + assert_eq!(buffer1.text(), "a12cdef"); + let buf2_op = buffer2.edit(vec![3..4], "34"); + assert_eq!(buffer2.text(), "abc34ef"); + let buf3_op = buffer3.edit(vec![5..6], "56"); + assert_eq!(buffer3.text(), "abcde56"); + + buffer1.apply_op(Operation::Edit(buf2_op.clone())).unwrap(); + buffer1.apply_op(Operation::Edit(buf3_op.clone())).unwrap(); + buffer2.apply_op(Operation::Edit(buf1_op.clone())).unwrap(); + buffer2.apply_op(Operation::Edit(buf3_op.clone())).unwrap(); + buffer3.apply_op(Operation::Edit(buf1_op.clone())).unwrap(); + buffer3.apply_op(Operation::Edit(buf2_op.clone())).unwrap(); + + assert_eq!(buffer1.text(), "a12c34e56"); + assert_eq!(buffer2.text(), "a12c34e56"); + assert_eq!(buffer3.text(), "a12c34e56"); } -#[gpui::test(iterations = 100)] -fn test_random_concurrent_edits(cx: &mut gpui::MutableAppContext, mut rng: StdRng) { - let peers = env::var("PEERS") - .map(|i| i.parse().expect("invalid `PEERS` variable")) - .unwrap_or(5); - let operations = env::var("OPERATIONS") - .map(|i| i.parse().expect("invalid `OPERATIONS` variable")) - .unwrap_or(10); - - let base_text_len = rng.gen_range(0..10); - let base_text = RandomCharIter::new(&mut rng) - .take(base_text_len) - .collect::(); - let mut replica_ids = Vec::new(); - let mut buffers = Vec::new(); - let mut network = Network::new(rng.clone()); - - for i in 0..peers { - let buffer = cx.add_model(|cx| { - let mut buf = Buffer::new(i as ReplicaId, base_text.as_str(), cx); - buf.history.group_interval = Duration::from_millis(rng.gen_range(0..=200)); - buf - }); - buffers.push(buffer); - replica_ids.push(i as 
u16); - network.add_peer(i as u16); - } - - log::info!("initial text: {:?}", base_text); - - let mut mutation_count = operations; - loop { - let replica_index = rng.gen_range(0..peers); - let replica_id = replica_ids[replica_index]; - buffers[replica_index].update(cx, |buffer, cx| match rng.gen_range(0..=100) { - 0..=50 if mutation_count != 0 => { - buffer.randomly_mutate(&mut rng, cx); - network.broadcast(buffer.replica_id, mem::take(&mut buffer.operations)); - log::info!("buffer {} text: {:?}", buffer.replica_id, buffer.text()); - mutation_count -= 1; - } - 51..=70 if mutation_count != 0 => { - buffer.randomly_undo_redo(&mut rng, cx); - network.broadcast(buffer.replica_id, mem::take(&mut buffer.operations)); - mutation_count -= 1; - } - 71..=100 if network.has_unreceived(replica_id) => { - let ops = network.receive(replica_id); - if !ops.is_empty() { - log::info!( - "peer {} applying {} ops from the network.", - replica_id, - ops.len() - ); - buffer.apply_ops(ops, cx).unwrap(); - } - } - _ => {} - }); - - if mutation_count == 0 && network.is_idle() { - break; - } - } - - let first_buffer = buffers[0].read(cx); - for buffer in &buffers[1..] { - let buffer = buffer.read(cx); - assert_eq!( - buffer.text(), - first_buffer.text(), - "Replica {} text != Replica 0 text", - buffer.replica_id - ); - assert_eq!( - buffer.selection_sets().collect::>(), - first_buffer.selection_sets().collect::>() - ); - assert_eq!( - buffer.all_selection_ranges().collect::>(), - first_buffer - .all_selection_ranges() - .collect::>() - ); - } -} +// #[gpui::test(iterations = 100)] +// fn test_random_concurrent_edits(cx: &mut gpui::MutableAppContext, mut rng: StdRng) { +// let peers = env::var("PEERS") +// .map(|i| i.parse().expect("invalid `PEERS` variable")) +// .unwrap_or(5); +// let operations = env::var("OPERATIONS") +// .map(|i| i.parse().expect("invalid `OPERATIONS` variable")) +// .unwrap_or(10); + +// let base_text_len = rng.gen_range(0..10); +// let base_text = RandomCharIter::new(&mut rng) +// .take(base_text_len) +// .collect::(); +// let mut replica_ids = Vec::new(); +// let mut buffers = Vec::new(); +// let mut network = Network::new(rng.clone()); + +// for i in 0..peers { +// let buffer = cx.add_model(|cx| { +// let mut buf = Buffer::new(i as ReplicaId, base_text.as_str(), cx); +// buf.history.group_interval = Duration::from_millis(rng.gen_range(0..=200)); +// buf +// }); +// buffers.push(buffer); +// replica_ids.push(i as u16); +// network.add_peer(i as u16); +// } + +// log::info!("initial text: {:?}", base_text); + +// let mut mutation_count = operations; +// loop { +// let replica_index = rng.gen_range(0..peers); +// let replica_id = replica_ids[replica_index]; +// buffers[replica_index].update(cx, |buffer, cx| match rng.gen_range(0..=100) { +// 0..=50 if mutation_count != 0 => { +// buffer.randomly_mutate(&mut rng, cx); +// network.broadcast(buffer.replica_id, mem::take(&mut buffer.operations)); +// log::info!("buffer {} text: {:?}", buffer.replica_id, buffer.text()); +// mutation_count -= 1; +// } +// 51..=70 if mutation_count != 0 => { +// buffer.randomly_undo_redo(&mut rng, cx); +// network.broadcast(buffer.replica_id, mem::take(&mut buffer.operations)); +// mutation_count -= 1; +// } +// 71..=100 if network.has_unreceived(replica_id) => { +// let ops = network.receive(replica_id); +// if !ops.is_empty() { +// log::info!( +// "peer {} applying {} ops from the network.", +// replica_id, +// ops.len() +// ); +// buffer.apply_ops(ops, cx).unwrap(); +// } +// } +// _ => {} +// }); + +// if 
mutation_count == 0 && network.is_idle() { +// break; +// } +// } + +// let first_buffer = buffers[0].read(cx); +// for buffer in &buffers[1..] { +// let buffer = buffer.read(cx); +// assert_eq!( +// buffer.text(), +// first_buffer.text(), +// "Replica {} text != Replica 0 text", +// buffer.replica_id +// ); +// assert_eq!( +// buffer.selection_sets().collect::>(), +// first_buffer.selection_sets().collect::>() +// ); +// assert_eq!( +// buffer.all_selection_ranges().collect::>(), +// first_buffer +// .all_selection_ranges() +// .collect::>() +// ); +// } +// } #[derive(Clone)] struct Envelope { diff --git a/crates/buffer/src/tests/syntax.rs b/crates/buffer/src/tests/syntax.rs index 4b897dd9427e29281fe578550afc5d34e5484837..e61f8ffd5e7687dfbc9fa66945b7e1256261fd6b 100644 --- a/crates/buffer/src/tests/syntax.rs +++ b/crates/buffer/src/tests/syntax.rs @@ -351,6 +351,19 @@ fn test_contiguous_ranges() { ); } +impl Buffer { + pub fn enclosing_bracket_point_ranges( + &self, + range: Range, + ) -> Option<(Range, Range)> { + self.enclosing_bracket_ranges(range).map(|(start, end)| { + let point_start = start.start.to_point(self)..start.end.to_point(self); + let point_end = end.start.to_point(self)..end.end.to_point(self); + (point_start, point_end) + }) + } +} + fn rust_lang() -> Arc { Arc::new( Language::new( diff --git a/crates/editor/src/display_map.rs b/crates/editor/src/display_map.rs index d9655d9a9c13386413ec83ce2cfcb56afafaf07d..209d63940779233f8025c231c5073f9fac272a56 100644 --- a/crates/editor/src/display_map.rs +++ b/crates/editor/src/display_map.rs @@ -436,7 +436,7 @@ mod tests { } } _ => { - buffer.update(&mut cx, |buffer, cx| buffer.randomly_mutate(&mut rng, cx)); + buffer.update(&mut cx, |buffer, cx| buffer.randomly_edit(&mut rng, 5, cx)); } } From cdb268e656bcedc3c0df3f241aa841a01b56b8c4 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Wed, 20 Oct 2021 21:44:26 +0200 Subject: [PATCH 2/7] Re-enable randomized concurrent edits test --- crates/buffer/src/lib.rs | 46 +++--- crates/buffer/src/tests/buffer.rs | 172 +++++++++++----------- crates/editor/src/display_map.rs | 2 +- crates/editor/src/display_map/fold_map.rs | 4 +- crates/editor/src/display_map/wrap_map.rs | 2 +- 5 files changed, 108 insertions(+), 118 deletions(-) diff --git a/crates/buffer/src/lib.rs b/crates/buffer/src/lib.rs index ad3a3194219039deee82b4945d47e0f60f2dcaff..c8d9c80d5ebeaa35256c2dee0274dc3a25709b2a 100644 --- a/crates/buffer/src/lib.rs +++ b/crates/buffer/src/lib.rs @@ -2414,27 +2414,18 @@ impl Buffer { #[cfg(any(test, feature = "test-support"))] impl Buffer { - pub fn randomly_edit( - &mut self, - rng: &mut T, - old_range_count: usize, - _: &mut ModelContext, - ) -> (Vec>, String) + pub fn randomly_edit(&mut self, rng: &mut T, old_range_count: usize) where T: rand::Rng, { - self.buffer.randomly_edit(rng, old_range_count) + self.buffer.randomly_edit(rng, old_range_count); } - pub fn randomly_mutate( - &mut self, - rng: &mut T, - _: &mut ModelContext, - ) -> (Vec>, String) + pub fn randomly_mutate(&mut self, rng: &mut T) where T: rand::Rng, { - self.buffer.randomly_mutate(rng) + self.buffer.randomly_mutate(rng); } } @@ -2450,7 +2441,7 @@ impl TextBuffer { &mut self, rng: &mut T, old_range_count: usize, - ) -> (Vec>, String) + ) -> (Vec>, String, Operation) where T: rand::Rng, { @@ -2472,17 +2463,17 @@ impl TextBuffer { old_ranges, new_text ); - self.edit(old_ranges.iter().cloned(), new_text.as_str()); - (old_ranges, new_text) + let op = self.edit(old_ranges.iter().cloned(), new_text.as_str()); + (old_ranges, 
new_text, Operation::Edit(op)) } - pub fn randomly_mutate(&mut self, rng: &mut T) -> (Vec>, String) + pub fn randomly_mutate(&mut self, rng: &mut T) -> Vec where T: rand::Rng, { use rand::prelude::*; - let (old_ranges, new_text) = self.randomly_edit(rng, 5); + let mut ops = vec![self.randomly_edit(rng, 5).2]; // Randomly add, remove or mutate selection sets. let replica_selection_sets = &self @@ -2492,7 +2483,7 @@ impl TextBuffer { .collect::>(); let set_id = replica_selection_sets.choose(rng); if set_id.is_some() && rng.gen_bool(1.0 / 6.0) { - self.remove_selection_set(*set_id.unwrap()).unwrap(); + ops.push(self.remove_selection_set(*set_id.unwrap()).unwrap()); } else { let mut ranges = Vec::new(); for _ in 0..5 { @@ -2500,20 +2491,22 @@ impl TextBuffer { } let new_selections = self.selections_from_ranges(ranges).unwrap(); - if set_id.is_none() || rng.gen_bool(1.0 / 5.0) { - self.add_selection_set(new_selections); + let op = if set_id.is_none() || rng.gen_bool(1.0 / 5.0) { + self.add_selection_set(new_selections) } else { self.update_selection_set(*set_id.unwrap(), new_selections) - .unwrap(); - } + .unwrap() + }; + ops.push(op); } - (old_ranges, new_text) + ops } - pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng) { + pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng) -> Vec { use rand::prelude::*; + let mut ops = Vec::new(); for _ in 0..rng.gen_range(1..=5) { if let Some(transaction) = self.history.undo_stack.choose(rng).cloned() { log::info!( @@ -2521,9 +2514,10 @@ impl TextBuffer { self.replica_id, transaction ); - self.undo_or_redo(transaction).unwrap(); + ops.push(self.undo_or_redo(transaction).unwrap()); } } + ops } fn selections_from_ranges(&self, ranges: I) -> Result> diff --git a/crates/buffer/src/tests/buffer.rs b/crates/buffer/src/tests/buffer.rs index 74c27e99de72ff1c9ecb232d5e3c3cc6f038b3c7..f0c9051daa6b572d5226f7c2cb9e0cb7788c7142 100644 --- a/crates/buffer/src/tests/buffer.rs +++ b/crates/buffer/src/tests/buffer.rs @@ -6,7 +6,6 @@ use std::{ cmp::Ordering, env, iter::Iterator, - mem, rc::Rc, time::{Duration, Instant}, }; @@ -104,7 +103,7 @@ fn test_random_edits(mut rng: StdRng) { ); for _i in 0..operations { - let (old_ranges, new_text) = buffer.randomly_mutate(&mut rng); + let (old_ranges, new_text, _) = buffer.randomly_edit(&mut rng, 5); for old_range in old_ranges.iter().rev() { reference_string.replace_range(old_range.clone(), &new_text); } @@ -571,92 +570,89 @@ fn test_concurrent_edits() { assert_eq!(buffer3.text(), "a12c34e56"); } -// #[gpui::test(iterations = 100)] -// fn test_random_concurrent_edits(cx: &mut gpui::MutableAppContext, mut rng: StdRng) { -// let peers = env::var("PEERS") -// .map(|i| i.parse().expect("invalid `PEERS` variable")) -// .unwrap_or(5); -// let operations = env::var("OPERATIONS") -// .map(|i| i.parse().expect("invalid `OPERATIONS` variable")) -// .unwrap_or(10); - -// let base_text_len = rng.gen_range(0..10); -// let base_text = RandomCharIter::new(&mut rng) -// .take(base_text_len) -// .collect::(); -// let mut replica_ids = Vec::new(); -// let mut buffers = Vec::new(); -// let mut network = Network::new(rng.clone()); - -// for i in 0..peers { -// let buffer = cx.add_model(|cx| { -// let mut buf = Buffer::new(i as ReplicaId, base_text.as_str(), cx); -// buf.history.group_interval = Duration::from_millis(rng.gen_range(0..=200)); -// buf -// }); -// buffers.push(buffer); -// replica_ids.push(i as u16); -// network.add_peer(i as u16); -// } - -// log::info!("initial text: {:?}", base_text); - -// let mut 
mutation_count = operations; -// loop { -// let replica_index = rng.gen_range(0..peers); -// let replica_id = replica_ids[replica_index]; -// buffers[replica_index].update(cx, |buffer, cx| match rng.gen_range(0..=100) { -// 0..=50 if mutation_count != 0 => { -// buffer.randomly_mutate(&mut rng, cx); -// network.broadcast(buffer.replica_id, mem::take(&mut buffer.operations)); -// log::info!("buffer {} text: {:?}", buffer.replica_id, buffer.text()); -// mutation_count -= 1; -// } -// 51..=70 if mutation_count != 0 => { -// buffer.randomly_undo_redo(&mut rng, cx); -// network.broadcast(buffer.replica_id, mem::take(&mut buffer.operations)); -// mutation_count -= 1; -// } -// 71..=100 if network.has_unreceived(replica_id) => { -// let ops = network.receive(replica_id); -// if !ops.is_empty() { -// log::info!( -// "peer {} applying {} ops from the network.", -// replica_id, -// ops.len() -// ); -// buffer.apply_ops(ops, cx).unwrap(); -// } -// } -// _ => {} -// }); - -// if mutation_count == 0 && network.is_idle() { -// break; -// } -// } - -// let first_buffer = buffers[0].read(cx); -// for buffer in &buffers[1..] { -// let buffer = buffer.read(cx); -// assert_eq!( -// buffer.text(), -// first_buffer.text(), -// "Replica {} text != Replica 0 text", -// buffer.replica_id -// ); -// assert_eq!( -// buffer.selection_sets().collect::>(), -// first_buffer.selection_sets().collect::>() -// ); -// assert_eq!( -// buffer.all_selection_ranges().collect::>(), -// first_buffer -// .all_selection_ranges() -// .collect::>() -// ); -// } -// } +#[gpui::test(iterations = 100)] +fn test_random_concurrent_edits(mut rng: StdRng) { + let peers = env::var("PEERS") + .map(|i| i.parse().expect("invalid `PEERS` variable")) + .unwrap_or(5); + let operations = env::var("OPERATIONS") + .map(|i| i.parse().expect("invalid `OPERATIONS` variable")) + .unwrap_or(10); + + let base_text_len = rng.gen_range(0..10); + let base_text = RandomCharIter::new(&mut rng) + .take(base_text_len) + .collect::(); + let mut replica_ids = Vec::new(); + let mut buffers = Vec::new(); + let mut network = Network::new(rng.clone()); + + for i in 0..peers { + let mut buffer = TextBuffer::new(i as ReplicaId, 0, History::new(base_text.clone().into())); + buffer.history.group_interval = Duration::from_millis(rng.gen_range(0..=200)); + buffers.push(buffer); + replica_ids.push(i as u16); + network.add_peer(i as u16); + } + + log::info!("initial text: {:?}", base_text); + + let mut mutation_count = operations; + loop { + let replica_index = rng.gen_range(0..peers); + let replica_id = replica_ids[replica_index]; + let buffer = &mut buffers[replica_index]; + match rng.gen_range(0..=100) { + 0..=50 if mutation_count != 0 => { + let ops = buffer.randomly_mutate(&mut rng); + network.broadcast(buffer.replica_id, ops); + log::info!("buffer {} text: {:?}", buffer.replica_id, buffer.text()); + mutation_count -= 1; + } + 51..=70 if mutation_count != 0 => { + let ops = buffer.randomly_undo_redo(&mut rng); + network.broadcast(buffer.replica_id, ops); + mutation_count -= 1; + } + 71..=100 if network.has_unreceived(replica_id) => { + let ops = network.receive(replica_id); + if !ops.is_empty() { + log::info!( + "peer {} applying {} ops from the network.", + replica_id, + ops.len() + ); + buffer.apply_ops(ops).unwrap(); + } + } + _ => {} + } + + if mutation_count == 0 && network.is_idle() { + break; + } + } + + let first_buffer = &buffers[0]; + for buffer in &buffers[1..] 
{ + assert_eq!( + buffer.text(), + first_buffer.text(), + "Replica {} text != Replica 0 text", + buffer.replica_id + ); + assert_eq!( + buffer.selection_sets().collect::>(), + first_buffer.selection_sets().collect::>() + ); + assert_eq!( + buffer.all_selection_ranges().collect::>(), + first_buffer + .all_selection_ranges() + .collect::>() + ); + } +} #[derive(Clone)] struct Envelope { diff --git a/crates/editor/src/display_map.rs b/crates/editor/src/display_map.rs index 209d63940779233f8025c231c5073f9fac272a56..dd348b6e46e25fb9336f933ca6ec41606950556c 100644 --- a/crates/editor/src/display_map.rs +++ b/crates/editor/src/display_map.rs @@ -436,7 +436,7 @@ mod tests { } } _ => { - buffer.update(&mut cx, |buffer, cx| buffer.randomly_edit(&mut rng, 5, cx)); + buffer.update(&mut cx, |buffer, cx| buffer.randomly_edit(&mut rng, 5)); } } diff --git a/crates/editor/src/display_map/fold_map.rs b/crates/editor/src/display_map/fold_map.rs index 3dc671e59d49db3eefb1c6b9a5e6290041b14e0c..0a1e01e0c06ec942c9486a042c23b3f54a6c5359 100644 --- a/crates/editor/src/display_map/fold_map.rs +++ b/crates/editor/src/display_map/fold_map.rs @@ -1,4 +1,4 @@ -use buffer::{Anchor, Buffer, Point, ToOffset, AnchorRangeExt, HighlightId, TextSummary}; +use buffer::{Anchor, AnchorRangeExt, Buffer, HighlightId, Point, TextSummary, ToOffset}; use gpui::{AppContext, ModelHandle}; use parking_lot::Mutex; use std::{ @@ -1334,7 +1334,7 @@ mod tests { let edits = buffer.update(cx, |buffer, cx| { let start_version = buffer.version.clone(); let edit_count = rng.gen_range(1..=5); - buffer.randomly_edit(&mut rng, edit_count, cx); + buffer.randomly_edit(&mut rng, edit_count); buffer.edits_since(start_version).collect::>() }); log::info!("editing {:?}", edits); diff --git a/crates/editor/src/display_map/wrap_map.rs b/crates/editor/src/display_map/wrap_map.rs index fa26685a6501a2616cacdd539f52fb8414bf8242..884657f5024b68be989a761c1730cfb9ab5242d2 100644 --- a/crates/editor/src/display_map/wrap_map.rs +++ b/crates/editor/src/display_map/wrap_map.rs @@ -990,7 +990,7 @@ mod tests { } } _ => { - buffer.update(&mut cx, |buffer, cx| buffer.randomly_mutate(&mut rng, cx)); + buffer.update(&mut cx, |buffer, cx| buffer.randomly_mutate(&mut rng)); } } From 81a85e9c792dfdb5e533e4955341900ce93f1cab Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Wed, 20 Oct 2021 22:51:40 +0200 Subject: [PATCH 3/7] Extract a language crate --- Cargo.lock | 30 + crates/buffer/src/anchor.rs | 6 +- crates/buffer/src/lib.rs | 1751 ++--------------- crates/buffer/src/selection.rs | 8 +- crates/buffer/src/tests.rs | 660 ++++++- crates/buffer/src/tests/buffer.rs | 733 ------- crates/editor/Cargo.toml | 2 + crates/editor/src/display_map.rs | 8 +- crates/editor/src/display_map/fold_map.rs | 8 +- crates/editor/src/display_map/tab_map.rs | 2 +- crates/editor/src/display_map/wrap_map.rs | 6 +- crates/editor/src/element.rs | 4 +- crates/editor/src/lib.rs | 18 +- crates/language/Cargo.toml | 35 + .../{buffer => language}/src/highlight_map.rs | 0 crates/{buffer => language}/src/language.rs | 0 crates/language/src/lib.rs | 1471 ++++++++++++++ .../tests/syntax.rs => language/src/tests.rs} | 76 +- crates/project/Cargo.toml | 2 + crates/project/src/lib.rs | 4 +- crates/project/src/worktree.rs | 32 +- crates/server/src/rpc.rs | 2 +- crates/workspace/Cargo.toml | 3 +- crates/workspace/src/items.rs | 2 +- crates/workspace/src/lib.rs | 6 +- crates/zed/Cargo.toml | 3 + crates/zed/src/language.rs | 2 +- crates/zed/src/lib.rs | 3 +- crates/zed/src/test.rs | 2 +- 29 files changed, 2492 
insertions(+), 2387 deletions(-) delete mode 100644 crates/buffer/src/tests/buffer.rs create mode 100644 crates/language/Cargo.toml rename crates/{buffer => language}/src/highlight_map.rs (100%) rename crates/{buffer => language}/src/language.rs (100%) create mode 100644 crates/language/src/lib.rs rename crates/{buffer/src/tests/syntax.rs => language/src/tests.rs} (81%) diff --git a/Cargo.lock b/Cargo.lock index 0cee1f91d7813a6c6aefc1e3ad3e46239dae5187..07a6c0a1a2e7a6fa399ee22dee16fec0bdcb7045 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1624,6 +1624,7 @@ dependencies = [ "buffer", "clock", "gpui", + "language", "lazy_static", "log", "parking_lot", @@ -2816,6 +2817,32 @@ dependencies = [ "log", ] +[[package]] +name = "language" +version = "0.1.0" +dependencies = [ + "anyhow", + "arrayvec 0.7.1", + "buffer", + "clock", + "gpui", + "lazy_static", + "log", + "parking_lot", + "rand 0.8.3", + "rpc", + "seahash", + "serde 1.0.125", + "similar", + "smallvec", + "smol", + "sum_tree", + "theme", + "tree-sitter", + "tree-sitter-rust", + "unindent", +] + [[package]] name = "lazy_static" version = "1.4.0" @@ -3760,6 +3787,7 @@ dependencies = [ "fuzzy", "gpui", "ignore", + "language", "lazy_static", "libc", "log", @@ -6106,6 +6134,7 @@ dependencies = [ "client", "editor", "gpui", + "language", "log", "postage", "project", @@ -6176,6 +6205,7 @@ dependencies = [ "ignore", "image 0.23.14", "indexmap", + "language", "lazy_static", "libc", "log", diff --git a/crates/buffer/src/anchor.rs b/crates/buffer/src/anchor.rs index 1ac82727df7485bb6d098a66b251ecb465cc1cc6..0a0d63d949cf254d4e9c74aca8615a1e393e5e1b 100644 --- a/crates/buffer/src/anchor.rs +++ b/crates/buffer/src/anchor.rs @@ -1,6 +1,6 @@ use crate::Point; -use super::{Buffer, Content}; +use super::{Content, TextBuffer}; use anyhow::Result; use std::{cmp::Ordering, ops::Range}; use sum_tree::Bias; @@ -65,7 +65,7 @@ impl Anchor { Ok(offset_comparison.then_with(|| self.bias.cmp(&other.bias))) } - pub fn bias_left(&self, buffer: &Buffer) -> Anchor { + pub fn bias_left(&self, buffer: &TextBuffer) -> Anchor { if self.bias == Bias::Left { self.clone() } else { @@ -73,7 +73,7 @@ impl Anchor { } } - pub fn bias_right(&self, buffer: &Buffer) -> Anchor { + pub fn bias_right(&self, buffer: &TextBuffer) -> Anchor { if self.bias == Bias::Right { self.clone() } else { diff --git a/crates/buffer/src/lib.rs b/crates/buffer/src/lib.rs index c8d9c80d5ebeaa35256c2dee0274dc3a25709b2a..3e17808b68a9735bb0e03abb48209928253dd175 100644 --- a/crates/buffer/src/lib.rs +++ b/crates/buffer/src/lib.rs @@ -1,6 +1,4 @@ mod anchor; -mod highlight_map; -mod language; mod operation_queue; mod point; #[cfg(any(test, feature = "test-support"))] @@ -13,13 +11,7 @@ mod tests; pub use anchor::*; use anyhow::{anyhow, Result}; use clock::ReplicaId; -use gpui::{AppContext, Entity, ModelContext, MutableAppContext, Task}; -pub use highlight_map::{HighlightId, HighlightMap}; -use language::Tree; -pub use language::{BracketPair, Language, LanguageConfig, LanguageRegistry}; -use lazy_static::lazy_static; use operation_queue::OperationQueue; -use parking_lot::Mutex; pub use point::*; #[cfg(any(test, feature = "test-support"))] pub use random_char_iter::*; @@ -27,66 +19,17 @@ pub use rope::{Chunks, Rope, TextSummary}; use rpc::proto; use seahash::SeaHasher; pub use selection::*; -use similar::{ChangeTag, TextDiff}; -use smol::future::yield_now; use std::{ - any::Any, - cell::RefCell, cmp, - collections::BTreeMap, convert::{TryFrom, TryInto}, - ffi::OsString, - future::Future, hash::BuildHasher, 
iter::Iterator, - ops::{Deref, DerefMut, Range}, - path::{Path, PathBuf}, + ops::Range, str, sync::Arc, - time::{Duration, Instant, SystemTime, UNIX_EPOCH}, + time::{Duration, Instant}, }; use sum_tree::{Bias, FilterCursor, SumTree}; -use tree_sitter::{InputEdit, Parser, QueryCursor}; - -pub trait File { - fn worktree_id(&self) -> usize; - - fn entry_id(&self) -> Option; - - fn set_entry_id(&mut self, entry_id: Option); - - fn mtime(&self) -> SystemTime; - - fn set_mtime(&mut self, mtime: SystemTime); - - fn path(&self) -> &Arc; - - fn set_path(&mut self, path: Arc); - - fn full_path(&self, cx: &AppContext) -> PathBuf; - - /// Returns the last component of this handle's absolute path. If this handle refers to the root - /// of its worktree, then this method will return the name of the worktree itself. - fn file_name<'a>(&'a self, cx: &'a AppContext) -> Option; - - fn is_deleted(&self) -> bool; - - fn save( - &self, - buffer_id: u64, - text: Rope, - version: clock::Global, - cx: &mut MutableAppContext, - ) -> Task>; - - fn buffer_updated(&self, buffer_id: u64, operation: Operation, cx: &mut MutableAppContext); - - fn buffer_removed(&self, buffer_id: u64, cx: &mut MutableAppContext); - - fn boxed_clone(&self) -> Box; - - fn as_any(&self) -> &dyn Any; -} #[derive(Clone, Default)] struct DeterministicState; @@ -111,53 +54,6 @@ type HashMap = std::collections::HashMap; #[cfg(not(any(test, feature = "test-support")))] type HashSet = std::collections::HashSet; -thread_local! { - static PARSER: RefCell = RefCell::new(Parser::new()); -} - -lazy_static! { - static ref QUERY_CURSORS: Mutex> = Default::default(); -} - -// TODO - Make this configurable -const INDENT_SIZE: u32 = 4; - -struct QueryCursorHandle(Option); - -impl QueryCursorHandle { - fn new() -> Self { - QueryCursorHandle(Some( - QUERY_CURSORS - .lock() - .pop() - .unwrap_or_else(|| QueryCursor::new()), - )) - } -} - -impl Deref for QueryCursorHandle { - type Target = QueryCursor; - - fn deref(&self) -> &Self::Target { - self.0.as_ref().unwrap() - } -} - -impl DerefMut for QueryCursorHandle { - fn deref_mut(&mut self) -> &mut Self::Target { - self.0.as_mut().unwrap() - } -} - -impl Drop for QueryCursorHandle { - fn drop(&mut self) { - let mut cursor = self.0.take().unwrap(); - cursor.set_byte_range(0..usize::MAX); - cursor.set_point_range(Point::zero().into()..Point::MAX.into()); - QUERY_CURSORS.lock().push(cursor) - } -} - #[derive(Clone)] pub struct TextBuffer { fragments: SumTree, @@ -176,42 +72,12 @@ pub struct TextBuffer { lamport_clock: clock::Lamport, } -pub struct Buffer { - buffer: TextBuffer, - file: Option>, - saved_version: clock::Global, - saved_mtime: SystemTime, - language: Option>, - autoindent_requests: Vec>, - pending_autoindent: Option>, - sync_parse_timeout: Duration, - syntax_tree: Mutex>, - parsing_in_background: bool, - parse_count: usize, - #[cfg(test)] - operations: Vec, -} - #[derive(Clone, Debug, Eq, PartialEq)] pub struct SelectionSet { pub selections: Arc<[Selection]>, pub active: bool, } -#[derive(Clone)] -struct SyntaxTree { - tree: Tree, - version: clock::Global, -} - -#[derive(Clone)] -struct AutoindentRequest { - selection_set_ids: HashSet, - before_edit: Snapshot, - edited: AnchorSet, - inserted: Option, -} - #[derive(Clone, Debug)] pub struct Transaction { start: clock::Global, @@ -485,12 +351,6 @@ impl Edit { } } -struct Diff { - base_version: clock::Global, - new_text: Arc, - changes: Vec<(ChangeTag, usize)>, -} - #[derive(Copy, Clone, Debug, Default, Eq, PartialEq)] struct InsertionTimestamp { 
replica_id: ReplicaId, @@ -580,14 +440,6 @@ pub struct UndoOperation { version: clock::Global, } -impl Deref for Buffer { - type Target = TextBuffer; - - fn deref(&self) -> &Self::Target { - &self.buffer - } -} - impl TextBuffer { pub fn new(replica_id: u16, remote_id: u64, history: History) -> TextBuffer { let mut fragments = SumTree::new(); @@ -624,11 +476,69 @@ impl TextBuffer { } } + pub fn from_proto(replica_id: u16, message: proto::Buffer) -> Result { + let mut buffer = + TextBuffer::new(replica_id, message.id, History::new(message.content.into())); + let ops = message + .history + .into_iter() + .map(|op| Operation::Edit(op.into())); + buffer.apply_ops(ops)?; + buffer.selections = message + .selections + .into_iter() + .map(|set| { + let set_id = clock::Lamport { + replica_id: set.replica_id as ReplicaId, + value: set.local_timestamp, + }; + let selections: Vec = set + .selections + .into_iter() + .map(TryFrom::try_from) + .collect::>()?; + let set = SelectionSet { + selections: Arc::from(selections), + active: set.is_active, + }; + Result::<_, anyhow::Error>::Ok((set_id, set)) + }) + .collect::>()?; + Ok(buffer) + } + + pub fn to_proto(&self) -> proto::Buffer { + let ops = self.history.ops.values().map(Into::into).collect(); + proto::Buffer { + id: self.remote_id, + content: self.history.base_text.to_string(), + history: ops, + selections: self + .selections + .iter() + .map(|(set_id, set)| proto::SelectionSetSnapshot { + replica_id: set_id.replica_id as u32, + local_timestamp: set_id.value, + selections: set.selections.iter().map(Into::into).collect(), + is_active: set.active, + }) + .collect(), + } + } + pub fn version(&self) -> clock::Global { self.version.clone() } - fn content<'a>(&'a self) -> Content<'a> { + pub fn snapshot(&self) -> Snapshot { + Snapshot { + visible_text: self.visible_text.clone(), + fragments: self.fragments.clone(), + version: self.version.clone(), + } + } + + pub fn content<'a>(&'a self) -> Content<'a> { self.into() } @@ -1250,11 +1160,10 @@ impl TextBuffer { &mut self, selection_set_ids: impl IntoIterator, ) -> Result<()> { - self.start_transaction_at(selection_set_ids, Instant::now())?; - Ok(()) + self.start_transaction_at(selection_set_ids, Instant::now()) } - fn start_transaction_at( + pub fn start_transaction_at( &mut self, selection_set_ids: impl IntoIterator, now: Instant, @@ -1274,11 +1183,11 @@ impl TextBuffer { Ok(()) } - fn end_transaction(&mut self, selection_set_ids: impl IntoIterator) { + pub fn end_transaction(&mut self, selection_set_ids: impl IntoIterator) { self.end_transaction_at(selection_set_ids, Instant::now()); } - fn end_transaction_at( + pub fn end_transaction_at( &mut self, selection_set_ids: impl IntoIterator, now: Instant, @@ -1303,12 +1212,12 @@ impl TextBuffer { } } - fn remove_peer(&mut self, replica_id: ReplicaId) { + pub fn remove_peer(&mut self, replica_id: ReplicaId) { self.selections .retain(|set_id, _| set_id.replica_id != replica_id) } - fn undo(&mut self) -> Vec { + pub fn undo(&mut self) -> Vec { let mut ops = Vec::new(); if let Some(transaction) = self.history.pop_undo().cloned() { let selections = transaction.selections_before.clone(); @@ -1320,7 +1229,7 @@ impl TextBuffer { ops } - fn redo(&mut self) -> Vec { + pub fn redo(&mut self) -> Vec { let mut ops = Vec::new(); if let Some(transaction) = self.history.pop_redo().cloned() { let selections = transaction.selections_after.clone(); @@ -1458,1078 +1367,107 @@ impl TextBuffer { } } -impl Buffer { - pub fn new>>( - replica_id: ReplicaId, - base_text: T, - cx: &mut 
ModelContext, - ) -> Self { - Self::build( - replica_id, - History::new(base_text.into()), - None, - cx.model_id() as u64, - None, - cx, - ) +#[cfg(any(test, feature = "test-support"))] +impl TextBuffer { + fn random_byte_range(&mut self, start_offset: usize, rng: &mut impl rand::Rng) -> Range { + let end = self.clip_offset(rng.gen_range(start_offset..=self.len()), Bias::Right); + let start = self.clip_offset(rng.gen_range(start_offset..=end), Bias::Right); + start..end } - pub fn from_history( - replica_id: ReplicaId, - history: History, - file: Option>, - language: Option>, - cx: &mut ModelContext, - ) -> Self { - Self::build( - replica_id, - history, - file, - cx.model_id() as u64, - language, - cx, - ) + pub fn randomly_edit( + &mut self, + rng: &mut T, + old_range_count: usize, + ) -> (Vec>, String, Operation) + where + T: rand::Rng, + { + let mut old_ranges: Vec> = Vec::new(); + for _ in 0..old_range_count { + let last_end = old_ranges.last().map_or(0, |last_range| last_range.end + 1); + if last_end > self.len() { + break; + } + old_ranges.push(self.random_byte_range(last_end, rng)); + } + let new_text_len = rng.gen_range(0..10); + let new_text: String = crate::random_char_iter::RandomCharIter::new(&mut *rng) + .take(new_text_len) + .collect(); + log::info!( + "mutating buffer {} at {:?}: {:?}", + self.replica_id, + old_ranges, + new_text + ); + let op = self.edit(old_ranges.iter().cloned(), new_text.as_str()); + (old_ranges, new_text, Operation::Edit(op)) } - fn build( - replica_id: ReplicaId, - history: History, - file: Option>, - remote_id: u64, - language: Option>, - cx: &mut ModelContext, - ) -> Self { - let saved_mtime; - if let Some(file) = file.as_ref() { - saved_mtime = file.mtime(); - } else { - saved_mtime = UNIX_EPOCH; - } + pub fn randomly_mutate(&mut self, rng: &mut T) -> Vec + where + T: rand::Rng, + { + use rand::prelude::*; - let mut result = Self { - buffer: TextBuffer::new(replica_id, remote_id, history), - saved_mtime, - saved_version: clock::Global::new(), - file, - syntax_tree: Mutex::new(None), - parsing_in_background: false, - parse_count: 0, - sync_parse_timeout: Duration::from_millis(1), - autoindent_requests: Default::default(), - pending_autoindent: Default::default(), - language, + let mut ops = vec![self.randomly_edit(rng, 5).2]; - #[cfg(test)] - operations: Default::default(), - }; - result.reparse(cx); - result - } + // Randomly add, remove or mutate selection sets. 
+ let replica_selection_sets = &self + .selection_sets() + .map(|(set_id, _)| *set_id) + .filter(|set_id| self.replica_id == set_id.replica_id) + .collect::>(); + let set_id = replica_selection_sets.choose(rng); + if set_id.is_some() && rng.gen_bool(1.0 / 6.0) { + ops.push(self.remove_selection_set(*set_id.unwrap()).unwrap()); + } else { + let mut ranges = Vec::new(); + for _ in 0..5 { + ranges.push(self.random_byte_range(0, rng)); + } + let new_selections = self.selections_from_ranges(ranges).unwrap(); - pub fn snapshot(&self) -> Snapshot { - Snapshot { - visible_text: self.visible_text.clone(), - fragments: self.fragments.clone(), - version: self.version.clone(), - tree: self.syntax_tree(), - is_parsing: self.parsing_in_background, - language: self.language.clone(), - query_cursor: QueryCursorHandle::new(), + let op = if set_id.is_none() || rng.gen_bool(1.0 / 5.0) { + self.add_selection_set(new_selections) + } else { + self.update_selection_set(*set_id.unwrap(), new_selections) + .unwrap() + }; + ops.push(op); } - } - pub fn from_proto( - replica_id: ReplicaId, - message: proto::Buffer, - file: Option>, - language: Option>, - cx: &mut ModelContext, - ) -> Result { - let mut buffer = Buffer::build( - replica_id, - History::new(message.content.into()), - file, - message.id, - language, - cx, - ); - let ops = message - .history - .into_iter() - .map(|op| Operation::Edit(op.into())); - buffer.apply_ops(ops, cx)?; - buffer.buffer.selections = message - .selections - .into_iter() - .map(|set| { - let set_id = clock::Lamport { - replica_id: set.replica_id as ReplicaId, - value: set.local_timestamp, - }; - let selections: Vec = set - .selections - .into_iter() - .map(TryFrom::try_from) - .collect::>()?; - let set = SelectionSet { - selections: Arc::from(selections), - active: set.is_active, - }; - Result::<_, anyhow::Error>::Ok((set_id, set)) - }) - .collect::>()?; - Ok(buffer) + ops } - pub fn to_proto(&self, cx: &mut ModelContext) -> proto::Buffer { - let ops = self.history.ops.values().map(Into::into).collect(); - proto::Buffer { - id: cx.model_id() as u64, - content: self.history.base_text.to_string(), - history: ops, - selections: self - .selections - .iter() - .map(|(set_id, set)| proto::SelectionSetSnapshot { - replica_id: set_id.replica_id as u32, - local_timestamp: set_id.value, - selections: set.selections.iter().map(Into::into).collect(), - is_active: set.active, - }) - .collect(), - } - } + pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng) -> Vec { + use rand::prelude::*; - pub fn file(&self) -> Option<&dyn File> { - self.file.as_deref() + let mut ops = Vec::new(); + for _ in 0..rng.gen_range(1..=5) { + if let Some(transaction) = self.history.undo_stack.choose(rng).cloned() { + log::info!( + "undoing buffer {} transaction {:?}", + self.replica_id, + transaction + ); + ops.push(self.undo_or_redo(transaction).unwrap()); + } + } + ops } - pub fn file_mut(&mut self) -> Option<&mut dyn File> { - self.file.as_mut().map(|f| f.deref_mut() as &mut dyn File) - } + fn selections_from_ranges(&self, ranges: I) -> Result> + where + I: IntoIterator>, + { + use std::sync::atomic::{self, AtomicUsize}; - pub fn save( - &mut self, - cx: &mut ModelContext, - ) -> Result>> { - let file = self - .file - .as_ref() - .ok_or_else(|| anyhow!("buffer has no file"))?; - let text = self.visible_text.clone(); - let version = self.version.clone(); - let save = file.save(self.remote_id, text, version, cx.as_mut()); - Ok(cx.spawn(|this, mut cx| async move { - let (version, mtime) = save.await?; - 
this.update(&mut cx, |this, cx| { - this.did_save(version.clone(), mtime, None, cx); - }); - Ok((version, mtime)) - })) - } + static NEXT_SELECTION_ID: AtomicUsize = AtomicUsize::new(0); - pub fn set_language(&mut self, language: Option>, cx: &mut ModelContext) { - self.language = language; - self.reparse(cx); - } - - pub fn did_save( - &mut self, - version: clock::Global, - mtime: SystemTime, - new_file: Option>, - cx: &mut ModelContext, - ) { - self.saved_mtime = mtime; - self.saved_version = version; - if let Some(new_file) = new_file { - self.file = Some(new_file); - } - cx.emit(Event::Saved); - } - - pub fn file_updated( - &mut self, - path: Arc, - mtime: SystemTime, - new_text: Option, - cx: &mut ModelContext, - ) { - let file = self.file.as_mut().unwrap(); - let mut changed = false; - if path != *file.path() { - file.set_path(path); - changed = true; - } - - if mtime != file.mtime() { - file.set_mtime(mtime); - changed = true; - if let Some(new_text) = new_text { - if self.version == self.saved_version { - cx.spawn(|this, mut cx| async move { - let diff = this - .read_with(&cx, |this, cx| this.diff(new_text.into(), cx)) - .await; - this.update(&mut cx, |this, cx| { - if this.apply_diff(diff, cx) { - this.saved_version = this.version.clone(); - this.saved_mtime = mtime; - cx.emit(Event::Reloaded); - } - }); - }) - .detach(); - } - } - } - - if changed { - cx.emit(Event::FileHandleChanged); - } - } - - pub fn file_deleted(&mut self, cx: &mut ModelContext) { - if self.version == self.saved_version { - cx.emit(Event::Dirtied); - } - cx.emit(Event::FileHandleChanged); - } - - pub fn close(&mut self, cx: &mut ModelContext) { - cx.emit(Event::Closed); - } - - pub fn language(&self) -> Option<&Arc> { - self.language.as_ref() - } - - pub fn parse_count(&self) -> usize { - self.parse_count - } - - fn syntax_tree(&self) -> Option { - if let Some(syntax_tree) = self.syntax_tree.lock().as_mut() { - self.interpolate_tree(syntax_tree); - Some(syntax_tree.tree.clone()) - } else { - None - } - } - - #[cfg(any(test, feature = "test-support"))] - pub fn is_parsing(&self) -> bool { - self.parsing_in_background - } - - #[cfg(test)] - pub fn set_sync_parse_timeout(&mut self, timeout: Duration) { - self.sync_parse_timeout = timeout; - } - - fn reparse(&mut self, cx: &mut ModelContext) -> bool { - if self.parsing_in_background { - return false; - } - - if let Some(language) = self.language.clone() { - let old_tree = self.syntax_tree(); - let text = self.visible_text.clone(); - let parsed_version = self.version(); - let parse_task = cx.background().spawn({ - let language = language.clone(); - async move { Self::parse_text(&text, old_tree, &language) } - }); - - match cx - .background() - .block_with_timeout(self.sync_parse_timeout, parse_task) - { - Ok(new_tree) => { - self.did_finish_parsing(new_tree, parsed_version, cx); - return true; - } - Err(parse_task) => { - self.parsing_in_background = true; - cx.spawn(move |this, mut cx| async move { - let new_tree = parse_task.await; - this.update(&mut cx, move |this, cx| { - let language_changed = - this.language.as_ref().map_or(true, |curr_language| { - !Arc::ptr_eq(curr_language, &language) - }); - let parse_again = this.version > parsed_version || language_changed; - this.parsing_in_background = false; - this.did_finish_parsing(new_tree, parsed_version, cx); - - if parse_again && this.reparse(cx) { - return; - } - }); - }) - .detach(); - } - } - } - false - } - - fn parse_text(text: &Rope, old_tree: Option, language: &Language) -> Tree { - 
PARSER.with(|parser| { - let mut parser = parser.borrow_mut(); - parser - .set_language(language.grammar) - .expect("incompatible grammar"); - let mut chunks = text.chunks_in_range(0..text.len()); - let tree = parser - .parse_with( - &mut move |offset, _| { - chunks.seek(offset); - chunks.next().unwrap_or("").as_bytes() - }, - old_tree.as_ref(), - ) - .unwrap(); - tree - }) - } - - fn interpolate_tree(&self, tree: &mut SyntaxTree) { - let mut delta = 0_isize; - for edit in self.edits_since(tree.version.clone()) { - let start_offset = (edit.old_bytes.start as isize + delta) as usize; - let start_point = self.visible_text.to_point(start_offset); - tree.tree.edit(&InputEdit { - start_byte: start_offset, - old_end_byte: start_offset + edit.deleted_bytes(), - new_end_byte: start_offset + edit.inserted_bytes(), - start_position: start_point.into(), - old_end_position: (start_point + edit.deleted_lines()).into(), - new_end_position: self - .visible_text - .to_point(start_offset + edit.inserted_bytes()) - .into(), - }); - delta += edit.inserted_bytes() as isize - edit.deleted_bytes() as isize; - } - tree.version = self.version(); - } - - fn did_finish_parsing( - &mut self, - tree: Tree, - version: clock::Global, - cx: &mut ModelContext, - ) { - self.parse_count += 1; - *self.syntax_tree.lock() = Some(SyntaxTree { tree, version }); - self.request_autoindent(cx); - cx.emit(Event::Reparsed); - cx.notify(); - } - - fn request_autoindent(&mut self, cx: &mut ModelContext) { - if let Some(indent_columns) = self.compute_autoindents() { - let indent_columns = cx.background().spawn(indent_columns); - match cx - .background() - .block_with_timeout(Duration::from_micros(500), indent_columns) - { - Ok(indent_columns) => self.apply_autoindents(indent_columns, cx), - Err(indent_columns) => { - self.pending_autoindent = Some(cx.spawn(|this, mut cx| async move { - let indent_columns = indent_columns.await; - this.update(&mut cx, |this, cx| { - this.apply_autoindents(indent_columns, cx); - }); - })); - } - } - } - } - - fn compute_autoindents(&self) -> Option>> { - let max_rows_between_yields = 100; - let snapshot = self.snapshot(); - if snapshot.language.is_none() - || snapshot.tree.is_none() - || self.autoindent_requests.is_empty() - { - return None; - } - - let autoindent_requests = self.autoindent_requests.clone(); - Some(async move { - let mut indent_columns = BTreeMap::new(); - for request in autoindent_requests { - let old_to_new_rows = request - .edited - .to_points(&request.before_edit) - .map(|point| point.row) - .zip(request.edited.to_points(&snapshot).map(|point| point.row)) - .collect::>(); - - let mut old_suggestions = HashMap::default(); - let old_edited_ranges = - contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields); - for old_edited_range in old_edited_ranges { - let suggestions = request - .before_edit - .suggest_autoindents(old_edited_range.clone()) - .into_iter() - .flatten(); - for (old_row, suggestion) in old_edited_range.zip(suggestions) { - let indentation_basis = old_to_new_rows - .get(&suggestion.basis_row) - .and_then(|from_row| old_suggestions.get(from_row).copied()) - .unwrap_or_else(|| { - request - .before_edit - .indent_column_for_line(suggestion.basis_row) - }); - let delta = if suggestion.indent { INDENT_SIZE } else { 0 }; - old_suggestions.insert( - *old_to_new_rows.get(&old_row).unwrap(), - indentation_basis + delta, - ); - } - yield_now().await; - } - - // At this point, old_suggestions contains the suggested indentation for all edited lines with respect 
to the state of the - // buffer before the edit, but keyed by the row for these lines after the edits were applied. - let new_edited_row_ranges = - contiguous_ranges(old_to_new_rows.values().copied(), max_rows_between_yields); - for new_edited_row_range in new_edited_row_ranges { - let suggestions = snapshot - .suggest_autoindents(new_edited_row_range.clone()) - .into_iter() - .flatten(); - for (new_row, suggestion) in new_edited_row_range.zip(suggestions) { - let delta = if suggestion.indent { INDENT_SIZE } else { 0 }; - let new_indentation = indent_columns - .get(&suggestion.basis_row) - .copied() - .unwrap_or_else(|| { - snapshot.indent_column_for_line(suggestion.basis_row) - }) - + delta; - if old_suggestions - .get(&new_row) - .map_or(true, |old_indentation| new_indentation != *old_indentation) - { - indent_columns.insert(new_row, new_indentation); - } - } - yield_now().await; - } - - if let Some(inserted) = request.inserted.as_ref() { - let inserted_row_ranges = contiguous_ranges( - inserted - .to_point_ranges(&snapshot) - .flat_map(|range| range.start.row..range.end.row + 1), - max_rows_between_yields, - ); - for inserted_row_range in inserted_row_ranges { - let suggestions = snapshot - .suggest_autoindents(inserted_row_range.clone()) - .into_iter() - .flatten(); - for (row, suggestion) in inserted_row_range.zip(suggestions) { - let delta = if suggestion.indent { INDENT_SIZE } else { 0 }; - let new_indentation = indent_columns - .get(&suggestion.basis_row) - .copied() - .unwrap_or_else(|| { - snapshot.indent_column_for_line(suggestion.basis_row) - }) - + delta; - indent_columns.insert(row, new_indentation); - } - yield_now().await; - } - } - } - indent_columns - }) - } - - fn apply_autoindents( - &mut self, - indent_columns: BTreeMap, - cx: &mut ModelContext, - ) { - let selection_set_ids = self - .autoindent_requests - .drain(..) 
- .flat_map(|req| req.selection_set_ids.clone()) - .collect::>(); - - self.start_transaction(selection_set_ids.iter().copied()) - .unwrap(); - for (row, indent_column) in &indent_columns { - self.set_indent_column_for_line(*row, *indent_column, cx); - } - - for selection_set_id in &selection_set_ids { - if let Some(set) = self.selections.get(selection_set_id) { - let new_selections = set - .selections - .iter() - .map(|selection| { - let start_point = selection.start.to_point(&self.buffer); - if start_point.column == 0 { - let end_point = selection.end.to_point(&self.buffer); - let delta = Point::new( - 0, - indent_columns.get(&start_point.row).copied().unwrap_or(0), - ); - if delta.column > 0 { - return Selection { - id: selection.id, - goal: selection.goal, - reversed: selection.reversed, - start: self - .anchor_at(start_point + delta, selection.start.bias), - end: self.anchor_at(end_point + delta, selection.end.bias), - }; - } - } - selection.clone() - }) - .collect::>(); - self.update_selection_set(*selection_set_id, new_selections, cx) - .unwrap(); - } - } - - self.end_transaction(selection_set_ids.iter().copied(), cx) - .unwrap(); - } - - pub fn indent_column_for_line(&self, row: u32) -> u32 { - self.content().indent_column_for_line(row) - } - - fn set_indent_column_for_line(&mut self, row: u32, column: u32, cx: &mut ModelContext) { - let current_column = self.indent_column_for_line(row); - if column > current_column { - let offset = self.visible_text.to_offset(Point::new(row, 0)); - self.edit( - [offset..offset], - " ".repeat((column - current_column) as usize), - cx, - ); - } else if column < current_column { - self.edit( - [Point::new(row, 0)..Point::new(row, current_column - column)], - "", - cx, - ); - } - } - - pub fn range_for_syntax_ancestor(&self, range: Range) -> Option> { - if let Some(tree) = self.syntax_tree() { - let root = tree.root_node(); - let range = range.start.to_offset(self)..range.end.to_offset(self); - let mut node = root.descendant_for_byte_range(range.start, range.end); - while node.map_or(false, |n| n.byte_range() == range) { - node = node.unwrap().parent(); - } - node.map(|n| n.byte_range()) - } else { - None - } - } - - pub fn enclosing_bracket_ranges( - &self, - range: Range, - ) -> Option<(Range, Range)> { - let (lang, tree) = self.language.as_ref().zip(self.syntax_tree())?; - let open_capture_ix = lang.brackets_query.capture_index_for_name("open")?; - let close_capture_ix = lang.brackets_query.capture_index_for_name("close")?; - - // Find bracket pairs that *inclusively* contain the given range. - let range = range.start.to_offset(self).saturating_sub(1)..range.end.to_offset(self) + 1; - let mut cursor = QueryCursorHandle::new(); - let matches = cursor.set_byte_range(range).matches( - &lang.brackets_query, - tree.root_node(), - TextProvider(&self.visible_text), - ); - - // Get the ranges of the innermost pair of brackets. - matches - .filter_map(|mat| { - let open = mat.nodes_for_capture_index(open_capture_ix).next()?; - let close = mat.nodes_for_capture_index(close_capture_ix).next()?; - Some((open.byte_range(), close.byte_range())) - }) - .min_by_key(|(open_range, close_range)| close_range.end - open_range.start) - } - - fn diff(&self, new_text: Arc, cx: &AppContext) -> Task { - // TODO: it would be nice to not allocate here. 
-        let old_text = self.text();
-        let base_version = self.version();
-        cx.background().spawn(async move {
-            let changes = TextDiff::from_lines(old_text.as_str(), new_text.as_ref())
-                .iter_all_changes()
-                .map(|c| (c.tag(), c.value().len()))
-                .collect::<Vec<_>>();
-            Diff {
-                base_version,
-                new_text,
-                changes,
-            }
-        })
-    }
-
-    pub fn set_text_from_disk(&self, new_text: Arc<str>, cx: &mut ModelContext<Self>) -> Task<()> {
-        cx.spawn(|this, mut cx| async move {
-            let diff = this
-                .read_with(&cx, |this, cx| this.diff(new_text, cx))
-                .await;
-
-            this.update(&mut cx, |this, cx| {
-                if this.apply_diff(diff, cx) {
-                    this.saved_version = this.version.clone();
-                }
-            });
-        })
-    }
-
-    fn apply_diff(&mut self, diff: Diff, cx: &mut ModelContext<Self>) -> bool {
-        if self.version == diff.base_version {
-            self.start_transaction(None).unwrap();
-            let mut offset = 0;
-            for (tag, len) in diff.changes {
-                let range = offset..(offset + len);
-                match tag {
-                    ChangeTag::Equal => offset += len,
-                    ChangeTag::Delete => self.edit(Some(range), "", cx),
-                    ChangeTag::Insert => {
-                        self.edit(Some(offset..offset), &diff.new_text[range], cx);
-                        offset += len;
-                    }
-                }
-            }
-            self.end_transaction(None, cx).unwrap();
-            true
-        } else {
-            false
-        }
-    }
-
-    pub fn is_dirty(&self) -> bool {
-        self.version > self.saved_version
-            || self.file.as_ref().map_or(false, |file| file.is_deleted())
-    }
-
-    pub fn has_conflict(&self) -> bool {
-        self.version > self.saved_version
-            && self
-                .file
-                .as_ref()
-                .map_or(false, |file| file.mtime() > self.saved_mtime)
-    }
-
-    pub fn start_transaction(
-        &mut self,
-        selection_set_ids: impl IntoIterator<Item = SelectionSetId>,
-    ) -> Result<()> {
-        self.start_transaction_at(selection_set_ids, Instant::now())?;
-        Ok(())
-    }
-
-    fn start_transaction_at(
-        &mut self,
-        selection_set_ids: impl IntoIterator<Item = SelectionSetId>,
-        now: Instant,
-    ) -> Result<()> {
-        self.buffer.start_transaction_at(selection_set_ids, now)?;
-        Ok(())
-    }
-
-    pub fn end_transaction(
-        &mut self,
-        selection_set_ids: impl IntoIterator<Item = SelectionSetId>,
-        cx: &mut ModelContext<Self>,
-    ) -> Result<()> {
-        self.end_transaction_at(selection_set_ids, Instant::now(), cx)
-    }
-
-    fn end_transaction_at(
-        &mut self,
-        selection_set_ids: impl IntoIterator<Item = SelectionSetId>,
-        now: Instant,
-        cx: &mut ModelContext<Self>,
-    ) -> Result<()> {
-        if let Some(start_version) = self.buffer.end_transaction_at(selection_set_ids, now) {
-            cx.notify();
-            let was_dirty = start_version != self.saved_version;
-            let edited = self.edits_since(start_version).next().is_some();
-            if edited {
-                self.did_edit(was_dirty, cx);
-                self.reparse(cx);
-            }
-        }
-        Ok(())
-    }
-
-    pub fn edit<I, S, T>(&mut self, ranges_iter: I, new_text: T, cx: &mut ModelContext<Self>)
-    where
-        I: IntoIterator<Item = Range<S>>,
-        S: ToOffset,
-        T: Into<String>,
-    {
-        self.edit_internal(ranges_iter, new_text, false, cx)
-    }
-
-    pub fn edit_with_autoindent<I, S, T>(
-        &mut self,
-        ranges_iter: I,
-        new_text: T,
-        cx: &mut ModelContext<Self>,
-    ) where
-        I: IntoIterator<Item = Range<S>>,
-        S: ToOffset,
-        T: Into<String>,
-    {
-        self.edit_internal(ranges_iter, new_text, true, cx)
-    }
-
-    pub fn edit_internal<I, S, T>(
-        &mut self,
-        ranges_iter: I,
-        new_text: T,
-        autoindent: bool,
-        cx: &mut ModelContext<Self>,
-    ) where
-        I: IntoIterator<Item = Range<S>>,
-        S: ToOffset,
-        T: Into<String>,
-    {
-        let new_text = new_text.into();
-
-        // Skip invalid ranges and coalesce contiguous ones.
-        let mut ranges: Vec<Range<usize>> = Vec::new();
-        for range in ranges_iter {
-            let range = range.start.to_offset(&*self)..range.end.to_offset(&*self);
-            if !new_text.is_empty() || !range.is_empty() {
-                if let Some(prev_range) = ranges.last_mut() {
-                    if prev_range.end >= range.start {
-                        prev_range.end = cmp::max(prev_range.end, range.end);
-                    } else {
-                        ranges.push(range);
-                    }
-                } else {
-                    ranges.push(range);
-                }
-            }
-        }
-        if ranges.is_empty() {
-            return;
-        }
-
-        self.start_transaction(None).unwrap();
-        self.pending_autoindent.take();
-        let autoindent_request = if autoindent && self.language.is_some() {
-            let before_edit = self.snapshot();
-            let edited = self.content().anchor_set(ranges.iter().filter_map(|range| {
-                let start = range.start.to_point(&*self);
-                if new_text.starts_with('\n') && start.column == self.line_len(start.row) {
-                    None
-                } else {
-                    Some((range.start, Bias::Left))
-                }
-            }));
-            Some((before_edit, edited))
-        } else {
-            None
-        };
-
-        let first_newline_ix = new_text.find('\n');
-        let new_text_len = new_text.len();
-
-        let edit = self.buffer.edit(ranges.iter().cloned(), new_text);
-
-        if let Some((before_edit, edited)) = autoindent_request {
-            let mut inserted = None;
-            if let Some(first_newline_ix) = first_newline_ix {
-                let mut delta = 0isize;
-                inserted = Some(self.content().anchor_range_set(ranges.iter().map(|range| {
-                    let start = (delta + range.start as isize) as usize + first_newline_ix + 1;
-                    let end = (delta + range.start as isize) as usize + new_text_len;
-                    delta += (range.end as isize - range.start as isize) + new_text_len as isize;
-                    (start, Bias::Left)..(end, Bias::Right)
-                })));
-            }
-
-            let selection_set_ids = self
-                .buffer
-                .peek_undo_stack()
-                .unwrap()
-                .starting_selection_set_ids()
-                .collect();
-            self.autoindent_requests.push(Arc::new(AutoindentRequest {
-                selection_set_ids,
-                before_edit,
-                edited,
-                inserted,
-            }));
-        }
-
-        self.end_transaction(None, cx).unwrap();
-        self.send_operation(Operation::Edit(edit), cx);
-    }
-
-    fn did_edit(&self, was_dirty: bool, cx: &mut ModelContext<Self>) {
-        cx.emit(Event::Edited);
-        if !was_dirty {
-            cx.emit(Event::Dirtied);
-        }
-    }
-
-    pub fn add_selection_set(
-        &mut self,
-        selections: impl Into<Arc<[Selection]>>,
-        cx: &mut ModelContext<Self>,
-    ) -> SelectionSetId {
-        let operation = self.buffer.add_selection_set(selections);
-        if let Operation::UpdateSelections { set_id, ..
} = &operation { - let set_id = *set_id; - cx.notify(); - self.send_operation(operation, cx); - set_id - } else { - unreachable!() - } - } - - pub fn update_selection_set( - &mut self, - set_id: SelectionSetId, - selections: impl Into>, - cx: &mut ModelContext, - ) -> Result<()> { - let operation = self.buffer.update_selection_set(set_id, selections)?; - cx.notify(); - self.send_operation(operation, cx); - Ok(()) - } - - pub fn set_active_selection_set( - &mut self, - set_id: Option, - cx: &mut ModelContext, - ) -> Result<()> { - let operation = self.buffer.set_active_selection_set(set_id)?; - self.send_operation(operation, cx); - Ok(()) - } - - pub fn remove_selection_set( - &mut self, - set_id: SelectionSetId, - cx: &mut ModelContext, - ) -> Result<()> { - let operation = self.buffer.remove_selection_set(set_id)?; - cx.notify(); - self.send_operation(operation, cx); - Ok(()) - } - - pub fn apply_ops>( - &mut self, - ops: I, - cx: &mut ModelContext, - ) -> Result<()> { - self.pending_autoindent.take(); - - let was_dirty = self.is_dirty(); - let old_version = self.version.clone(); - - self.buffer.apply_ops(ops)?; - - cx.notify(); - if self.edits_since(old_version).next().is_some() { - self.did_edit(was_dirty, cx); - self.reparse(cx); - } - - Ok(()) - } - - #[cfg(not(test))] - pub fn send_operation(&mut self, operation: Operation, cx: &mut ModelContext) { - if let Some(file) = &self.file { - file.buffer_updated(self.remote_id, operation, cx.as_mut()); - } - } - - #[cfg(test)] - pub fn send_operation(&mut self, operation: Operation, _: &mut ModelContext) { - self.operations.push(operation); - } - - pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut ModelContext) { - self.buffer.remove_peer(replica_id); - cx.notify(); - } - - pub fn undo(&mut self, cx: &mut ModelContext) { - let was_dirty = self.is_dirty(); - let old_version = self.version.clone(); - - for operation in self.buffer.undo() { - self.send_operation(operation, cx); - } - - cx.notify(); - if self.edits_since(old_version).next().is_some() { - self.did_edit(was_dirty, cx); - self.reparse(cx); - } - } - - pub fn redo(&mut self, cx: &mut ModelContext) { - let was_dirty = self.is_dirty(); - let old_version = self.version.clone(); - - for operation in self.buffer.redo() { - self.send_operation(operation, cx); - } - - cx.notify(); - if self.edits_since(old_version).next().is_some() { - self.did_edit(was_dirty, cx); - self.reparse(cx); - } - } -} - -#[cfg(any(test, feature = "test-support"))] -impl Buffer { - pub fn randomly_edit(&mut self, rng: &mut T, old_range_count: usize) - where - T: rand::Rng, - { - self.buffer.randomly_edit(rng, old_range_count); - } - - pub fn randomly_mutate(&mut self, rng: &mut T) - where - T: rand::Rng, - { - self.buffer.randomly_mutate(rng); - } -} - -#[cfg(any(test, feature = "test-support"))] -impl TextBuffer { - fn random_byte_range(&mut self, start_offset: usize, rng: &mut impl rand::Rng) -> Range { - let end = self.clip_offset(rng.gen_range(start_offset..=self.len()), Bias::Right); - let start = self.clip_offset(rng.gen_range(start_offset..=end), Bias::Right); - start..end - } - - pub fn randomly_edit( - &mut self, - rng: &mut T, - old_range_count: usize, - ) -> (Vec>, String, Operation) - where - T: rand::Rng, - { - let mut old_ranges: Vec> = Vec::new(); - for _ in 0..old_range_count { - let last_end = old_ranges.last().map_or(0, |last_range| last_range.end + 1); - if last_end > self.len() { - break; - } - old_ranges.push(self.random_byte_range(last_end, rng)); - } - let new_text_len = 
rng.gen_range(0..10); - let new_text: String = crate::random_char_iter::RandomCharIter::new(&mut *rng) - .take(new_text_len) - .collect(); - log::info!( - "mutating buffer {} at {:?}: {:?}", - self.replica_id, - old_ranges, - new_text - ); - let op = self.edit(old_ranges.iter().cloned(), new_text.as_str()); - (old_ranges, new_text, Operation::Edit(op)) - } - - pub fn randomly_mutate(&mut self, rng: &mut T) -> Vec - where - T: rand::Rng, - { - use rand::prelude::*; - - let mut ops = vec![self.randomly_edit(rng, 5).2]; - - // Randomly add, remove or mutate selection sets. - let replica_selection_sets = &self - .selection_sets() - .map(|(set_id, _)| *set_id) - .filter(|set_id| self.replica_id == set_id.replica_id) - .collect::>(); - let set_id = replica_selection_sets.choose(rng); - if set_id.is_some() && rng.gen_bool(1.0 / 6.0) { - ops.push(self.remove_selection_set(*set_id.unwrap()).unwrap()); - } else { - let mut ranges = Vec::new(); - for _ in 0..5 { - ranges.push(self.random_byte_range(0, rng)); - } - let new_selections = self.selections_from_ranges(ranges).unwrap(); - - let op = if set_id.is_none() || rng.gen_bool(1.0 / 5.0) { - self.add_selection_set(new_selections) - } else { - self.update_selection_set(*set_id.unwrap(), new_selections) - .unwrap() - }; - ops.push(op); - } - - ops - } - - pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng) -> Vec { - use rand::prelude::*; - - let mut ops = Vec::new(); - for _ in 0..rng.gen_range(1..=5) { - if let Some(transaction) = self.history.undo_stack.choose(rng).cloned() { - log::info!( - "undoing buffer {} transaction {:?}", - self.replica_id, - transaction - ); - ops.push(self.undo_or_redo(transaction).unwrap()); - } - } - ops - } - - fn selections_from_ranges(&self, ranges: I) -> Result> - where - I: IntoIterator>, - { - use std::sync::atomic::{self, AtomicUsize}; - - static NEXT_SELECTION_ID: AtomicUsize = AtomicUsize::new(0); - - let mut ranges = ranges.into_iter().collect::>(); - ranges.sort_unstable_by_key(|range| range.start); + let mut ranges = ranges.into_iter().collect::>(); + ranges.sort_unstable_by_key(|range| range.start); let mut selections = Vec::with_capacity(ranges.len()); for range in ranges { @@ -2580,52 +1518,18 @@ impl TextBuffer { } } -impl Clone for Buffer { - fn clone(&self) -> Self { - Self { - buffer: self.buffer.clone(), - saved_version: self.saved_version.clone(), - saved_mtime: self.saved_mtime, - file: self.file.as_ref().map(|f| f.boxed_clone()), - language: self.language.clone(), - syntax_tree: Mutex::new(self.syntax_tree.lock().clone()), - parsing_in_background: false, - sync_parse_timeout: self.sync_parse_timeout, - parse_count: self.parse_count, - autoindent_requests: Default::default(), - pending_autoindent: Default::default(), - - #[cfg(test)] - operations: self.operations.clone(), - } - } -} - +#[derive(Clone)] pub struct Snapshot { visible_text: Rope, fragments: SumTree, version: clock::Global, - tree: Option, - is_parsing: bool, - language: Option>, - query_cursor: QueryCursorHandle, } -impl Clone for Snapshot { - fn clone(&self) -> Self { - Self { - visible_text: self.visible_text.clone(), - fragments: self.fragments.clone(), - version: self.version.clone(), - tree: self.tree.clone(), - is_parsing: self.is_parsing, - language: self.language.clone(), - query_cursor: QueryCursorHandle::new(), - } +impl Snapshot { + pub fn as_rope(&self) -> &Rope { + &self.visible_text } -} -impl Snapshot { pub fn len(&self) -> usize { self.visible_text.len() } @@ -2638,120 +1542,6 @@ impl Snapshot { 
self.content().indent_column_for_line(row) } - fn suggest_autoindents<'a>( - &'a self, - row_range: Range, - ) -> Option + 'a> { - let mut query_cursor = QueryCursorHandle::new(); - if let Some((language, tree)) = self.language.as_ref().zip(self.tree.as_ref()) { - let prev_non_blank_row = self.prev_non_blank_row(row_range.start); - - // Get the "indentation ranges" that intersect this row range. - let indent_capture_ix = language.indents_query.capture_index_for_name("indent"); - let end_capture_ix = language.indents_query.capture_index_for_name("end"); - query_cursor.set_point_range( - Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0).into() - ..Point::new(row_range.end, 0).into(), - ); - let mut indentation_ranges = Vec::<(Range, &'static str)>::new(); - for mat in query_cursor.matches( - &language.indents_query, - tree.root_node(), - TextProvider(&self.visible_text), - ) { - let mut node_kind = ""; - let mut start: Option = None; - let mut end: Option = None; - for capture in mat.captures { - if Some(capture.index) == indent_capture_ix { - node_kind = capture.node.kind(); - start.get_or_insert(capture.node.start_position().into()); - end.get_or_insert(capture.node.end_position().into()); - } else if Some(capture.index) == end_capture_ix { - end = Some(capture.node.start_position().into()); - } - } - - if let Some((start, end)) = start.zip(end) { - if start.row == end.row { - continue; - } - - let range = start..end; - match indentation_ranges.binary_search_by_key(&range.start, |r| r.0.start) { - Err(ix) => indentation_ranges.insert(ix, (range, node_kind)), - Ok(ix) => { - let prev_range = &mut indentation_ranges[ix]; - prev_range.0.end = prev_range.0.end.max(range.end); - } - } - } - } - - let mut prev_row = prev_non_blank_row.unwrap_or(0); - Some(row_range.map(move |row| { - let row_start = Point::new(row, self.indent_column_for_line(row)); - - let mut indent_from_prev_row = false; - let mut outdent_to_row = u32::MAX; - for (range, _node_kind) in &indentation_ranges { - if range.start.row >= row { - break; - } - - if range.start.row == prev_row && range.end > row_start { - indent_from_prev_row = true; - } - if range.end.row >= prev_row && range.end <= row_start { - outdent_to_row = outdent_to_row.min(range.start.row); - } - } - - let suggestion = if outdent_to_row == prev_row { - IndentSuggestion { - basis_row: prev_row, - indent: false, - } - } else if indent_from_prev_row { - IndentSuggestion { - basis_row: prev_row, - indent: true, - } - } else if outdent_to_row < prev_row { - IndentSuggestion { - basis_row: outdent_to_row, - indent: false, - } - } else { - IndentSuggestion { - basis_row: prev_row, - indent: false, - } - }; - - prev_row = row; - suggestion - })) - } else { - None - } - } - - fn prev_non_blank_row(&self, mut row: u32) -> Option { - while row > 0 { - row -= 1; - if !self.is_line_blank(row) { - return Some(row); - } - } - None - } - - fn is_line_blank(&self, row: u32) -> bool { - self.text_for_range(Point::new(row, 0)..Point::new(row, self.line_len(row))) - .all(|chunk| chunk.matches(|c: char| !c.is_whitespace()).next().is_none()) - } - pub fn text(&self) -> Rope { self.visible_text.clone() } @@ -2769,38 +1559,6 @@ impl Snapshot { self.visible_text.chunks_in_range(range) } - pub fn highlighted_text_for_range( - &mut self, - range: Range, - ) -> HighlightedChunks { - let range = range.start.to_offset(&*self)..range.end.to_offset(&*self); - let chunks = self.visible_text.chunks_in_range(range.clone()); - if let Some((language, tree)) = 
self.language.as_ref().zip(self.tree.as_ref()) { - let captures = self.query_cursor.set_byte_range(range.clone()).captures( - &language.highlights_query, - tree.root_node(), - TextProvider(&self.visible_text), - ); - - HighlightedChunks { - range, - chunks, - highlights: Some(Highlights { - captures, - next_capture: None, - stack: Default::default(), - highlight_map: language.highlight_map(), - }), - } - } else { - HighlightedChunks { - range, - chunks, - highlights: None, - } - } - } - pub fn text_summary_for_range(&self, range: Range) -> TextSummary where T: ToOffset, @@ -2837,7 +1595,7 @@ impl Snapshot { self.content().anchor_at(position, Bias::Right) } - fn content(&self) -> Content { + pub fn content(&self) -> Content { self.into() } } @@ -2858,26 +1616,6 @@ impl<'a> From<&'a Snapshot> for Content<'a> { } } -impl<'a> From<&'a Buffer> for Content<'a> { - fn from(buffer: &'a Buffer) -> Self { - Self { - visible_text: &buffer.visible_text, - fragments: &buffer.fragments, - version: &buffer.version, - } - } -} - -impl<'a> From<&'a mut Buffer> for Content<'a> { - fn from(buffer: &'a mut Buffer) -> Self { - Self { - visible_text: &buffer.visible_text, - fragments: &buffer.fragments, - version: &buffer.version, - } - } -} - impl<'a> From<&'a TextBuffer> for Content<'a> { fn from(buffer: &'a TextBuffer) -> Self { Self { @@ -3123,12 +1861,6 @@ impl<'a> Content<'a> { } } -#[derive(Debug)] -struct IndentSuggestion { - basis_row: u32, - indent: bool, -} - struct RopeBuilder<'a> { old_visible_cursor: rope::Cursor<'a>, old_deleted_cursor: rope::Cursor<'a>, @@ -3182,27 +1914,6 @@ impl<'a> RopeBuilder<'a> { } } -#[derive(Clone, Debug, Eq, PartialEq)] -pub enum Event { - Edited, - Dirtied, - Saved, - FileHandleChanged, - Reloaded, - Reparsed, - Closed, -} - -impl Entity for Buffer { - type Event = Event; - - fn release(&mut self, cx: &mut gpui::MutableAppContext) { - if let Some(file) = self.file.as_ref() { - file.buffer_removed(self.remote_id, cx); - } - } -} - impl<'a, F: Fn(&FragmentSummary) -> bool> Iterator for Edits<'a, F> { type Item = Edit; @@ -3267,126 +1978,6 @@ impl<'a, F: Fn(&FragmentSummary) -> bool> Iterator for Edits<'a, F> { } } -struct ByteChunks<'a>(rope::Chunks<'a>); - -impl<'a> Iterator for ByteChunks<'a> { - type Item = &'a [u8]; - - fn next(&mut self) -> Option { - self.0.next().map(str::as_bytes) - } -} - -struct TextProvider<'a>(&'a Rope); - -impl<'a> tree_sitter::TextProvider<'a> for TextProvider<'a> { - type I = ByteChunks<'a>; - - fn text(&mut self, node: tree_sitter::Node) -> Self::I { - ByteChunks(self.0.chunks_in_range(node.byte_range())) - } -} - -struct Highlights<'a> { - captures: tree_sitter::QueryCaptures<'a, 'a, TextProvider<'a>>, - next_capture: Option<(tree_sitter::QueryMatch<'a, 'a>, usize)>, - stack: Vec<(usize, HighlightId)>, - highlight_map: HighlightMap, -} - -pub struct HighlightedChunks<'a> { - range: Range, - chunks: Chunks<'a>, - highlights: Option>, -} - -impl<'a> HighlightedChunks<'a> { - pub fn seek(&mut self, offset: usize) { - self.range.start = offset; - self.chunks.seek(self.range.start); - if let Some(highlights) = self.highlights.as_mut() { - highlights - .stack - .retain(|(end_offset, _)| *end_offset > offset); - if let Some((mat, capture_ix)) = &highlights.next_capture { - let capture = mat.captures[*capture_ix as usize]; - if offset >= capture.node.start_byte() { - let next_capture_end = capture.node.end_byte(); - if offset < next_capture_end { - highlights.stack.push(( - next_capture_end, - highlights.highlight_map.get(capture.index), - )); - 
} - highlights.next_capture.take(); - } - } - highlights.captures.set_byte_range(self.range.clone()); - } - } - - pub fn offset(&self) -> usize { - self.range.start - } -} - -impl<'a> Iterator for HighlightedChunks<'a> { - type Item = (&'a str, HighlightId); - - fn next(&mut self) -> Option { - let mut next_capture_start = usize::MAX; - - if let Some(highlights) = self.highlights.as_mut() { - while let Some((parent_capture_end, _)) = highlights.stack.last() { - if *parent_capture_end <= self.range.start { - highlights.stack.pop(); - } else { - break; - } - } - - if highlights.next_capture.is_none() { - highlights.next_capture = highlights.captures.next(); - } - - while let Some((mat, capture_ix)) = highlights.next_capture.as_ref() { - let capture = mat.captures[*capture_ix as usize]; - if self.range.start < capture.node.start_byte() { - next_capture_start = capture.node.start_byte(); - break; - } else { - let style_id = highlights.highlight_map.get(capture.index); - highlights.stack.push((capture.node.end_byte(), style_id)); - highlights.next_capture = highlights.captures.next(); - } - } - } - - if let Some(chunk) = self.chunks.peek() { - let chunk_start = self.range.start; - let mut chunk_end = (self.chunks.offset() + chunk.len()).min(next_capture_start); - let mut style_id = HighlightId::default(); - if let Some((parent_capture_end, parent_style_id)) = - self.highlights.as_ref().and_then(|h| h.stack.last()) - { - chunk_end = chunk_end.min(*parent_capture_end); - style_id = *parent_style_id; - } - - let slice = - &chunk[chunk_start - self.chunks.offset()..chunk_end - self.chunks.offset()]; - self.range.start = chunk_end; - if self.range.start == self.chunks.offset() + chunk.len() { - self.chunks.next().unwrap(); - } - - Some((slice, style_id)) - } else { - None - } - } -} - impl Fragment { fn is_visible(&self, undos: &UndoMap) -> bool { !undos.is_undone(self.timestamp.local()) @@ -3858,29 +2449,3 @@ impl ToPoint for usize { content.into().visible_text.to_point(*self) } } - -fn contiguous_ranges( - values: impl IntoIterator, - max_len: usize, -) -> impl Iterator> { - let mut values = values.into_iter(); - let mut current_range: Option> = None; - std::iter::from_fn(move || loop { - if let Some(value) = values.next() { - if let Some(range) = &mut current_range { - if value == range.end && range.len() < max_len { - range.end += 1; - continue; - } - } - - let prev_range = current_range.clone(); - current_range = Some(value..(value + 1)); - if prev_range.is_some() { - return prev_range; - } - } else { - return current_range.take(); - } - }) -} diff --git a/crates/buffer/src/selection.rs b/crates/buffer/src/selection.rs index 98f34865f55a4544a749ce2a8d5c9cb305ab9394..596c8dac56756031220e11aeb2138c2dbb442e1d 100644 --- a/crates/buffer/src/selection.rs +++ b/crates/buffer/src/selection.rs @@ -1,4 +1,4 @@ -use crate::{Anchor, Buffer, Point, ToOffset as _, ToPoint as _}; +use crate::{Anchor, Point, TextBuffer, ToOffset as _, ToPoint as _}; use std::{cmp::Ordering, mem, ops::Range}; pub type SelectionSetId = clock::Lamport; @@ -29,7 +29,7 @@ impl Selection { } } - pub fn set_head(&mut self, buffer: &Buffer, cursor: Anchor) { + pub fn set_head(&mut self, buffer: &TextBuffer, cursor: Anchor) { if cursor.cmp(self.tail(), buffer).unwrap() < Ordering::Equal { if !self.reversed { mem::swap(&mut self.start, &mut self.end); @@ -53,7 +53,7 @@ impl Selection { } } - pub fn point_range(&self, buffer: &Buffer) -> Range { + pub fn point_range(&self, buffer: &TextBuffer) -> Range { let start = 
self.start.to_point(buffer); let end = self.end.to_point(buffer); if self.reversed { @@ -63,7 +63,7 @@ impl Selection { } } - pub fn offset_range(&self, buffer: &Buffer) -> Range { + pub fn offset_range(&self, buffer: &TextBuffer) -> Range { let start = self.start.to_offset(buffer); let end = self.end.to_offset(buffer); if self.reversed { diff --git a/crates/buffer/src/tests.rs b/crates/buffer/src/tests.rs index c1b6050de376886d87bb5bb447f1f25bdfde3e22..391dbf5ce6574f70381723d2e9f61c4359a92775 100644 --- a/crates/buffer/src/tests.rs +++ b/crates/buffer/src/tests.rs @@ -1,2 +1,658 @@ -mod buffer; -mod syntax; +use super::*; +use clock::ReplicaId; +use rand::prelude::*; +use std::{ + cmp::Ordering, + env, + iter::Iterator, + time::{Duration, Instant}, +}; + +#[test] +fn test_edit() { + let mut buffer = TextBuffer::new(0, 0, History::new("abc".into())); + assert_eq!(buffer.text(), "abc"); + buffer.edit(vec![3..3], "def"); + assert_eq!(buffer.text(), "abcdef"); + buffer.edit(vec![0..0], "ghi"); + assert_eq!(buffer.text(), "ghiabcdef"); + buffer.edit(vec![5..5], "jkl"); + assert_eq!(buffer.text(), "ghiabjklcdef"); + buffer.edit(vec![6..7], ""); + assert_eq!(buffer.text(), "ghiabjlcdef"); + buffer.edit(vec![4..9], "mno"); + assert_eq!(buffer.text(), "ghiamnoef"); +} + +#[gpui::test(iterations = 100)] +fn test_random_edits(mut rng: StdRng) { + let operations = env::var("OPERATIONS") + .map(|i| i.parse().expect("invalid `OPERATIONS` variable")) + .unwrap_or(10); + + let reference_string_len = rng.gen_range(0..3); + let mut reference_string = RandomCharIter::new(&mut rng) + .take(reference_string_len) + .collect::(); + let mut buffer = TextBuffer::new(0, 0, History::new(reference_string.clone().into())); + buffer.history.group_interval = Duration::from_millis(rng.gen_range(0..=200)); + let mut buffer_versions = Vec::new(); + log::info!( + "buffer text {:?}, version: {:?}", + buffer.text(), + buffer.version() + ); + + for _i in 0..operations { + let (old_ranges, new_text, _) = buffer.randomly_edit(&mut rng, 5); + for old_range in old_ranges.iter().rev() { + reference_string.replace_range(old_range.clone(), &new_text); + } + assert_eq!(buffer.text(), reference_string); + log::info!( + "buffer text {:?}, version: {:?}", + buffer.text(), + buffer.version() + ); + + if rng.gen_bool(0.25) { + buffer.randomly_undo_redo(&mut rng); + reference_string = buffer.text(); + log::info!( + "buffer text {:?}, version: {:?}", + buffer.text(), + buffer.version() + ); + } + + let range = buffer.random_byte_range(0, &mut rng); + assert_eq!( + buffer.text_summary_for_range(range.clone()), + TextSummary::from(&reference_string[range]) + ); + + if rng.gen_bool(0.3) { + buffer_versions.push(buffer.clone()); + } + } + + for mut old_buffer in buffer_versions { + let edits = buffer + .edits_since(old_buffer.version.clone()) + .collect::>(); + + log::info!( + "mutating old buffer version {:?}, text: {:?}, edits since: {:?}", + old_buffer.version(), + old_buffer.text(), + edits, + ); + + let mut delta = 0_isize; + for edit in edits { + let old_start = (edit.old_bytes.start as isize + delta) as usize; + let new_text: String = buffer.text_for_range(edit.new_bytes.clone()).collect(); + old_buffer.edit(Some(old_start..old_start + edit.deleted_bytes()), new_text); + delta += edit.delta(); + } + assert_eq!(old_buffer.text(), buffer.text()); + } +} + +#[test] +fn test_line_len() { + let mut buffer = TextBuffer::new(0, 0, History::new("".into())); + buffer.edit(vec![0..0], "abcd\nefg\nhij"); + buffer.edit(vec![12..12], "kl\nmno"); + 
buffer.edit(vec![18..18], "\npqrs\n"); + buffer.edit(vec![18..21], "\nPQ"); + + assert_eq!(buffer.line_len(0), 4); + assert_eq!(buffer.line_len(1), 3); + assert_eq!(buffer.line_len(2), 5); + assert_eq!(buffer.line_len(3), 3); + assert_eq!(buffer.line_len(4), 4); + assert_eq!(buffer.line_len(5), 0); +} + +#[test] +fn test_text_summary_for_range() { + let buffer = TextBuffer::new(0, 0, History::new("ab\nefg\nhklm\nnopqrs\ntuvwxyz".into())); + assert_eq!( + buffer.text_summary_for_range(1..3), + TextSummary { + bytes: 2, + lines: Point::new(1, 0), + first_line_chars: 1, + last_line_chars: 0, + longest_row: 0, + longest_row_chars: 1, + } + ); + assert_eq!( + buffer.text_summary_for_range(1..12), + TextSummary { + bytes: 11, + lines: Point::new(3, 0), + first_line_chars: 1, + last_line_chars: 0, + longest_row: 2, + longest_row_chars: 4, + } + ); + assert_eq!( + buffer.text_summary_for_range(0..20), + TextSummary { + bytes: 20, + lines: Point::new(4, 1), + first_line_chars: 2, + last_line_chars: 1, + longest_row: 3, + longest_row_chars: 6, + } + ); + assert_eq!( + buffer.text_summary_for_range(0..22), + TextSummary { + bytes: 22, + lines: Point::new(4, 3), + first_line_chars: 2, + last_line_chars: 3, + longest_row: 3, + longest_row_chars: 6, + } + ); + assert_eq!( + buffer.text_summary_for_range(7..22), + TextSummary { + bytes: 15, + lines: Point::new(2, 3), + first_line_chars: 4, + last_line_chars: 3, + longest_row: 1, + longest_row_chars: 6, + } + ); +} + +#[test] +fn test_chars_at() { + let mut buffer = TextBuffer::new(0, 0, History::new("".into())); + buffer.edit(vec![0..0], "abcd\nefgh\nij"); + buffer.edit(vec![12..12], "kl\nmno"); + buffer.edit(vec![18..18], "\npqrs"); + buffer.edit(vec![18..21], "\nPQ"); + + let chars = buffer.chars_at(Point::new(0, 0)); + assert_eq!(chars.collect::(), "abcd\nefgh\nijkl\nmno\nPQrs"); + + let chars = buffer.chars_at(Point::new(1, 0)); + assert_eq!(chars.collect::(), "efgh\nijkl\nmno\nPQrs"); + + let chars = buffer.chars_at(Point::new(2, 0)); + assert_eq!(chars.collect::(), "ijkl\nmno\nPQrs"); + + let chars = buffer.chars_at(Point::new(3, 0)); + assert_eq!(chars.collect::(), "mno\nPQrs"); + + let chars = buffer.chars_at(Point::new(4, 0)); + assert_eq!(chars.collect::(), "PQrs"); + + // Regression test: + let mut buffer = TextBuffer::new(0, 0, History::new("".into())); + buffer.edit(vec![0..0], "[workspace]\nmembers = [\n \"xray_core\",\n \"xray_server\",\n \"xray_cli\",\n \"xray_wasm\",\n]\n"); + buffer.edit(vec![60..60], "\n"); + + let chars = buffer.chars_at(Point::new(6, 0)); + assert_eq!(chars.collect::(), " \"xray_wasm\",\n]\n"); +} + +#[test] +fn test_anchors() { + let mut buffer = TextBuffer::new(0, 0, History::new("".into())); + buffer.edit(vec![0..0], "abc"); + let left_anchor = buffer.anchor_before(2); + let right_anchor = buffer.anchor_after(2); + + buffer.edit(vec![1..1], "def\n"); + assert_eq!(buffer.text(), "adef\nbc"); + assert_eq!(left_anchor.to_offset(&buffer), 6); + assert_eq!(right_anchor.to_offset(&buffer), 6); + assert_eq!(left_anchor.to_point(&buffer), Point { row: 1, column: 1 }); + assert_eq!(right_anchor.to_point(&buffer), Point { row: 1, column: 1 }); + + buffer.edit(vec![2..3], ""); + assert_eq!(buffer.text(), "adf\nbc"); + assert_eq!(left_anchor.to_offset(&buffer), 5); + assert_eq!(right_anchor.to_offset(&buffer), 5); + assert_eq!(left_anchor.to_point(&buffer), Point { row: 1, column: 1 }); + assert_eq!(right_anchor.to_point(&buffer), Point { row: 1, column: 1 }); + + buffer.edit(vec![5..5], "ghi\n"); + assert_eq!(buffer.text(), 
"adf\nbghi\nc"); + assert_eq!(left_anchor.to_offset(&buffer), 5); + assert_eq!(right_anchor.to_offset(&buffer), 9); + assert_eq!(left_anchor.to_point(&buffer), Point { row: 1, column: 1 }); + assert_eq!(right_anchor.to_point(&buffer), Point { row: 2, column: 0 }); + + buffer.edit(vec![7..9], ""); + assert_eq!(buffer.text(), "adf\nbghc"); + assert_eq!(left_anchor.to_offset(&buffer), 5); + assert_eq!(right_anchor.to_offset(&buffer), 7); + assert_eq!(left_anchor.to_point(&buffer), Point { row: 1, column: 1 },); + assert_eq!(right_anchor.to_point(&buffer), Point { row: 1, column: 3 }); + + // Ensure anchoring to a point is equivalent to anchoring to an offset. + assert_eq!( + buffer.anchor_before(Point { row: 0, column: 0 }), + buffer.anchor_before(0) + ); + assert_eq!( + buffer.anchor_before(Point { row: 0, column: 1 }), + buffer.anchor_before(1) + ); + assert_eq!( + buffer.anchor_before(Point { row: 0, column: 2 }), + buffer.anchor_before(2) + ); + assert_eq!( + buffer.anchor_before(Point { row: 0, column: 3 }), + buffer.anchor_before(3) + ); + assert_eq!( + buffer.anchor_before(Point { row: 1, column: 0 }), + buffer.anchor_before(4) + ); + assert_eq!( + buffer.anchor_before(Point { row: 1, column: 1 }), + buffer.anchor_before(5) + ); + assert_eq!( + buffer.anchor_before(Point { row: 1, column: 2 }), + buffer.anchor_before(6) + ); + assert_eq!( + buffer.anchor_before(Point { row: 1, column: 3 }), + buffer.anchor_before(7) + ); + assert_eq!( + buffer.anchor_before(Point { row: 1, column: 4 }), + buffer.anchor_before(8) + ); + + // Comparison between anchors. + let anchor_at_offset_0 = buffer.anchor_before(0); + let anchor_at_offset_1 = buffer.anchor_before(1); + let anchor_at_offset_2 = buffer.anchor_before(2); + + assert_eq!( + anchor_at_offset_0 + .cmp(&anchor_at_offset_0, &buffer) + .unwrap(), + Ordering::Equal + ); + assert_eq!( + anchor_at_offset_1 + .cmp(&anchor_at_offset_1, &buffer) + .unwrap(), + Ordering::Equal + ); + assert_eq!( + anchor_at_offset_2 + .cmp(&anchor_at_offset_2, &buffer) + .unwrap(), + Ordering::Equal + ); + + assert_eq!( + anchor_at_offset_0 + .cmp(&anchor_at_offset_1, &buffer) + .unwrap(), + Ordering::Less + ); + assert_eq!( + anchor_at_offset_1 + .cmp(&anchor_at_offset_2, &buffer) + .unwrap(), + Ordering::Less + ); + assert_eq!( + anchor_at_offset_0 + .cmp(&anchor_at_offset_2, &buffer) + .unwrap(), + Ordering::Less + ); + + assert_eq!( + anchor_at_offset_1 + .cmp(&anchor_at_offset_0, &buffer) + .unwrap(), + Ordering::Greater + ); + assert_eq!( + anchor_at_offset_2 + .cmp(&anchor_at_offset_1, &buffer) + .unwrap(), + Ordering::Greater + ); + assert_eq!( + anchor_at_offset_2 + .cmp(&anchor_at_offset_0, &buffer) + .unwrap(), + Ordering::Greater + ); +} + +#[test] +fn test_anchors_at_start_and_end() { + let mut buffer = TextBuffer::new(0, 0, History::new("".into())); + let before_start_anchor = buffer.anchor_before(0); + let after_end_anchor = buffer.anchor_after(0); + + buffer.edit(vec![0..0], "abc"); + assert_eq!(buffer.text(), "abc"); + assert_eq!(before_start_anchor.to_offset(&buffer), 0); + assert_eq!(after_end_anchor.to_offset(&buffer), 3); + + let after_start_anchor = buffer.anchor_after(0); + let before_end_anchor = buffer.anchor_before(3); + + buffer.edit(vec![3..3], "def"); + buffer.edit(vec![0..0], "ghi"); + assert_eq!(buffer.text(), "ghiabcdef"); + assert_eq!(before_start_anchor.to_offset(&buffer), 0); + assert_eq!(after_start_anchor.to_offset(&buffer), 3); + assert_eq!(before_end_anchor.to_offset(&buffer), 6); + 
assert_eq!(after_end_anchor.to_offset(&buffer), 9); +} + +#[test] +fn test_undo_redo() { + let mut buffer = TextBuffer::new(0, 0, History::new("1234".into())); + // Set group interval to zero so as to not group edits in the undo stack. + buffer.history.group_interval = Duration::from_secs(0); + + buffer.edit(vec![1..1], "abx"); + buffer.edit(vec![3..4], "yzef"); + buffer.edit(vec![3..5], "cd"); + assert_eq!(buffer.text(), "1abcdef234"); + + let transactions = buffer.history.undo_stack.clone(); + assert_eq!(transactions.len(), 3); + + buffer.undo_or_redo(transactions[0].clone()).unwrap(); + assert_eq!(buffer.text(), "1cdef234"); + buffer.undo_or_redo(transactions[0].clone()).unwrap(); + assert_eq!(buffer.text(), "1abcdef234"); + + buffer.undo_or_redo(transactions[1].clone()).unwrap(); + assert_eq!(buffer.text(), "1abcdx234"); + buffer.undo_or_redo(transactions[2].clone()).unwrap(); + assert_eq!(buffer.text(), "1abx234"); + buffer.undo_or_redo(transactions[1].clone()).unwrap(); + assert_eq!(buffer.text(), "1abyzef234"); + buffer.undo_or_redo(transactions[2].clone()).unwrap(); + assert_eq!(buffer.text(), "1abcdef234"); + + buffer.undo_or_redo(transactions[2].clone()).unwrap(); + assert_eq!(buffer.text(), "1abyzef234"); + buffer.undo_or_redo(transactions[0].clone()).unwrap(); + assert_eq!(buffer.text(), "1yzef234"); + buffer.undo_or_redo(transactions[1].clone()).unwrap(); + assert_eq!(buffer.text(), "1234"); +} + +#[test] +fn test_history() { + let mut now = Instant::now(); + let mut buffer = TextBuffer::new(0, 0, History::new("123456".into())); + + let set_id = if let Operation::UpdateSelections { set_id, .. } = + buffer.add_selection_set(buffer.selections_from_ranges(vec![4..4]).unwrap()) + { + set_id + } else { + unreachable!() + }; + buffer.start_transaction_at(Some(set_id), now).unwrap(); + buffer.edit(vec![2..4], "cd"); + buffer.end_transaction_at(Some(set_id), now).unwrap(); + assert_eq!(buffer.text(), "12cd56"); + assert_eq!(buffer.selection_ranges(set_id).unwrap(), vec![4..4]); + + buffer.start_transaction_at(Some(set_id), now).unwrap(); + buffer + .update_selection_set(set_id, buffer.selections_from_ranges(vec![1..3]).unwrap()) + .unwrap(); + buffer.edit(vec![4..5], "e"); + buffer.end_transaction_at(Some(set_id), now).unwrap(); + assert_eq!(buffer.text(), "12cde6"); + assert_eq!(buffer.selection_ranges(set_id).unwrap(), vec![1..3]); + + now += buffer.history.group_interval + Duration::from_millis(1); + buffer.start_transaction_at(Some(set_id), now).unwrap(); + buffer + .update_selection_set(set_id, buffer.selections_from_ranges(vec![2..2]).unwrap()) + .unwrap(); + buffer.edit(vec![0..1], "a"); + buffer.edit(vec![1..1], "b"); + buffer.end_transaction_at(Some(set_id), now).unwrap(); + assert_eq!(buffer.text(), "ab2cde6"); + assert_eq!(buffer.selection_ranges(set_id).unwrap(), vec![3..3]); + + // Last transaction happened past the group interval, undo it on its + // own. + buffer.undo(); + assert_eq!(buffer.text(), "12cde6"); + assert_eq!(buffer.selection_ranges(set_id).unwrap(), vec![1..3]); + + // First two transactions happened within the group interval, undo them + // together. + buffer.undo(); + assert_eq!(buffer.text(), "123456"); + assert_eq!(buffer.selection_ranges(set_id).unwrap(), vec![4..4]); + + // Redo the first two transactions together. + buffer.redo(); + assert_eq!(buffer.text(), "12cde6"); + assert_eq!(buffer.selection_ranges(set_id).unwrap(), vec![1..3]); + + // Redo the last transaction on its own. 
+ buffer.redo(); + assert_eq!(buffer.text(), "ab2cde6"); + assert_eq!(buffer.selection_ranges(set_id).unwrap(), vec![3..3]); + + buffer.start_transaction_at(None, now).unwrap(); + assert!(buffer.end_transaction_at(None, now).is_none()); + buffer.undo(); + assert_eq!(buffer.text(), "12cde6"); +} + +#[test] +fn test_concurrent_edits() { + let text = "abcdef"; + + let mut buffer1 = TextBuffer::new(1, 0, History::new(text.into())); + let mut buffer2 = TextBuffer::new(2, 0, History::new(text.into())); + let mut buffer3 = TextBuffer::new(3, 0, History::new(text.into())); + + let buf1_op = buffer1.edit(vec![1..2], "12"); + assert_eq!(buffer1.text(), "a12cdef"); + let buf2_op = buffer2.edit(vec![3..4], "34"); + assert_eq!(buffer2.text(), "abc34ef"); + let buf3_op = buffer3.edit(vec![5..6], "56"); + assert_eq!(buffer3.text(), "abcde56"); + + buffer1.apply_op(Operation::Edit(buf2_op.clone())).unwrap(); + buffer1.apply_op(Operation::Edit(buf3_op.clone())).unwrap(); + buffer2.apply_op(Operation::Edit(buf1_op.clone())).unwrap(); + buffer2.apply_op(Operation::Edit(buf3_op.clone())).unwrap(); + buffer3.apply_op(Operation::Edit(buf1_op.clone())).unwrap(); + buffer3.apply_op(Operation::Edit(buf2_op.clone())).unwrap(); + + assert_eq!(buffer1.text(), "a12c34e56"); + assert_eq!(buffer2.text(), "a12c34e56"); + assert_eq!(buffer3.text(), "a12c34e56"); +} + +#[gpui::test(iterations = 100)] +fn test_random_concurrent_edits(mut rng: StdRng) { + let peers = env::var("PEERS") + .map(|i| i.parse().expect("invalid `PEERS` variable")) + .unwrap_or(5); + let operations = env::var("OPERATIONS") + .map(|i| i.parse().expect("invalid `OPERATIONS` variable")) + .unwrap_or(10); + + let base_text_len = rng.gen_range(0..10); + let base_text = RandomCharIter::new(&mut rng) + .take(base_text_len) + .collect::(); + let mut replica_ids = Vec::new(); + let mut buffers = Vec::new(); + let mut network = Network::new(rng.clone()); + + for i in 0..peers { + let mut buffer = TextBuffer::new(i as ReplicaId, 0, History::new(base_text.clone().into())); + buffer.history.group_interval = Duration::from_millis(rng.gen_range(0..=200)); + buffers.push(buffer); + replica_ids.push(i as u16); + network.add_peer(i as u16); + } + + log::info!("initial text: {:?}", base_text); + + let mut mutation_count = operations; + loop { + let replica_index = rng.gen_range(0..peers); + let replica_id = replica_ids[replica_index]; + let buffer = &mut buffers[replica_index]; + match rng.gen_range(0..=100) { + 0..=50 if mutation_count != 0 => { + let ops = buffer.randomly_mutate(&mut rng); + network.broadcast(buffer.replica_id, ops); + log::info!("buffer {} text: {:?}", buffer.replica_id, buffer.text()); + mutation_count -= 1; + } + 51..=70 if mutation_count != 0 => { + let ops = buffer.randomly_undo_redo(&mut rng); + network.broadcast(buffer.replica_id, ops); + mutation_count -= 1; + } + 71..=100 if network.has_unreceived(replica_id) => { + let ops = network.receive(replica_id); + if !ops.is_empty() { + log::info!( + "peer {} applying {} ops from the network.", + replica_id, + ops.len() + ); + buffer.apply_ops(ops).unwrap(); + } + } + _ => {} + } + + if mutation_count == 0 && network.is_idle() { + break; + } + } + + let first_buffer = &buffers[0]; + for buffer in &buffers[1..] 
{ + assert_eq!( + buffer.text(), + first_buffer.text(), + "Replica {} text != Replica 0 text", + buffer.replica_id + ); + assert_eq!( + buffer.selection_sets().collect::>(), + first_buffer.selection_sets().collect::>() + ); + assert_eq!( + buffer.all_selection_ranges().collect::>(), + first_buffer + .all_selection_ranges() + .collect::>() + ); + } +} + +#[derive(Clone)] +struct Envelope { + message: T, + sender: ReplicaId, +} + +struct Network { + inboxes: std::collections::BTreeMap>>, + all_messages: Vec, + rng: R, +} + +impl Network { + fn new(rng: R) -> Self { + Network { + inboxes: Default::default(), + all_messages: Vec::new(), + rng, + } + } + + fn add_peer(&mut self, id: ReplicaId) { + self.inboxes.insert(id, Vec::new()); + } + + fn is_idle(&self) -> bool { + self.inboxes.values().all(|i| i.is_empty()) + } + + fn broadcast(&mut self, sender: ReplicaId, messages: Vec) { + for (replica, inbox) in self.inboxes.iter_mut() { + if *replica != sender { + for message in &messages { + let min_index = inbox + .iter() + .enumerate() + .rev() + .find_map(|(index, envelope)| { + if sender == envelope.sender { + Some(index + 1) + } else { + None + } + }) + .unwrap_or(0); + + // Insert one or more duplicates of this message *after* the previous + // message delivered by this replica. + for _ in 0..self.rng.gen_range(1..4) { + let insertion_index = self.rng.gen_range(min_index..inbox.len() + 1); + inbox.insert( + insertion_index, + Envelope { + message: message.clone(), + sender, + }, + ); + } + } + } + } + self.all_messages.extend(messages); + } + + fn has_unreceived(&self, receiver: ReplicaId) -> bool { + !self.inboxes[&receiver].is_empty() + } + + fn receive(&mut self, receiver: ReplicaId) -> Vec { + let inbox = self.inboxes.get_mut(&receiver).unwrap(); + let count = self.rng.gen_range(0..inbox.len() + 1); + inbox + .drain(0..count) + .map(|envelope| envelope.message) + .collect() + } +} diff --git a/crates/buffer/src/tests/buffer.rs b/crates/buffer/src/tests/buffer.rs deleted file mode 100644 index f0c9051daa6b572d5226f7c2cb9e0cb7788c7142..0000000000000000000000000000000000000000 --- a/crates/buffer/src/tests/buffer.rs +++ /dev/null @@ -1,733 +0,0 @@ -use crate::*; -use clock::ReplicaId; -use rand::prelude::*; -use std::{ - cell::RefCell, - cmp::Ordering, - env, - iter::Iterator, - rc::Rc, - time::{Duration, Instant}, -}; - -#[test] -fn test_edit() { - let mut buffer = TextBuffer::new(0, 0, History::new("abc".into())); - assert_eq!(buffer.text(), "abc"); - buffer.edit(vec![3..3], "def"); - assert_eq!(buffer.text(), "abcdef"); - buffer.edit(vec![0..0], "ghi"); - assert_eq!(buffer.text(), "ghiabcdef"); - buffer.edit(vec![5..5], "jkl"); - assert_eq!(buffer.text(), "ghiabjklcdef"); - buffer.edit(vec![6..7], ""); - assert_eq!(buffer.text(), "ghiabjlcdef"); - buffer.edit(vec![4..9], "mno"); - assert_eq!(buffer.text(), "ghiamnoef"); -} - -#[gpui::test] -fn test_edit_events(cx: &mut gpui::MutableAppContext) { - let mut now = Instant::now(); - let buffer_1_events = Rc::new(RefCell::new(Vec::new())); - let buffer_2_events = Rc::new(RefCell::new(Vec::new())); - - let buffer1 = cx.add_model(|cx| Buffer::new(0, "abcdef", cx)); - let buffer2 = cx.add_model(|cx| Buffer::new(1, "abcdef", cx)); - let buffer_ops = buffer1.update(cx, |buffer, cx| { - let buffer_1_events = buffer_1_events.clone(); - cx.subscribe(&buffer1, move |_, _, event, _| { - buffer_1_events.borrow_mut().push(event.clone()) - }) - .detach(); - let buffer_2_events = buffer_2_events.clone(); - cx.subscribe(&buffer2, move |_, _, event, _| { - 
buffer_2_events.borrow_mut().push(event.clone()) - }) - .detach(); - - // An edit emits an edited event, followed by a dirtied event, - // since the buffer was previously in a clean state. - buffer.edit(Some(2..4), "XYZ", cx); - - // An empty transaction does not emit any events. - buffer.start_transaction(None).unwrap(); - buffer.end_transaction(None, cx).unwrap(); - - // A transaction containing two edits emits one edited event. - now += Duration::from_secs(1); - buffer.start_transaction_at(None, now).unwrap(); - buffer.edit(Some(5..5), "u", cx); - buffer.edit(Some(6..6), "w", cx); - buffer.end_transaction_at(None, now, cx).unwrap(); - - // Undoing a transaction emits one edited event. - buffer.undo(cx); - - buffer.operations.clone() - }); - - // Incorporating a set of remote ops emits a single edited event, - // followed by a dirtied event. - buffer2.update(cx, |buffer, cx| { - buffer.apply_ops(buffer_ops, cx).unwrap(); - }); - - let buffer_1_events = buffer_1_events.borrow(); - assert_eq!( - *buffer_1_events, - vec![Event::Edited, Event::Dirtied, Event::Edited, Event::Edited] - ); - - let buffer_2_events = buffer_2_events.borrow(); - assert_eq!(*buffer_2_events, vec![Event::Edited, Event::Dirtied]); -} - -#[gpui::test(iterations = 100)] -fn test_random_edits(mut rng: StdRng) { - let operations = env::var("OPERATIONS") - .map(|i| i.parse().expect("invalid `OPERATIONS` variable")) - .unwrap_or(10); - - let reference_string_len = rng.gen_range(0..3); - let mut reference_string = RandomCharIter::new(&mut rng) - .take(reference_string_len) - .collect::(); - let mut buffer = TextBuffer::new(0, 0, History::new(reference_string.clone().into())); - buffer.history.group_interval = Duration::from_millis(rng.gen_range(0..=200)); - let mut buffer_versions = Vec::new(); - log::info!( - "buffer text {:?}, version: {:?}", - buffer.text(), - buffer.version() - ); - - for _i in 0..operations { - let (old_ranges, new_text, _) = buffer.randomly_edit(&mut rng, 5); - for old_range in old_ranges.iter().rev() { - reference_string.replace_range(old_range.clone(), &new_text); - } - assert_eq!(buffer.text(), reference_string); - log::info!( - "buffer text {:?}, version: {:?}", - buffer.text(), - buffer.version() - ); - - if rng.gen_bool(0.25) { - buffer.randomly_undo_redo(&mut rng); - reference_string = buffer.text(); - log::info!( - "buffer text {:?}, version: {:?}", - buffer.text(), - buffer.version() - ); - } - - let range = buffer.random_byte_range(0, &mut rng); - assert_eq!( - buffer.text_summary_for_range(range.clone()), - TextSummary::from(&reference_string[range]) - ); - - if rng.gen_bool(0.3) { - buffer_versions.push(buffer.clone()); - } - } - - for mut old_buffer in buffer_versions { - let edits = buffer - .edits_since(old_buffer.version.clone()) - .collect::>(); - - log::info!( - "mutating old buffer version {:?}, text: {:?}, edits since: {:?}", - old_buffer.version(), - old_buffer.text(), - edits, - ); - - let mut delta = 0_isize; - for edit in edits { - let old_start = (edit.old_bytes.start as isize + delta) as usize; - let new_text: String = buffer.text_for_range(edit.new_bytes.clone()).collect(); - old_buffer.edit(Some(old_start..old_start + edit.deleted_bytes()), new_text); - delta += edit.delta(); - } - assert_eq!(old_buffer.text(), buffer.text()); - } -} - -#[test] -fn test_line_len() { - let mut buffer = TextBuffer::new(0, 0, History::new("".into())); - buffer.edit(vec![0..0], "abcd\nefg\nhij"); - buffer.edit(vec![12..12], "kl\nmno"); - buffer.edit(vec![18..18], "\npqrs\n"); - 
buffer.edit(vec![18..21], "\nPQ"); - - assert_eq!(buffer.line_len(0), 4); - assert_eq!(buffer.line_len(1), 3); - assert_eq!(buffer.line_len(2), 5); - assert_eq!(buffer.line_len(3), 3); - assert_eq!(buffer.line_len(4), 4); - assert_eq!(buffer.line_len(5), 0); -} - -#[test] -fn test_text_summary_for_range() { - let buffer = TextBuffer::new(0, 0, History::new("ab\nefg\nhklm\nnopqrs\ntuvwxyz".into())); - assert_eq!( - buffer.text_summary_for_range(1..3), - TextSummary { - bytes: 2, - lines: Point::new(1, 0), - first_line_chars: 1, - last_line_chars: 0, - longest_row: 0, - longest_row_chars: 1, - } - ); - assert_eq!( - buffer.text_summary_for_range(1..12), - TextSummary { - bytes: 11, - lines: Point::new(3, 0), - first_line_chars: 1, - last_line_chars: 0, - longest_row: 2, - longest_row_chars: 4, - } - ); - assert_eq!( - buffer.text_summary_for_range(0..20), - TextSummary { - bytes: 20, - lines: Point::new(4, 1), - first_line_chars: 2, - last_line_chars: 1, - longest_row: 3, - longest_row_chars: 6, - } - ); - assert_eq!( - buffer.text_summary_for_range(0..22), - TextSummary { - bytes: 22, - lines: Point::new(4, 3), - first_line_chars: 2, - last_line_chars: 3, - longest_row: 3, - longest_row_chars: 6, - } - ); - assert_eq!( - buffer.text_summary_for_range(7..22), - TextSummary { - bytes: 15, - lines: Point::new(2, 3), - first_line_chars: 4, - last_line_chars: 3, - longest_row: 1, - longest_row_chars: 6, - } - ); -} - -#[test] -fn test_chars_at() { - let mut buffer = TextBuffer::new(0, 0, History::new("".into())); - buffer.edit(vec![0..0], "abcd\nefgh\nij"); - buffer.edit(vec![12..12], "kl\nmno"); - buffer.edit(vec![18..18], "\npqrs"); - buffer.edit(vec![18..21], "\nPQ"); - - let chars = buffer.chars_at(Point::new(0, 0)); - assert_eq!(chars.collect::(), "abcd\nefgh\nijkl\nmno\nPQrs"); - - let chars = buffer.chars_at(Point::new(1, 0)); - assert_eq!(chars.collect::(), "efgh\nijkl\nmno\nPQrs"); - - let chars = buffer.chars_at(Point::new(2, 0)); - assert_eq!(chars.collect::(), "ijkl\nmno\nPQrs"); - - let chars = buffer.chars_at(Point::new(3, 0)); - assert_eq!(chars.collect::(), "mno\nPQrs"); - - let chars = buffer.chars_at(Point::new(4, 0)); - assert_eq!(chars.collect::(), "PQrs"); - - // Regression test: - let mut buffer = TextBuffer::new(0, 0, History::new("".into())); - buffer.edit(vec![0..0], "[workspace]\nmembers = [\n \"xray_core\",\n \"xray_server\",\n \"xray_cli\",\n \"xray_wasm\",\n]\n"); - buffer.edit(vec![60..60], "\n"); - - let chars = buffer.chars_at(Point::new(6, 0)); - assert_eq!(chars.collect::(), " \"xray_wasm\",\n]\n"); -} - -#[test] -fn test_anchors() { - let mut buffer = TextBuffer::new(0, 0, History::new("".into())); - buffer.edit(vec![0..0], "abc"); - let left_anchor = buffer.anchor_before(2); - let right_anchor = buffer.anchor_after(2); - - buffer.edit(vec![1..1], "def\n"); - assert_eq!(buffer.text(), "adef\nbc"); - assert_eq!(left_anchor.to_offset(&buffer), 6); - assert_eq!(right_anchor.to_offset(&buffer), 6); - assert_eq!(left_anchor.to_point(&buffer), Point { row: 1, column: 1 }); - assert_eq!(right_anchor.to_point(&buffer), Point { row: 1, column: 1 }); - - buffer.edit(vec![2..3], ""); - assert_eq!(buffer.text(), "adf\nbc"); - assert_eq!(left_anchor.to_offset(&buffer), 5); - assert_eq!(right_anchor.to_offset(&buffer), 5); - assert_eq!(left_anchor.to_point(&buffer), Point { row: 1, column: 1 }); - assert_eq!(right_anchor.to_point(&buffer), Point { row: 1, column: 1 }); - - buffer.edit(vec![5..5], "ghi\n"); - assert_eq!(buffer.text(), "adf\nbghi\nc"); - 
assert_eq!(left_anchor.to_offset(&buffer), 5); - assert_eq!(right_anchor.to_offset(&buffer), 9); - assert_eq!(left_anchor.to_point(&buffer), Point { row: 1, column: 1 }); - assert_eq!(right_anchor.to_point(&buffer), Point { row: 2, column: 0 }); - - buffer.edit(vec![7..9], ""); - assert_eq!(buffer.text(), "adf\nbghc"); - assert_eq!(left_anchor.to_offset(&buffer), 5); - assert_eq!(right_anchor.to_offset(&buffer), 7); - assert_eq!(left_anchor.to_point(&buffer), Point { row: 1, column: 1 },); - assert_eq!(right_anchor.to_point(&buffer), Point { row: 1, column: 3 }); - - // Ensure anchoring to a point is equivalent to anchoring to an offset. - assert_eq!( - buffer.anchor_before(Point { row: 0, column: 0 }), - buffer.anchor_before(0) - ); - assert_eq!( - buffer.anchor_before(Point { row: 0, column: 1 }), - buffer.anchor_before(1) - ); - assert_eq!( - buffer.anchor_before(Point { row: 0, column: 2 }), - buffer.anchor_before(2) - ); - assert_eq!( - buffer.anchor_before(Point { row: 0, column: 3 }), - buffer.anchor_before(3) - ); - assert_eq!( - buffer.anchor_before(Point { row: 1, column: 0 }), - buffer.anchor_before(4) - ); - assert_eq!( - buffer.anchor_before(Point { row: 1, column: 1 }), - buffer.anchor_before(5) - ); - assert_eq!( - buffer.anchor_before(Point { row: 1, column: 2 }), - buffer.anchor_before(6) - ); - assert_eq!( - buffer.anchor_before(Point { row: 1, column: 3 }), - buffer.anchor_before(7) - ); - assert_eq!( - buffer.anchor_before(Point { row: 1, column: 4 }), - buffer.anchor_before(8) - ); - - // Comparison between anchors. - let anchor_at_offset_0 = buffer.anchor_before(0); - let anchor_at_offset_1 = buffer.anchor_before(1); - let anchor_at_offset_2 = buffer.anchor_before(2); - - assert_eq!( - anchor_at_offset_0 - .cmp(&anchor_at_offset_0, &buffer) - .unwrap(), - Ordering::Equal - ); - assert_eq!( - anchor_at_offset_1 - .cmp(&anchor_at_offset_1, &buffer) - .unwrap(), - Ordering::Equal - ); - assert_eq!( - anchor_at_offset_2 - .cmp(&anchor_at_offset_2, &buffer) - .unwrap(), - Ordering::Equal - ); - - assert_eq!( - anchor_at_offset_0 - .cmp(&anchor_at_offset_1, &buffer) - .unwrap(), - Ordering::Less - ); - assert_eq!( - anchor_at_offset_1 - .cmp(&anchor_at_offset_2, &buffer) - .unwrap(), - Ordering::Less - ); - assert_eq!( - anchor_at_offset_0 - .cmp(&anchor_at_offset_2, &buffer) - .unwrap(), - Ordering::Less - ); - - assert_eq!( - anchor_at_offset_1 - .cmp(&anchor_at_offset_0, &buffer) - .unwrap(), - Ordering::Greater - ); - assert_eq!( - anchor_at_offset_2 - .cmp(&anchor_at_offset_1, &buffer) - .unwrap(), - Ordering::Greater - ); - assert_eq!( - anchor_at_offset_2 - .cmp(&anchor_at_offset_0, &buffer) - .unwrap(), - Ordering::Greater - ); -} - -#[test] -fn test_anchors_at_start_and_end() { - let mut buffer = TextBuffer::new(0, 0, History::new("".into())); - let before_start_anchor = buffer.anchor_before(0); - let after_end_anchor = buffer.anchor_after(0); - - buffer.edit(vec![0..0], "abc"); - assert_eq!(buffer.text(), "abc"); - assert_eq!(before_start_anchor.to_offset(&buffer), 0); - assert_eq!(after_end_anchor.to_offset(&buffer), 3); - - let after_start_anchor = buffer.anchor_after(0); - let before_end_anchor = buffer.anchor_before(3); - - buffer.edit(vec![3..3], "def"); - buffer.edit(vec![0..0], "ghi"); - assert_eq!(buffer.text(), "ghiabcdef"); - assert_eq!(before_start_anchor.to_offset(&buffer), 0); - assert_eq!(after_start_anchor.to_offset(&buffer), 3); - assert_eq!(before_end_anchor.to_offset(&buffer), 6); - assert_eq!(after_end_anchor.to_offset(&buffer), 9); -} - 
-#[gpui::test] -async fn test_apply_diff(mut cx: gpui::TestAppContext) { - let text = "a\nbb\nccc\ndddd\neeeee\nffffff\n"; - let buffer = cx.add_model(|cx| Buffer::new(0, text, cx)); - - let text = "a\nccc\ndddd\nffffff\n"; - let diff = buffer.read_with(&cx, |b, cx| b.diff(text.into(), cx)).await; - buffer.update(&mut cx, |b, cx| b.apply_diff(diff, cx)); - cx.read(|cx| assert_eq!(buffer.read(cx).text(), text)); - - let text = "a\n1\n\nccc\ndd2dd\nffffff\n"; - let diff = buffer.read_with(&cx, |b, cx| b.diff(text.into(), cx)).await; - buffer.update(&mut cx, |b, cx| b.apply_diff(diff, cx)); - cx.read(|cx| assert_eq!(buffer.read(cx).text(), text)); -} - -#[test] -fn test_undo_redo() { - let mut buffer = TextBuffer::new(0, 0, History::new("1234".into())); - // Set group interval to zero so as to not group edits in the undo stack. - buffer.history.group_interval = Duration::from_secs(0); - - buffer.edit(vec![1..1], "abx"); - buffer.edit(vec![3..4], "yzef"); - buffer.edit(vec![3..5], "cd"); - assert_eq!(buffer.text(), "1abcdef234"); - - let transactions = buffer.history.undo_stack.clone(); - assert_eq!(transactions.len(), 3); - - buffer.undo_or_redo(transactions[0].clone()).unwrap(); - assert_eq!(buffer.text(), "1cdef234"); - buffer.undo_or_redo(transactions[0].clone()).unwrap(); - assert_eq!(buffer.text(), "1abcdef234"); - - buffer.undo_or_redo(transactions[1].clone()).unwrap(); - assert_eq!(buffer.text(), "1abcdx234"); - buffer.undo_or_redo(transactions[2].clone()).unwrap(); - assert_eq!(buffer.text(), "1abx234"); - buffer.undo_or_redo(transactions[1].clone()).unwrap(); - assert_eq!(buffer.text(), "1abyzef234"); - buffer.undo_or_redo(transactions[2].clone()).unwrap(); - assert_eq!(buffer.text(), "1abcdef234"); - - buffer.undo_or_redo(transactions[2].clone()).unwrap(); - assert_eq!(buffer.text(), "1abyzef234"); - buffer.undo_or_redo(transactions[0].clone()).unwrap(); - assert_eq!(buffer.text(), "1yzef234"); - buffer.undo_or_redo(transactions[1].clone()).unwrap(); - assert_eq!(buffer.text(), "1234"); -} - -#[test] -fn test_history() { - let mut now = Instant::now(); - let mut buffer = TextBuffer::new(0, 0, History::new("123456".into())); - - let set_id = if let Operation::UpdateSelections { set_id, .. } = - buffer.add_selection_set(buffer.selections_from_ranges(vec![4..4]).unwrap()) - { - set_id - } else { - unreachable!() - }; - buffer.start_transaction_at(Some(set_id), now).unwrap(); - buffer.edit(vec![2..4], "cd"); - buffer.end_transaction_at(Some(set_id), now).unwrap(); - assert_eq!(buffer.text(), "12cd56"); - assert_eq!(buffer.selection_ranges(set_id).unwrap(), vec![4..4]); - - buffer.start_transaction_at(Some(set_id), now).unwrap(); - buffer - .update_selection_set(set_id, buffer.selections_from_ranges(vec![1..3]).unwrap()) - .unwrap(); - buffer.edit(vec![4..5], "e"); - buffer.end_transaction_at(Some(set_id), now).unwrap(); - assert_eq!(buffer.text(), "12cde6"); - assert_eq!(buffer.selection_ranges(set_id).unwrap(), vec![1..3]); - - now += buffer.history.group_interval + Duration::from_millis(1); - buffer.start_transaction_at(Some(set_id), now).unwrap(); - buffer - .update_selection_set(set_id, buffer.selections_from_ranges(vec![2..2]).unwrap()) - .unwrap(); - buffer.edit(vec![0..1], "a"); - buffer.edit(vec![1..1], "b"); - buffer.end_transaction_at(Some(set_id), now).unwrap(); - assert_eq!(buffer.text(), "ab2cde6"); - assert_eq!(buffer.selection_ranges(set_id).unwrap(), vec![3..3]); - - // Last transaction happened past the group interval, undo it on its - // own. 
- buffer.undo(); - assert_eq!(buffer.text(), "12cde6"); - assert_eq!(buffer.selection_ranges(set_id).unwrap(), vec![1..3]); - - // First two transactions happened within the group interval, undo them - // together. - buffer.undo(); - assert_eq!(buffer.text(), "123456"); - assert_eq!(buffer.selection_ranges(set_id).unwrap(), vec![4..4]); - - // Redo the first two transactions together. - buffer.redo(); - assert_eq!(buffer.text(), "12cde6"); - assert_eq!(buffer.selection_ranges(set_id).unwrap(), vec![1..3]); - - // Redo the last transaction on its own. - buffer.redo(); - assert_eq!(buffer.text(), "ab2cde6"); - assert_eq!(buffer.selection_ranges(set_id).unwrap(), vec![3..3]); - - buffer.start_transaction_at(None, now).unwrap(); - assert!(buffer.end_transaction_at(None, now).is_none()); - buffer.undo(); - assert_eq!(buffer.text(), "12cde6"); -} - -#[test] -fn test_concurrent_edits() { - let text = "abcdef"; - - let mut buffer1 = TextBuffer::new(1, 0, History::new(text.into())); - let mut buffer2 = TextBuffer::new(2, 0, History::new(text.into())); - let mut buffer3 = TextBuffer::new(3, 0, History::new(text.into())); - - let buf1_op = buffer1.edit(vec![1..2], "12"); - assert_eq!(buffer1.text(), "a12cdef"); - let buf2_op = buffer2.edit(vec![3..4], "34"); - assert_eq!(buffer2.text(), "abc34ef"); - let buf3_op = buffer3.edit(vec![5..6], "56"); - assert_eq!(buffer3.text(), "abcde56"); - - buffer1.apply_op(Operation::Edit(buf2_op.clone())).unwrap(); - buffer1.apply_op(Operation::Edit(buf3_op.clone())).unwrap(); - buffer2.apply_op(Operation::Edit(buf1_op.clone())).unwrap(); - buffer2.apply_op(Operation::Edit(buf3_op.clone())).unwrap(); - buffer3.apply_op(Operation::Edit(buf1_op.clone())).unwrap(); - buffer3.apply_op(Operation::Edit(buf2_op.clone())).unwrap(); - - assert_eq!(buffer1.text(), "a12c34e56"); - assert_eq!(buffer2.text(), "a12c34e56"); - assert_eq!(buffer3.text(), "a12c34e56"); -} - -#[gpui::test(iterations = 100)] -fn test_random_concurrent_edits(mut rng: StdRng) { - let peers = env::var("PEERS") - .map(|i| i.parse().expect("invalid `PEERS` variable")) - .unwrap_or(5); - let operations = env::var("OPERATIONS") - .map(|i| i.parse().expect("invalid `OPERATIONS` variable")) - .unwrap_or(10); - - let base_text_len = rng.gen_range(0..10); - let base_text = RandomCharIter::new(&mut rng) - .take(base_text_len) - .collect::(); - let mut replica_ids = Vec::new(); - let mut buffers = Vec::new(); - let mut network = Network::new(rng.clone()); - - for i in 0..peers { - let mut buffer = TextBuffer::new(i as ReplicaId, 0, History::new(base_text.clone().into())); - buffer.history.group_interval = Duration::from_millis(rng.gen_range(0..=200)); - buffers.push(buffer); - replica_ids.push(i as u16); - network.add_peer(i as u16); - } - - log::info!("initial text: {:?}", base_text); - - let mut mutation_count = operations; - loop { - let replica_index = rng.gen_range(0..peers); - let replica_id = replica_ids[replica_index]; - let buffer = &mut buffers[replica_index]; - match rng.gen_range(0..=100) { - 0..=50 if mutation_count != 0 => { - let ops = buffer.randomly_mutate(&mut rng); - network.broadcast(buffer.replica_id, ops); - log::info!("buffer {} text: {:?}", buffer.replica_id, buffer.text()); - mutation_count -= 1; - } - 51..=70 if mutation_count != 0 => { - let ops = buffer.randomly_undo_redo(&mut rng); - network.broadcast(buffer.replica_id, ops); - mutation_count -= 1; - } - 71..=100 if network.has_unreceived(replica_id) => { - let ops = network.receive(replica_id); - if !ops.is_empty() { - log::info!( - 
"peer {} applying {} ops from the network.", - replica_id, - ops.len() - ); - buffer.apply_ops(ops).unwrap(); - } - } - _ => {} - } - - if mutation_count == 0 && network.is_idle() { - break; - } - } - - let first_buffer = &buffers[0]; - for buffer in &buffers[1..] { - assert_eq!( - buffer.text(), - first_buffer.text(), - "Replica {} text != Replica 0 text", - buffer.replica_id - ); - assert_eq!( - buffer.selection_sets().collect::>(), - first_buffer.selection_sets().collect::>() - ); - assert_eq!( - buffer.all_selection_ranges().collect::>(), - first_buffer - .all_selection_ranges() - .collect::>() - ); - } -} - -#[derive(Clone)] -struct Envelope { - message: T, - sender: ReplicaId, -} - -struct Network { - inboxes: std::collections::BTreeMap>>, - all_messages: Vec, - rng: R, -} - -impl Network { - fn new(rng: R) -> Self { - Network { - inboxes: Default::default(), - all_messages: Vec::new(), - rng, - } - } - - fn add_peer(&mut self, id: ReplicaId) { - self.inboxes.insert(id, Vec::new()); - } - - fn is_idle(&self) -> bool { - self.inboxes.values().all(|i| i.is_empty()) - } - - fn broadcast(&mut self, sender: ReplicaId, messages: Vec) { - for (replica, inbox) in self.inboxes.iter_mut() { - if *replica != sender { - for message in &messages { - let min_index = inbox - .iter() - .enumerate() - .rev() - .find_map(|(index, envelope)| { - if sender == envelope.sender { - Some(index + 1) - } else { - None - } - }) - .unwrap_or(0); - - // Insert one or more duplicates of this message *after* the previous - // message delivered by this replica. - for _ in 0..self.rng.gen_range(1..4) { - let insertion_index = self.rng.gen_range(min_index..inbox.len() + 1); - inbox.insert( - insertion_index, - Envelope { - message: message.clone(), - sender, - }, - ); - } - } - } - } - self.all_messages.extend(messages); - } - - fn has_unreceived(&self, receiver: ReplicaId) -> bool { - !self.inboxes[&receiver].is_empty() - } - - fn receive(&mut self, receiver: ReplicaId) -> Vec { - let inbox = self.inboxes.get_mut(&receiver).unwrap(); - let count = self.rng.gen_range(0..inbox.len() + 1); - inbox - .drain(0..count) - .map(|envelope| envelope.message) - .collect() - } -} diff --git a/crates/editor/Cargo.toml b/crates/editor/Cargo.toml index 5212d42526bb866f262d77370eeadf0568c107c5..1baf3b56112560d6154afb75fe31b27dbbf7c25f 100644 --- a/crates/editor/Cargo.toml +++ b/crates/editor/Cargo.toml @@ -10,6 +10,7 @@ test-support = ["buffer/test-support", "gpui/test-support"] buffer = { path = "../buffer" } clock = { path = "../clock" } gpui = { path = "../gpui" } +language = { path = "../language" } sum_tree = { path = "../sum_tree" } theme = { path = "../theme" } util = { path = "../util" } @@ -24,6 +25,7 @@ smol = "1.2" [dev-dependencies] buffer = { path = "../buffer", features = ["test-support"] } +language = { path = "../language", features = ["test-support"] } gpui = { path = "../gpui", features = ["test-support"] } rand = "0.8" unindent = "0.1.7" diff --git a/crates/editor/src/display_map.rs b/crates/editor/src/display_map.rs index dd348b6e46e25fb9336f933ca6ec41606950556c..30a506ea920b717c45b4146e106edf942a31eb34 100644 --- a/crates/editor/src/display_map.rs +++ b/crates/editor/src/display_map.rs @@ -2,9 +2,9 @@ mod fold_map; mod tab_map; mod wrap_map; -use buffer::{Anchor, Buffer, Point, ToOffset, ToPoint}; use fold_map::{FoldMap, ToFoldPoint as _}; use gpui::{fonts::FontId, Entity, ModelContext, ModelHandle}; +use language::{Anchor, Buffer, Point, ToOffset, ToPoint}; use std::ops::Range; use sum_tree::Bias; use 
tab_map::TabMap; @@ -109,7 +109,7 @@ impl DisplayMap { } pub struct DisplayMapSnapshot { - buffer_snapshot: buffer::Snapshot, + buffer_snapshot: language::Snapshot, folds_snapshot: fold_map::Snapshot, tabs_snapshot: tab_map::Snapshot, wraps_snapshot: wrap_map::Snapshot, @@ -358,8 +358,8 @@ impl ToDisplayPoint for Anchor { mod tests { use super::*; use crate::{movement, test::*}; - use buffer::{History, Language, LanguageConfig, RandomCharIter, SelectionGoal}; use gpui::{color::Color, MutableAppContext}; + use language::{History, Language, LanguageConfig, RandomCharIter, SelectionGoal}; use rand::{prelude::StdRng, Rng}; use std::{env, sync::Arc}; use theme::SyntaxTheme; @@ -436,7 +436,7 @@ mod tests { } } _ => { - buffer.update(&mut cx, |buffer, cx| buffer.randomly_edit(&mut rng, 5)); + buffer.update(&mut cx, |buffer, _| buffer.randomly_edit(&mut rng, 5)); } } diff --git a/crates/editor/src/display_map/fold_map.rs b/crates/editor/src/display_map/fold_map.rs index 0a1e01e0c06ec942c9486a042c23b3f54a6c5359..73e032e7f37b103b1e5753cd18fa91cedfb5a4de 100644 --- a/crates/editor/src/display_map/fold_map.rs +++ b/crates/editor/src/display_map/fold_map.rs @@ -1,5 +1,5 @@ -use buffer::{Anchor, AnchorRangeExt, Buffer, HighlightId, Point, TextSummary, ToOffset}; use gpui::{AppContext, ModelHandle}; +use language::{Anchor, AnchorRangeExt, Buffer, HighlightId, Point, TextSummary, ToOffset}; use parking_lot::Mutex; use std::{ cmp::{self, Ordering}, @@ -485,7 +485,7 @@ impl FoldMap { pub struct Snapshot { transforms: SumTree, folds: SumTree, - buffer_snapshot: buffer::Snapshot, + buffer_snapshot: language::Snapshot, pub version: usize, } @@ -994,7 +994,7 @@ impl<'a> Iterator for Chunks<'a> { pub struct HighlightedChunks<'a> { transform_cursor: Cursor<'a, Transform, (FoldOffset, usize)>, - buffer_chunks: buffer::HighlightedChunks<'a>, + buffer_chunks: language::HighlightedChunks<'a>, buffer_chunk: Option<(usize, &'a str, HighlightId)>, buffer_offset: usize, } @@ -1331,7 +1331,7 @@ mod tests { snapshot_edits.extend(map.randomly_mutate(&mut rng, cx.as_ref())); } _ => { - let edits = buffer.update(cx, |buffer, cx| { + let edits = buffer.update(cx, |buffer, _| { let start_version = buffer.version.clone(); let edit_count = rng.gen_range(1..=5); buffer.randomly_edit(&mut rng, edit_count); diff --git a/crates/editor/src/display_map/tab_map.rs b/crates/editor/src/display_map/tab_map.rs index 4fa684c47e3c53396e74af03b2b43529877802eb..cfab4fd941921fef6410f64ab2104db3a7ee8873 100644 --- a/crates/editor/src/display_map/tab_map.rs +++ b/crates/editor/src/display_map/tab_map.rs @@ -1,5 +1,5 @@ use super::fold_map::{self, FoldEdit, FoldPoint, Snapshot as FoldSnapshot}; -use buffer::{rope, HighlightId}; +use language::{rope, HighlightId}; use parking_lot::Mutex; use std::{mem, ops::Range}; use sum_tree::Bias; diff --git a/crates/editor/src/display_map/wrap_map.rs b/crates/editor/src/display_map/wrap_map.rs index 884657f5024b68be989a761c1730cfb9ab5242d2..897dfa01b9cde891f3d6720694edc0046dbd3f18 100644 --- a/crates/editor/src/display_map/wrap_map.rs +++ b/crates/editor/src/display_map/wrap_map.rs @@ -2,8 +2,8 @@ use super::{ fold_map, tab_map::{self, Edit as TabEdit, Snapshot as TabSnapshot, TabPoint, TextSummary}, }; -use buffer::{HighlightId, Point}; use gpui::{fonts::FontId, text_layout::LineWrapper, Entity, ModelContext, Task}; +use language::{HighlightId, Point}; use lazy_static::lazy_static; use smol::future::yield_now; use std::{collections::VecDeque, ops::Range, time::Duration}; @@ -899,7 +899,7 @@ mod tests { 
display_map::{fold_map::FoldMap, tab_map::TabMap}, test::Observer, }; - use buffer::{Buffer, RandomCharIter}; + use language::{Buffer, RandomCharIter}; use rand::prelude::*; use std::env; @@ -990,7 +990,7 @@ mod tests { } } _ => { - buffer.update(&mut cx, |buffer, cx| buffer.randomly_mutate(&mut rng)); + buffer.update(&mut cx, |buffer, _| buffer.randomly_mutate(&mut rng)); } } diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs index e3e48e475c2dd1bbb9043044ea55ac64dfb89d62..cf0a101b0feaa8490888ca1aef1aec71ff8513a5 100644 --- a/crates/editor/src/element.rs +++ b/crates/editor/src/element.rs @@ -2,7 +2,6 @@ use super::{ DisplayPoint, Editor, EditorMode, EditorSettings, EditorStyle, Input, Scroll, Select, SelectPhase, Snapshot, MAX_LINE_LEN, }; -use buffer::HighlightId; use clock::ReplicaId; use gpui::{ color::Color, @@ -18,6 +17,7 @@ use gpui::{ MutableAppContext, PaintContext, Quad, Scene, SizeConstraint, ViewContext, WeakViewHandle, }; use json::json; +use language::HighlightId; use smallvec::SmallVec; use std::{ cmp::{self, Ordering}, @@ -1043,7 +1043,7 @@ mod tests { test::sample_text, {Editor, EditorSettings}, }; - use buffer::Buffer; + use language::Buffer; #[gpui::test] fn test_layout_line_numbers(cx: &mut gpui::MutableAppContext) { diff --git a/crates/editor/src/lib.rs b/crates/editor/src/lib.rs index a835e5e50cb5c4114b6f556c840cd58be5fafc26..61414d5dc7ea9864a4ea5ee53bd744f6e7266b20 100644 --- a/crates/editor/src/lib.rs +++ b/crates/editor/src/lib.rs @@ -5,7 +5,6 @@ pub mod movement; #[cfg(test)] mod test; -use buffer::*; use clock::ReplicaId; pub use display_map::DisplayPoint; use display_map::*; @@ -15,6 +14,7 @@ use gpui::{ text_layout, AppContext, ClipboardItem, Element, ElementBox, Entity, ModelHandle, MutableAppContext, RenderContext, View, ViewContext, WeakViewHandle, }; +use language::*; use serde::{Deserialize, Serialize}; use smallvec::SmallVec; use smol::Timer; @@ -2661,17 +2661,17 @@ impl Editor { fn on_buffer_event( &mut self, _: ModelHandle, - event: &buffer::Event, + event: &language::Event, cx: &mut ViewContext, ) { match event { - buffer::Event::Edited => cx.emit(Event::Edited), - buffer::Event::Dirtied => cx.emit(Event::Dirtied), - buffer::Event::Saved => cx.emit(Event::Saved), - buffer::Event::FileHandleChanged => cx.emit(Event::FileHandleChanged), - buffer::Event::Reloaded => cx.emit(Event::FileHandleChanged), - buffer::Event::Closed => cx.emit(Event::Closed), - buffer::Event::Reparsed => {} + language::Event::Edited => cx.emit(Event::Edited), + language::Event::Dirtied => cx.emit(Event::Dirtied), + language::Event::Saved => cx.emit(Event::Saved), + language::Event::FileHandleChanged => cx.emit(Event::FileHandleChanged), + language::Event::Reloaded => cx.emit(Event::FileHandleChanged), + language::Event::Closed => cx.emit(Event::Closed), + language::Event::Reparsed => {} } } diff --git a/crates/language/Cargo.toml b/crates/language/Cargo.toml new file mode 100644 index 0000000000000000000000000000000000000000..06ac60c3b24bfbe239768642a2bec6ea04d4c8c7 --- /dev/null +++ b/crates/language/Cargo.toml @@ -0,0 +1,35 @@ +[package] +name = "language" +version = "0.1.0" +edition = "2018" + +[features] +test-support = ["rand"] + +[dependencies] +buffer = { path = "../buffer" } +clock = { path = "../clock" } +gpui = { path = "../gpui" } +rpc = { path = "../rpc" } +sum_tree = { path = "../sum_tree" } +theme = { path = "../theme" } +anyhow = "1.0.38" +arrayvec = "0.7.1" +lazy_static = "1.4" +log = "0.4" +parking_lot = "0.11.1" +rand = { version = "0.8.3", 
optional = true } +seahash = "4.1" +serde = { version = "1", features = ["derive"] } +similar = "1.3" +smallvec = { version = "1.6", features = ["union"] } +smol = "1.2" +tree-sitter = "0.19.5" + +[dev-dependencies] +buffer = { path = "../buffer", features = ["test-support"] } +gpui = { path = "../gpui", features = ["test-support"] } + +rand = "0.8.3" +tree-sitter-rust = "0.19.0" +unindent = "0.1.7" diff --git a/crates/buffer/src/highlight_map.rs b/crates/language/src/highlight_map.rs similarity index 100% rename from crates/buffer/src/highlight_map.rs rename to crates/language/src/highlight_map.rs diff --git a/crates/buffer/src/language.rs b/crates/language/src/language.rs similarity index 100% rename from crates/buffer/src/language.rs rename to crates/language/src/language.rs diff --git a/crates/language/src/lib.rs b/crates/language/src/lib.rs new file mode 100644 index 0000000000000000000000000000000000000000..c7b866f003271a007dbd4598c8448add5c4071b1 --- /dev/null +++ b/crates/language/src/lib.rs @@ -0,0 +1,1471 @@ +mod highlight_map; +mod language; +#[cfg(test)] +mod tests; + +pub use self::{ + highlight_map::{HighlightId, HighlightMap}, + language::{BracketPair, Language, LanguageConfig, LanguageRegistry}, +}; +use anyhow::{anyhow, Result}; +pub use buffer::*; +use clock::ReplicaId; +use gpui::{AppContext, Entity, ModelContext, MutableAppContext, Task}; +use lazy_static::lazy_static; +use parking_lot::Mutex; +use rpc::proto; +use similar::{ChangeTag, TextDiff}; +use smol::future::yield_now; +use std::{ + any::Any, + cell::RefCell, + cmp, + collections::{BTreeMap, HashMap, HashSet}, + ffi::OsString, + future::Future, + iter::Iterator, + ops::{Deref, DerefMut, Range}, + path::{Path, PathBuf}, + str, + sync::Arc, + time::{Duration, Instant, SystemTime, UNIX_EPOCH}, +}; +use sum_tree::Bias; +use tree_sitter::{InputEdit, Parser, QueryCursor, Tree}; + +thread_local! { + static PARSER: RefCell = RefCell::new(Parser::new()); +} + +lazy_static! { + static ref QUERY_CURSORS: Mutex> = Default::default(); +} + +// TODO - Make this configurable +const INDENT_SIZE: u32 = 4; + +pub struct Buffer { + buffer: TextBuffer, + file: Option>, + saved_version: clock::Global, + saved_mtime: SystemTime, + language: Option>, + autoindent_requests: Vec>, + pending_autoindent: Option>, + sync_parse_timeout: Duration, + syntax_tree: Mutex>, + parsing_in_background: bool, + parse_count: usize, + #[cfg(test)] + operations: Vec, +} + +pub struct Snapshot { + text: buffer::Snapshot, + tree: Option, + is_parsing: bool, + language: Option>, + query_cursor: QueryCursorHandle, +} + +#[derive(Clone, Debug, Eq, PartialEq)] +pub enum Event { + Edited, + Dirtied, + Saved, + FileHandleChanged, + Reloaded, + Reparsed, + Closed, +} + +pub trait File { + fn worktree_id(&self) -> usize; + + fn entry_id(&self) -> Option; + + fn set_entry_id(&mut self, entry_id: Option); + + fn mtime(&self) -> SystemTime; + + fn set_mtime(&mut self, mtime: SystemTime); + + fn path(&self) -> &Arc; + + fn set_path(&mut self, path: Arc); + + fn full_path(&self, cx: &AppContext) -> PathBuf; + + /// Returns the last component of this handle's absolute path. If this handle refers to the root + /// of its worktree, then this method will return the name of the worktree itself. 
+ fn file_name<'a>(&'a self, cx: &'a AppContext) -> Option; + + fn is_deleted(&self) -> bool; + + fn save( + &self, + buffer_id: u64, + text: Rope, + version: clock::Global, + cx: &mut MutableAppContext, + ) -> Task>; + + fn buffer_updated(&self, buffer_id: u64, operation: Operation, cx: &mut MutableAppContext); + + fn buffer_removed(&self, buffer_id: u64, cx: &mut MutableAppContext); + + fn boxed_clone(&self) -> Box; + + fn as_any(&self) -> &dyn Any; +} + +struct QueryCursorHandle(Option); + +#[derive(Clone)] +struct SyntaxTree { + tree: Tree, + version: clock::Global, +} + +#[derive(Clone)] +struct AutoindentRequest { + selection_set_ids: HashSet, + before_edit: Snapshot, + edited: AnchorSet, + inserted: Option, +} + +#[derive(Debug)] +struct IndentSuggestion { + basis_row: u32, + indent: bool, +} + +struct TextProvider<'a>(&'a Rope); + +struct Highlights<'a> { + captures: tree_sitter::QueryCaptures<'a, 'a, TextProvider<'a>>, + next_capture: Option<(tree_sitter::QueryMatch<'a, 'a>, usize)>, + stack: Vec<(usize, HighlightId)>, + highlight_map: HighlightMap, +} + +pub struct HighlightedChunks<'a> { + range: Range, + chunks: Chunks<'a>, + highlights: Option>, +} + +struct Diff { + base_version: clock::Global, + new_text: Arc, + changes: Vec<(ChangeTag, usize)>, +} + +impl Buffer { + pub fn new>>( + replica_id: ReplicaId, + base_text: T, + cx: &mut ModelContext, + ) -> Self { + Self::build( + TextBuffer::new( + replica_id, + cx.model_id() as u64, + History::new(base_text.into()), + ), + None, + None, + cx, + ) + } + + pub fn from_history( + replica_id: ReplicaId, + history: History, + file: Option>, + language: Option>, + cx: &mut ModelContext, + ) -> Self { + Self::build( + TextBuffer::new(replica_id, cx.model_id() as u64, history), + file, + language, + cx, + ) + } + + pub fn from_proto( + replica_id: ReplicaId, + message: proto::Buffer, + file: Option>, + language: Option>, + cx: &mut ModelContext, + ) -> Result { + Ok(Self::build( + TextBuffer::from_proto(replica_id, message)?, + file, + language, + cx, + )) + } + + fn build( + buffer: TextBuffer, + file: Option>, + language: Option>, + cx: &mut ModelContext, + ) -> Self { + let saved_mtime; + if let Some(file) = file.as_ref() { + saved_mtime = file.mtime(); + } else { + saved_mtime = UNIX_EPOCH; + } + + let mut result = Self { + buffer, + saved_mtime, + saved_version: clock::Global::new(), + file, + syntax_tree: Mutex::new(None), + parsing_in_background: false, + parse_count: 0, + sync_parse_timeout: Duration::from_millis(1), + autoindent_requests: Default::default(), + pending_autoindent: Default::default(), + language, + + #[cfg(test)] + operations: Default::default(), + }; + result.reparse(cx); + result + } + + pub fn snapshot(&self) -> Snapshot { + Snapshot { + text: self.buffer.snapshot(), + tree: self.syntax_tree(), + is_parsing: self.parsing_in_background, + language: self.language.clone(), + query_cursor: QueryCursorHandle::new(), + } + } + + pub fn file(&self) -> Option<&dyn File> { + self.file.as_deref() + } + + pub fn file_mut(&mut self) -> Option<&mut dyn File> { + self.file.as_mut().map(|f| f.deref_mut() as &mut dyn File) + } + + pub fn save( + &mut self, + cx: &mut ModelContext, + ) -> Result>> { + let file = self + .file + .as_ref() + .ok_or_else(|| anyhow!("buffer has no file"))?; + let text = self.as_rope().clone(); + let version = self.version.clone(); + let save = file.save(self.remote_id(), text, version, cx.as_mut()); + Ok(cx.spawn(|this, mut cx| async move { + let (version, mtime) = save.await?; + this.update(&mut 
cx, |this, cx| { + this.did_save(version.clone(), mtime, None, cx); + }); + Ok((version, mtime)) + })) + } + + pub fn set_language(&mut self, language: Option>, cx: &mut ModelContext) { + self.language = language; + self.reparse(cx); + } + + pub fn did_save( + &mut self, + version: clock::Global, + mtime: SystemTime, + new_file: Option>, + cx: &mut ModelContext, + ) { + self.saved_mtime = mtime; + self.saved_version = version; + if let Some(new_file) = new_file { + self.file = Some(new_file); + } + cx.emit(Event::Saved); + } + + pub fn file_updated( + &mut self, + path: Arc, + mtime: SystemTime, + new_text: Option, + cx: &mut ModelContext, + ) { + let file = self.file.as_mut().unwrap(); + let mut changed = false; + if path != *file.path() { + file.set_path(path); + changed = true; + } + + if mtime != file.mtime() { + file.set_mtime(mtime); + changed = true; + if let Some(new_text) = new_text { + if self.version == self.saved_version { + cx.spawn(|this, mut cx| async move { + let diff = this + .read_with(&cx, |this, cx| this.diff(new_text.into(), cx)) + .await; + this.update(&mut cx, |this, cx| { + if this.apply_diff(diff, cx) { + this.saved_version = this.version.clone(); + this.saved_mtime = mtime; + cx.emit(Event::Reloaded); + } + }); + }) + .detach(); + } + } + } + + if changed { + cx.emit(Event::FileHandleChanged); + } + } + + pub fn file_deleted(&mut self, cx: &mut ModelContext) { + if self.version == self.saved_version { + cx.emit(Event::Dirtied); + } + cx.emit(Event::FileHandleChanged); + } + + pub fn close(&mut self, cx: &mut ModelContext) { + cx.emit(Event::Closed); + } + + pub fn language(&self) -> Option<&Arc> { + self.language.as_ref() + } + + pub fn parse_count(&self) -> usize { + self.parse_count + } + + fn syntax_tree(&self) -> Option { + if let Some(syntax_tree) = self.syntax_tree.lock().as_mut() { + self.interpolate_tree(syntax_tree); + Some(syntax_tree.tree.clone()) + } else { + None + } + } + + #[cfg(any(test, feature = "test-support"))] + pub fn is_parsing(&self) -> bool { + self.parsing_in_background + } + + #[cfg(test)] + pub fn set_sync_parse_timeout(&mut self, timeout: Duration) { + self.sync_parse_timeout = timeout; + } + + fn reparse(&mut self, cx: &mut ModelContext) -> bool { + if self.parsing_in_background { + return false; + } + + if let Some(language) = self.language.clone() { + let old_tree = self.syntax_tree(); + let text = self.as_rope().clone(); + let parsed_version = self.version(); + let parse_task = cx.background().spawn({ + let language = language.clone(); + async move { Self::parse_text(&text, old_tree, &language) } + }); + + match cx + .background() + .block_with_timeout(self.sync_parse_timeout, parse_task) + { + Ok(new_tree) => { + self.did_finish_parsing(new_tree, parsed_version, cx); + return true; + } + Err(parse_task) => { + self.parsing_in_background = true; + cx.spawn(move |this, mut cx| async move { + let new_tree = parse_task.await; + this.update(&mut cx, move |this, cx| { + let language_changed = + this.language.as_ref().map_or(true, |curr_language| { + !Arc::ptr_eq(curr_language, &language) + }); + let parse_again = this.version > parsed_version || language_changed; + this.parsing_in_background = false; + this.did_finish_parsing(new_tree, parsed_version, cx); + + if parse_again && this.reparse(cx) { + return; + } + }); + }) + .detach(); + } + } + } + false + } + + fn parse_text(text: &Rope, old_tree: Option, language: &Language) -> Tree { + PARSER.with(|parser| { + let mut parser = parser.borrow_mut(); + parser + 
.set_language(language.grammar) + .expect("incompatible grammar"); + let mut chunks = text.chunks_in_range(0..text.len()); + let tree = parser + .parse_with( + &mut move |offset, _| { + chunks.seek(offset); + chunks.next().unwrap_or("").as_bytes() + }, + old_tree.as_ref(), + ) + .unwrap(); + tree + }) + } + + fn interpolate_tree(&self, tree: &mut SyntaxTree) { + let mut delta = 0_isize; + for edit in self.edits_since(tree.version.clone()) { + let start_offset = (edit.old_bytes.start as isize + delta) as usize; + let start_point = self.as_rope().to_point(start_offset); + tree.tree.edit(&InputEdit { + start_byte: start_offset, + old_end_byte: start_offset + edit.deleted_bytes(), + new_end_byte: start_offset + edit.inserted_bytes(), + start_position: start_point.into(), + old_end_position: (start_point + edit.deleted_lines()).into(), + new_end_position: self + .as_rope() + .to_point(start_offset + edit.inserted_bytes()) + .into(), + }); + delta += edit.inserted_bytes() as isize - edit.deleted_bytes() as isize; + } + tree.version = self.version(); + } + + fn did_finish_parsing( + &mut self, + tree: Tree, + version: clock::Global, + cx: &mut ModelContext, + ) { + self.parse_count += 1; + *self.syntax_tree.lock() = Some(SyntaxTree { tree, version }); + self.request_autoindent(cx); + cx.emit(Event::Reparsed); + cx.notify(); + } + + fn request_autoindent(&mut self, cx: &mut ModelContext) { + if let Some(indent_columns) = self.compute_autoindents() { + let indent_columns = cx.background().spawn(indent_columns); + match cx + .background() + .block_with_timeout(Duration::from_micros(500), indent_columns) + { + Ok(indent_columns) => self.apply_autoindents(indent_columns, cx), + Err(indent_columns) => { + self.pending_autoindent = Some(cx.spawn(|this, mut cx| async move { + let indent_columns = indent_columns.await; + this.update(&mut cx, |this, cx| { + this.apply_autoindents(indent_columns, cx); + }); + })); + } + } + } + } + + fn compute_autoindents(&self) -> Option>> { + let max_rows_between_yields = 100; + let snapshot = self.snapshot(); + if snapshot.language.is_none() + || snapshot.tree.is_none() + || self.autoindent_requests.is_empty() + { + return None; + } + + let autoindent_requests = self.autoindent_requests.clone(); + Some(async move { + let mut indent_columns = BTreeMap::new(); + for request in autoindent_requests { + let old_to_new_rows = request + .edited + .to_points(&request.before_edit) + .map(|point| point.row) + .zip(request.edited.to_points(&snapshot).map(|point| point.row)) + .collect::>(); + + let mut old_suggestions = HashMap::::default(); + let old_edited_ranges = + contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields); + for old_edited_range in old_edited_ranges { + let suggestions = request + .before_edit + .suggest_autoindents(old_edited_range.clone()) + .into_iter() + .flatten(); + for (old_row, suggestion) in old_edited_range.zip(suggestions) { + let indentation_basis = old_to_new_rows + .get(&suggestion.basis_row) + .and_then(|from_row| old_suggestions.get(from_row).copied()) + .unwrap_or_else(|| { + request + .before_edit + .indent_column_for_line(suggestion.basis_row) + }); + let delta = if suggestion.indent { INDENT_SIZE } else { 0 }; + old_suggestions.insert( + *old_to_new_rows.get(&old_row).unwrap(), + indentation_basis + delta, + ); + } + yield_now().await; + } + + // At this point, old_suggestions contains the suggested indentation for all edited lines with respect to the state of the + // buffer before the edit, but keyed by the row for these 
lines after the edits were applied. + let new_edited_row_ranges = + contiguous_ranges(old_to_new_rows.values().copied(), max_rows_between_yields); + for new_edited_row_range in new_edited_row_ranges { + let suggestions = snapshot + .suggest_autoindents(new_edited_row_range.clone()) + .into_iter() + .flatten(); + for (new_row, suggestion) in new_edited_row_range.zip(suggestions) { + let delta = if suggestion.indent { INDENT_SIZE } else { 0 }; + let new_indentation = indent_columns + .get(&suggestion.basis_row) + .copied() + .unwrap_or_else(|| { + snapshot.indent_column_for_line(suggestion.basis_row) + }) + + delta; + if old_suggestions + .get(&new_row) + .map_or(true, |old_indentation| new_indentation != *old_indentation) + { + indent_columns.insert(new_row, new_indentation); + } + } + yield_now().await; + } + + if let Some(inserted) = request.inserted.as_ref() { + let inserted_row_ranges = contiguous_ranges( + inserted + .to_point_ranges(&snapshot) + .flat_map(|range| range.start.row..range.end.row + 1), + max_rows_between_yields, + ); + for inserted_row_range in inserted_row_ranges { + let suggestions = snapshot + .suggest_autoindents(inserted_row_range.clone()) + .into_iter() + .flatten(); + for (row, suggestion) in inserted_row_range.zip(suggestions) { + let delta = if suggestion.indent { INDENT_SIZE } else { 0 }; + let new_indentation = indent_columns + .get(&suggestion.basis_row) + .copied() + .unwrap_or_else(|| { + snapshot.indent_column_for_line(suggestion.basis_row) + }) + + delta; + indent_columns.insert(row, new_indentation); + } + yield_now().await; + } + } + } + indent_columns + }) + } + + fn apply_autoindents( + &mut self, + indent_columns: BTreeMap, + cx: &mut ModelContext, + ) { + let selection_set_ids = self + .autoindent_requests + .drain(..) 
+ .flat_map(|req| req.selection_set_ids.clone()) + .collect::>(); + + self.start_transaction(selection_set_ids.iter().copied()) + .unwrap(); + for (row, indent_column) in &indent_columns { + self.set_indent_column_for_line(*row, *indent_column, cx); + } + + for selection_set_id in &selection_set_ids { + if let Ok(set) = self.selection_set(*selection_set_id) { + let new_selections = set + .selections + .iter() + .map(|selection| { + let start_point = selection.start.to_point(&self.buffer); + if start_point.column == 0 { + let end_point = selection.end.to_point(&self.buffer); + let delta = Point::new( + 0, + indent_columns.get(&start_point.row).copied().unwrap_or(0), + ); + if delta.column > 0 { + return Selection { + id: selection.id, + goal: selection.goal, + reversed: selection.reversed, + start: self + .anchor_at(start_point + delta, selection.start.bias), + end: self.anchor_at(end_point + delta, selection.end.bias), + }; + } + } + selection.clone() + }) + .collect::>(); + self.update_selection_set(*selection_set_id, new_selections, cx) + .unwrap(); + } + } + + self.end_transaction(selection_set_ids.iter().copied(), cx) + .unwrap(); + } + + pub fn indent_column_for_line(&self, row: u32) -> u32 { + self.content().indent_column_for_line(row) + } + + fn set_indent_column_for_line(&mut self, row: u32, column: u32, cx: &mut ModelContext) { + let current_column = self.indent_column_for_line(row); + if column > current_column { + let offset = Point::new(row, 0).to_offset(&*self); + self.edit( + [offset..offset], + " ".repeat((column - current_column) as usize), + cx, + ); + } else if column < current_column { + self.edit( + [Point::new(row, 0)..Point::new(row, current_column - column)], + "", + cx, + ); + } + } + + pub fn range_for_syntax_ancestor(&self, range: Range) -> Option> { + if let Some(tree) = self.syntax_tree() { + let root = tree.root_node(); + let range = range.start.to_offset(self)..range.end.to_offset(self); + let mut node = root.descendant_for_byte_range(range.start, range.end); + while node.map_or(false, |n| n.byte_range() == range) { + node = node.unwrap().parent(); + } + node.map(|n| n.byte_range()) + } else { + None + } + } + + pub fn enclosing_bracket_ranges( + &self, + range: Range, + ) -> Option<(Range, Range)> { + let (lang, tree) = self.language.as_ref().zip(self.syntax_tree())?; + let open_capture_ix = lang.brackets_query.capture_index_for_name("open")?; + let close_capture_ix = lang.brackets_query.capture_index_for_name("close")?; + + // Find bracket pairs that *inclusively* contain the given range. + let range = range.start.to_offset(self).saturating_sub(1)..range.end.to_offset(self) + 1; + let mut cursor = QueryCursorHandle::new(); + let matches = cursor.set_byte_range(range).matches( + &lang.brackets_query, + tree.root_node(), + TextProvider(self.as_rope()), + ); + + // Get the ranges of the innermost pair of brackets. + matches + .filter_map(|mat| { + let open = mat.nodes_for_capture_index(open_capture_ix).next()?; + let close = mat.nodes_for_capture_index(close_capture_ix).next()?; + Some((open.byte_range(), close.byte_range())) + }) + .min_by_key(|(open_range, close_range)| close_range.end - open_range.start) + } + + fn diff(&self, new_text: Arc, cx: &AppContext) -> Task { + // TODO: it would be nice to not allocate here. 
+ let old_text = self.text(); + let base_version = self.version(); + cx.background().spawn(async move { + let changes = TextDiff::from_lines(old_text.as_str(), new_text.as_ref()) + .iter_all_changes() + .map(|c| (c.tag(), c.value().len())) + .collect::>(); + Diff { + base_version, + new_text, + changes, + } + }) + } + + pub fn set_text_from_disk(&self, new_text: Arc, cx: &mut ModelContext) -> Task<()> { + cx.spawn(|this, mut cx| async move { + let diff = this + .read_with(&cx, |this, cx| this.diff(new_text, cx)) + .await; + + this.update(&mut cx, |this, cx| { + if this.apply_diff(diff, cx) { + this.saved_version = this.version.clone(); + } + }); + }) + } + + fn apply_diff(&mut self, diff: Diff, cx: &mut ModelContext) -> bool { + if self.version == diff.base_version { + self.start_transaction(None).unwrap(); + let mut offset = 0; + for (tag, len) in diff.changes { + let range = offset..(offset + len); + match tag { + ChangeTag::Equal => offset += len, + ChangeTag::Delete => self.edit(Some(range), "", cx), + ChangeTag::Insert => { + self.edit(Some(offset..offset), &diff.new_text[range], cx); + offset += len; + } + } + } + self.end_transaction(None, cx).unwrap(); + true + } else { + false + } + } + + pub fn is_dirty(&self) -> bool { + self.version > self.saved_version + || self.file.as_ref().map_or(false, |file| file.is_deleted()) + } + + pub fn has_conflict(&self) -> bool { + self.version > self.saved_version + && self + .file + .as_ref() + .map_or(false, |file| file.mtime() > self.saved_mtime) + } + + pub fn start_transaction( + &mut self, + selection_set_ids: impl IntoIterator, + ) -> Result<()> { + self.start_transaction_at(selection_set_ids, Instant::now()) + } + + fn start_transaction_at( + &mut self, + selection_set_ids: impl IntoIterator, + now: Instant, + ) -> Result<()> { + self.buffer.start_transaction_at(selection_set_ids, now) + } + + pub fn end_transaction( + &mut self, + selection_set_ids: impl IntoIterator, + cx: &mut ModelContext, + ) -> Result<()> { + self.end_transaction_at(selection_set_ids, Instant::now(), cx) + } + + fn end_transaction_at( + &mut self, + selection_set_ids: impl IntoIterator, + now: Instant, + cx: &mut ModelContext, + ) -> Result<()> { + if let Some(start_version) = self.buffer.end_transaction_at(selection_set_ids, now) { + cx.notify(); + let was_dirty = start_version != self.saved_version; + let edited = self.edits_since(start_version).next().is_some(); + if edited { + self.did_edit(was_dirty, cx); + self.reparse(cx); + } + } + Ok(()) + } + + pub fn edit(&mut self, ranges_iter: I, new_text: T, cx: &mut ModelContext) + where + I: IntoIterator>, + S: ToOffset, + T: Into, + { + self.edit_internal(ranges_iter, new_text, false, cx) + } + + pub fn edit_with_autoindent( + &mut self, + ranges_iter: I, + new_text: T, + cx: &mut ModelContext, + ) where + I: IntoIterator>, + S: ToOffset, + T: Into, + { + self.edit_internal(ranges_iter, new_text, true, cx) + } + + pub fn edit_internal( + &mut self, + ranges_iter: I, + new_text: T, + autoindent: bool, + cx: &mut ModelContext, + ) where + I: IntoIterator>, + S: ToOffset, + T: Into, + { + let new_text = new_text.into(); + + // Skip invalid ranges and coalesce contiguous ones. 
+ let mut ranges: Vec> = Vec::new(); + for range in ranges_iter { + let range = range.start.to_offset(&*self)..range.end.to_offset(&*self); + if !new_text.is_empty() || !range.is_empty() { + if let Some(prev_range) = ranges.last_mut() { + if prev_range.end >= range.start { + prev_range.end = cmp::max(prev_range.end, range.end); + } else { + ranges.push(range); + } + } else { + ranges.push(range); + } + } + } + if ranges.is_empty() { + return; + } + + self.start_transaction(None).unwrap(); + self.pending_autoindent.take(); + let autoindent_request = if autoindent && self.language.is_some() { + let before_edit = self.snapshot(); + let edited = self.content().anchor_set(ranges.iter().filter_map(|range| { + let start = range.start.to_point(&*self); + if new_text.starts_with('\n') && start.column == self.line_len(start.row) { + None + } else { + Some((range.start, Bias::Left)) + } + })); + Some((before_edit, edited)) + } else { + None + }; + + let first_newline_ix = new_text.find('\n'); + let new_text_len = new_text.len(); + + let edit = self.buffer.edit(ranges.iter().cloned(), new_text); + + if let Some((before_edit, edited)) = autoindent_request { + let mut inserted = None; + if let Some(first_newline_ix) = first_newline_ix { + let mut delta = 0isize; + inserted = Some(self.content().anchor_range_set(ranges.iter().map(|range| { + let start = (delta + range.start as isize) as usize + first_newline_ix + 1; + let end = (delta + range.start as isize) as usize + new_text_len; + delta += (range.end as isize - range.start as isize) + new_text_len as isize; + (start, Bias::Left)..(end, Bias::Right) + }))); + } + + let selection_set_ids = self + .buffer + .peek_undo_stack() + .unwrap() + .starting_selection_set_ids() + .collect(); + self.autoindent_requests.push(Arc::new(AutoindentRequest { + selection_set_ids, + before_edit, + edited, + inserted, + })); + } + + self.end_transaction(None, cx).unwrap(); + self.send_operation(Operation::Edit(edit), cx); + } + + fn did_edit(&self, was_dirty: bool, cx: &mut ModelContext) { + cx.emit(Event::Edited); + if !was_dirty { + cx.emit(Event::Dirtied); + } + } + + pub fn add_selection_set( + &mut self, + selections: impl Into>, + cx: &mut ModelContext, + ) -> SelectionSetId { + let operation = self.buffer.add_selection_set(selections); + if let Operation::UpdateSelections { set_id, .. 
} = &operation { + let set_id = *set_id; + cx.notify(); + self.send_operation(operation, cx); + set_id + } else { + unreachable!() + } + } + + pub fn update_selection_set( + &mut self, + set_id: SelectionSetId, + selections: impl Into>, + cx: &mut ModelContext, + ) -> Result<()> { + let operation = self.buffer.update_selection_set(set_id, selections)?; + cx.notify(); + self.send_operation(operation, cx); + Ok(()) + } + + pub fn set_active_selection_set( + &mut self, + set_id: Option, + cx: &mut ModelContext, + ) -> Result<()> { + let operation = self.buffer.set_active_selection_set(set_id)?; + self.send_operation(operation, cx); + Ok(()) + } + + pub fn remove_selection_set( + &mut self, + set_id: SelectionSetId, + cx: &mut ModelContext, + ) -> Result<()> { + let operation = self.buffer.remove_selection_set(set_id)?; + cx.notify(); + self.send_operation(operation, cx); + Ok(()) + } + + pub fn apply_ops>( + &mut self, + ops: I, + cx: &mut ModelContext, + ) -> Result<()> { + self.pending_autoindent.take(); + + let was_dirty = self.is_dirty(); + let old_version = self.version.clone(); + + self.buffer.apply_ops(ops)?; + + cx.notify(); + if self.edits_since(old_version).next().is_some() { + self.did_edit(was_dirty, cx); + self.reparse(cx); + } + + Ok(()) + } + + #[cfg(not(test))] + pub fn send_operation(&mut self, operation: Operation, cx: &mut ModelContext) { + if let Some(file) = &self.file { + file.buffer_updated(self.remote_id(), operation, cx.as_mut()); + } + } + + #[cfg(test)] + pub fn send_operation(&mut self, operation: Operation, _: &mut ModelContext) { + self.operations.push(operation); + } + + pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut ModelContext) { + self.buffer.remove_peer(replica_id); + cx.notify(); + } + + pub fn undo(&mut self, cx: &mut ModelContext) { + let was_dirty = self.is_dirty(); + let old_version = self.version.clone(); + + for operation in self.buffer.undo() { + self.send_operation(operation, cx); + } + + cx.notify(); + if self.edits_since(old_version).next().is_some() { + self.did_edit(was_dirty, cx); + self.reparse(cx); + } + } + + pub fn redo(&mut self, cx: &mut ModelContext) { + let was_dirty = self.is_dirty(); + let old_version = self.version.clone(); + + for operation in self.buffer.redo() { + self.send_operation(operation, cx); + } + + cx.notify(); + if self.edits_since(old_version).next().is_some() { + self.did_edit(was_dirty, cx); + self.reparse(cx); + } + } +} + +#[cfg(any(test, feature = "test-support"))] +impl Buffer { + pub fn randomly_edit(&mut self, rng: &mut T, old_range_count: usize) + where + T: rand::Rng, + { + self.buffer.randomly_edit(rng, old_range_count); + } + + pub fn randomly_mutate(&mut self, rng: &mut T) + where + T: rand::Rng, + { + self.buffer.randomly_mutate(rng); + } +} + +impl Entity for Buffer { + type Event = Event; + + fn release(&mut self, cx: &mut gpui::MutableAppContext) { + if let Some(file) = self.file.as_ref() { + file.buffer_removed(self.remote_id(), cx); + } + } +} + +impl Clone for Buffer { + fn clone(&self) -> Self { + Self { + buffer: self.buffer.clone(), + saved_version: self.saved_version.clone(), + saved_mtime: self.saved_mtime, + file: self.file.as_ref().map(|f| f.boxed_clone()), + language: self.language.clone(), + syntax_tree: Mutex::new(self.syntax_tree.lock().clone()), + parsing_in_background: false, + sync_parse_timeout: self.sync_parse_timeout, + parse_count: self.parse_count, + autoindent_requests: Default::default(), + pending_autoindent: Default::default(), + + #[cfg(test)] + operations: 
self.operations.clone(), + } + } +} + +impl Deref for Buffer { + type Target = TextBuffer; + + fn deref(&self) -> &Self::Target { + &self.buffer + } +} + +impl<'a> From<&'a Buffer> for Content<'a> { + fn from(buffer: &'a Buffer) -> Self { + Self::from(&buffer.buffer) + } +} + +impl<'a> From<&'a mut Buffer> for Content<'a> { + fn from(buffer: &'a mut Buffer) -> Self { + Self::from(&buffer.buffer) + } +} + +impl<'a> From<&'a Snapshot> for Content<'a> { + fn from(snapshot: &'a Snapshot) -> Self { + Self::from(&snapshot.text) + } +} + +impl Snapshot { + fn suggest_autoindents<'a>( + &'a self, + row_range: Range, + ) -> Option + 'a> { + let mut query_cursor = QueryCursorHandle::new(); + if let Some((language, tree)) = self.language.as_ref().zip(self.tree.as_ref()) { + let prev_non_blank_row = self.prev_non_blank_row(row_range.start); + + // Get the "indentation ranges" that intersect this row range. + let indent_capture_ix = language.indents_query.capture_index_for_name("indent"); + let end_capture_ix = language.indents_query.capture_index_for_name("end"); + query_cursor.set_point_range( + Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0).into() + ..Point::new(row_range.end, 0).into(), + ); + let mut indentation_ranges = Vec::<(Range, &'static str)>::new(); + for mat in query_cursor.matches( + &language.indents_query, + tree.root_node(), + TextProvider(self.as_rope()), + ) { + let mut node_kind = ""; + let mut start: Option = None; + let mut end: Option = None; + for capture in mat.captures { + if Some(capture.index) == indent_capture_ix { + node_kind = capture.node.kind(); + start.get_or_insert(capture.node.start_position().into()); + end.get_or_insert(capture.node.end_position().into()); + } else if Some(capture.index) == end_capture_ix { + end = Some(capture.node.start_position().into()); + } + } + + if let Some((start, end)) = start.zip(end) { + if start.row == end.row { + continue; + } + + let range = start..end; + match indentation_ranges.binary_search_by_key(&range.start, |r| r.0.start) { + Err(ix) => indentation_ranges.insert(ix, (range, node_kind)), + Ok(ix) => { + let prev_range = &mut indentation_ranges[ix]; + prev_range.0.end = prev_range.0.end.max(range.end); + } + } + } + } + + let mut prev_row = prev_non_blank_row.unwrap_or(0); + Some(row_range.map(move |row| { + let row_start = Point::new(row, self.indent_column_for_line(row)); + + let mut indent_from_prev_row = false; + let mut outdent_to_row = u32::MAX; + for (range, _node_kind) in &indentation_ranges { + if range.start.row >= row { + break; + } + + if range.start.row == prev_row && range.end > row_start { + indent_from_prev_row = true; + } + if range.end.row >= prev_row && range.end <= row_start { + outdent_to_row = outdent_to_row.min(range.start.row); + } + } + + let suggestion = if outdent_to_row == prev_row { + IndentSuggestion { + basis_row: prev_row, + indent: false, + } + } else if indent_from_prev_row { + IndentSuggestion { + basis_row: prev_row, + indent: true, + } + } else if outdent_to_row < prev_row { + IndentSuggestion { + basis_row: outdent_to_row, + indent: false, + } + } else { + IndentSuggestion { + basis_row: prev_row, + indent: false, + } + }; + + prev_row = row; + suggestion + })) + } else { + None + } + } + + fn prev_non_blank_row(&self, mut row: u32) -> Option { + while row > 0 { + row -= 1; + if !self.is_line_blank(row) { + return Some(row); + } + } + None + } + + fn is_line_blank(&self, row: u32) -> bool { + self.text_for_range(Point::new(row, 0)..Point::new(row, self.line_len(row))) + 
.all(|chunk| chunk.matches(|c: char| !c.is_whitespace()).next().is_none()) + } + + pub fn highlighted_text_for_range( + &mut self, + range: Range, + ) -> HighlightedChunks { + let range = range.start.to_offset(&*self)..range.end.to_offset(&*self); + let chunks = self.text.as_rope().chunks_in_range(range.clone()); + if let Some((language, tree)) = self.language.as_ref().zip(self.tree.as_ref()) { + let captures = self.query_cursor.set_byte_range(range.clone()).captures( + &language.highlights_query, + tree.root_node(), + TextProvider(self.text.as_rope()), + ); + + HighlightedChunks { + range, + chunks, + highlights: Some(Highlights { + captures, + next_capture: None, + stack: Default::default(), + highlight_map: language.highlight_map(), + }), + } + } else { + HighlightedChunks { + range, + chunks, + highlights: None, + } + } + } +} + +impl Clone for Snapshot { + fn clone(&self) -> Self { + Self { + text: self.text.clone(), + tree: self.tree.clone(), + is_parsing: self.is_parsing, + language: self.language.clone(), + query_cursor: QueryCursorHandle::new(), + } + } +} + +impl Deref for Snapshot { + type Target = buffer::Snapshot; + + fn deref(&self) -> &Self::Target { + &self.text + } +} + +impl<'a> tree_sitter::TextProvider<'a> for TextProvider<'a> { + type I = ByteChunks<'a>; + + fn text(&mut self, node: tree_sitter::Node) -> Self::I { + ByteChunks(self.0.chunks_in_range(node.byte_range())) + } +} + +struct ByteChunks<'a>(rope::Chunks<'a>); + +impl<'a> Iterator for ByteChunks<'a> { + type Item = &'a [u8]; + + fn next(&mut self) -> Option { + self.0.next().map(str::as_bytes) + } +} + +impl<'a> HighlightedChunks<'a> { + pub fn seek(&mut self, offset: usize) { + self.range.start = offset; + self.chunks.seek(self.range.start); + if let Some(highlights) = self.highlights.as_mut() { + highlights + .stack + .retain(|(end_offset, _)| *end_offset > offset); + if let Some((mat, capture_ix)) = &highlights.next_capture { + let capture = mat.captures[*capture_ix as usize]; + if offset >= capture.node.start_byte() { + let next_capture_end = capture.node.end_byte(); + if offset < next_capture_end { + highlights.stack.push(( + next_capture_end, + highlights.highlight_map.get(capture.index), + )); + } + highlights.next_capture.take(); + } + } + highlights.captures.set_byte_range(self.range.clone()); + } + } + + pub fn offset(&self) -> usize { + self.range.start + } +} + +impl<'a> Iterator for HighlightedChunks<'a> { + type Item = (&'a str, HighlightId); + + fn next(&mut self) -> Option { + let mut next_capture_start = usize::MAX; + + if let Some(highlights) = self.highlights.as_mut() { + while let Some((parent_capture_end, _)) = highlights.stack.last() { + if *parent_capture_end <= self.range.start { + highlights.stack.pop(); + } else { + break; + } + } + + if highlights.next_capture.is_none() { + highlights.next_capture = highlights.captures.next(); + } + + while let Some((mat, capture_ix)) = highlights.next_capture.as_ref() { + let capture = mat.captures[*capture_ix as usize]; + if self.range.start < capture.node.start_byte() { + next_capture_start = capture.node.start_byte(); + break; + } else { + let style_id = highlights.highlight_map.get(capture.index); + highlights.stack.push((capture.node.end_byte(), style_id)); + highlights.next_capture = highlights.captures.next(); + } + } + } + + if let Some(chunk) = self.chunks.peek() { + let chunk_start = self.range.start; + let mut chunk_end = (self.chunks.offset() + chunk.len()).min(next_capture_start); + let mut style_id = HighlightId::default(); + if let 
Some((parent_capture_end, parent_style_id)) = + self.highlights.as_ref().and_then(|h| h.stack.last()) + { + chunk_end = chunk_end.min(*parent_capture_end); + style_id = *parent_style_id; + } + + let slice = + &chunk[chunk_start - self.chunks.offset()..chunk_end - self.chunks.offset()]; + self.range.start = chunk_end; + if self.range.start == self.chunks.offset() + chunk.len() { + self.chunks.next().unwrap(); + } + + Some((slice, style_id)) + } else { + None + } + } +} + +impl QueryCursorHandle { + fn new() -> Self { + QueryCursorHandle(Some( + QUERY_CURSORS + .lock() + .pop() + .unwrap_or_else(|| QueryCursor::new()), + )) + } +} + +impl Deref for QueryCursorHandle { + type Target = QueryCursor; + + fn deref(&self) -> &Self::Target { + self.0.as_ref().unwrap() + } +} + +impl DerefMut for QueryCursorHandle { + fn deref_mut(&mut self) -> &mut Self::Target { + self.0.as_mut().unwrap() + } +} + +impl Drop for QueryCursorHandle { + fn drop(&mut self) { + let mut cursor = self.0.take().unwrap(); + cursor.set_byte_range(0..usize::MAX); + cursor.set_point_range(Point::zero().into()..Point::MAX.into()); + QUERY_CURSORS.lock().push(cursor) + } +} + +fn contiguous_ranges( + values: impl IntoIterator, + max_len: usize, +) -> impl Iterator> { + let mut values = values.into_iter(); + let mut current_range: Option> = None; + std::iter::from_fn(move || loop { + if let Some(value) = values.next() { + if let Some(range) = &mut current_range { + if value == range.end && range.len() < max_len { + range.end += 1; + continue; + } + } + + let prev_range = current_range.clone(); + current_range = Some(value..(value + 1)); + if prev_range.is_some() { + return prev_range; + } + } else { + return current_range.take(); + } + }) +} diff --git a/crates/buffer/src/tests/syntax.rs b/crates/language/src/tests.rs similarity index 81% rename from crates/buffer/src/tests/syntax.rs rename to crates/language/src/tests.rs index e61f8ffd5e7687dfbc9fa66945b7e1256261fd6b..23cdced4c70c09fbc8dc6bfa696fa76eebe51d1d 100644 --- a/crates/buffer/src/tests/syntax.rs +++ b/crates/language/src/tests.rs @@ -1,7 +1,81 @@ -use crate::*; +use super::*; use gpui::{ModelHandle, MutableAppContext}; +use std::rc::Rc; use unindent::Unindent as _; +#[gpui::test] +fn test_edit_events(cx: &mut gpui::MutableAppContext) { + let mut now = Instant::now(); + let buffer_1_events = Rc::new(RefCell::new(Vec::new())); + let buffer_2_events = Rc::new(RefCell::new(Vec::new())); + + let buffer1 = cx.add_model(|cx| Buffer::new(0, "abcdef", cx)); + let buffer2 = cx.add_model(|cx| Buffer::new(1, "abcdef", cx)); + let buffer_ops = buffer1.update(cx, |buffer, cx| { + let buffer_1_events = buffer_1_events.clone(); + cx.subscribe(&buffer1, move |_, _, event, _| { + buffer_1_events.borrow_mut().push(event.clone()) + }) + .detach(); + let buffer_2_events = buffer_2_events.clone(); + cx.subscribe(&buffer2, move |_, _, event, _| { + buffer_2_events.borrow_mut().push(event.clone()) + }) + .detach(); + + // An edit emits an edited event, followed by a dirtied event, + // since the buffer was previously in a clean state. + buffer.edit(Some(2..4), "XYZ", cx); + + // An empty transaction does not emit any events. + buffer.start_transaction(None).unwrap(); + buffer.end_transaction(None, cx).unwrap(); + + // A transaction containing two edits emits one edited event. 
+ now += Duration::from_secs(1); + buffer.start_transaction_at(None, now).unwrap(); + buffer.edit(Some(5..5), "u", cx); + buffer.edit(Some(6..6), "w", cx); + buffer.end_transaction_at(None, now, cx).unwrap(); + + // Undoing a transaction emits one edited event. + buffer.undo(cx); + + buffer.operations.clone() + }); + + // Incorporating a set of remote ops emits a single edited event, + // followed by a dirtied event. + buffer2.update(cx, |buffer, cx| { + buffer.apply_ops(buffer_ops, cx).unwrap(); + }); + + let buffer_1_events = buffer_1_events.borrow(); + assert_eq!( + *buffer_1_events, + vec![Event::Edited, Event::Dirtied, Event::Edited, Event::Edited] + ); + + let buffer_2_events = buffer_2_events.borrow(); + assert_eq!(*buffer_2_events, vec![Event::Edited, Event::Dirtied]); +} + +#[gpui::test] +async fn test_apply_diff(mut cx: gpui::TestAppContext) { + let text = "a\nbb\nccc\ndddd\neeeee\nffffff\n"; + let buffer = cx.add_model(|cx| Buffer::new(0, text, cx)); + + let text = "a\nccc\ndddd\nffffff\n"; + let diff = buffer.read_with(&cx, |b, cx| b.diff(text.into(), cx)).await; + buffer.update(&mut cx, |b, cx| b.apply_diff(diff, cx)); + cx.read(|cx| assert_eq!(buffer.read(cx).text(), text)); + + let text = "a\n1\n\nccc\ndd2dd\nffffff\n"; + let diff = buffer.read_with(&cx, |b, cx| b.diff(text.into(), cx)).await; + buffer.update(&mut cx, |b, cx| b.apply_diff(diff, cx)); + cx.read(|cx| assert_eq!(buffer.read(cx).text(), text)); +} + #[gpui::test] async fn test_reparse(mut cx: gpui::TestAppContext) { let buffer = cx.add_model(|cx| { diff --git a/crates/project/Cargo.toml b/crates/project/Cargo.toml index f7d87a4625299dc0164bcc5931f456c239e6e626..fb63a4c21c61249ee99b99a1d80fec03a88ad6b4 100644 --- a/crates/project/Cargo.toml +++ b/crates/project/Cargo.toml @@ -12,6 +12,7 @@ clock = { path = "../clock" } fsevent = { path = "../fsevent" } fuzzy = { path = "../fuzzy" } gpui = { path = "../gpui" } +language = { path = "../language" } client = { path = "../client" } sum_tree = { path = "../sum_tree" } util = { path = "../util" } @@ -33,6 +34,7 @@ toml = "0.5" [dev-dependencies] client = { path = "../client", features = ["test-support"] } +language = { path = "../language", features = ["test-support"] } util = { path = "../util", features = ["test-support"] } rpc = { path = "../rpc", features = ["test-support"] } diff --git a/crates/project/src/lib.rs b/crates/project/src/lib.rs index 184dfd4d9cc54fa3a9f9bf1397ccd917aa1e17fa..3e129c8fb8a1d67feb7b7abdebb1bc89c0c5c1fe 100644 --- a/crates/project/src/lib.rs +++ b/crates/project/src/lib.rs @@ -3,11 +3,11 @@ mod ignore; mod worktree; use anyhow::Result; -use buffer::LanguageRegistry; use client::Client; use futures::Future; use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet}; use gpui::{AppContext, Entity, ModelContext, ModelHandle, Task}; +use language::LanguageRegistry; use std::{ path::Path, sync::{atomic::AtomicBool, Arc}, @@ -302,9 +302,9 @@ impl Entity for Project { #[cfg(test)] mod tests { use super::*; - use buffer::LanguageRegistry; use fs::RealFs; use gpui::TestAppContext; + use language::LanguageRegistry; use serde_json::json; use std::{os::unix, path::PathBuf}; use util::test::temp_tree; diff --git a/crates/project/src/worktree.rs b/crates/project/src/worktree.rs index 3f27b0ed3f5b8213ec66f20db40cb448610612ba..6912f66120c57fbd503d8af59fbf2b6bdf7d2ba5 100644 --- a/crates/project/src/worktree.rs +++ b/crates/project/src/worktree.rs @@ -4,7 +4,6 @@ use super::{ }; use ::ignore::gitignore::{Gitignore, GitignoreBuilder}; use 
anyhow::{anyhow, Result}; -use buffer::{Buffer, History, LanguageRegistry, Operation, Rope}; use client::{proto, Client, PeerId, TypedEnvelope}; use clock::ReplicaId; use futures::{Stream, StreamExt}; @@ -13,6 +12,7 @@ use gpui::{ executor, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, MutableAppContext, Task, UpgradeModelHandle, WeakModelHandle, }; +use language::{Buffer, History, LanguageRegistry, Operation, Rope}; use lazy_static::lazy_static; use parking_lot::Mutex; use postage::{ @@ -627,14 +627,14 @@ impl Worktree { file_changed = true; } else if !file.is_deleted() { if buffer_is_clean { - cx.emit(buffer::Event::Dirtied); + cx.emit(language::Event::Dirtied); } file.set_entry_id(None); file_changed = true; } if file_changed { - cx.emit(buffer::Event::FileHandleChanged); + cx.emit(language::Event::FileHandleChanged); } } }); @@ -862,7 +862,7 @@ impl LocalWorktree { .update(&mut cx, |this, cx| this.as_local().unwrap().load(&path, cx)) .await?; let language = this.read_with(&cx, |this, cx| { - use buffer::File; + use language::File; this.languages() .select_language(file.full_path(cx)) @@ -909,7 +909,7 @@ impl LocalWorktree { .insert(buffer.id() as u64, buffer.clone()); Ok(proto::OpenBufferResponse { - buffer: Some(buffer.update(cx.as_mut(), |buffer, cx| buffer.to_proto(cx))), + buffer: Some(buffer.update(cx.as_mut(), |buffer, _| buffer.to_proto())), }) }) }) @@ -1279,7 +1279,7 @@ impl RemoteWorktree { .ok_or_else(|| anyhow!("worktree was closed"))?; let file = File::new(entry.id, this.clone(), entry.path, entry.mtime); let language = this.read_with(&cx, |this, cx| { - use buffer::File; + use language::File; this.languages() .select_language(file.full_path(cx)) @@ -1790,7 +1790,7 @@ impl File { } } -impl buffer::File for File { +impl language::File for File { fn worktree_id(&self) -> usize { self.worktree.id() } @@ -1942,7 +1942,7 @@ impl buffer::File for File { }); } - fn boxed_clone(&self) -> Box { + fn boxed_clone(&self) -> Box { Box::new(self.clone()) } @@ -3268,7 +3268,7 @@ mod tests { assert!(buffer.is_dirty()); assert_eq!( *events.borrow(), - &[buffer::Event::Edited, buffer::Event::Dirtied] + &[language::Event::Edited, language::Event::Dirtied] ); events.borrow_mut().clear(); buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), None, cx); @@ -3277,7 +3277,7 @@ mod tests { // after saving, the buffer is not dirty, and emits a saved event. buffer1.update(&mut cx, |buffer, cx| { assert!(!buffer.is_dirty()); - assert_eq!(*events.borrow(), &[buffer::Event::Saved]); + assert_eq!(*events.borrow(), &[language::Event::Saved]); events.borrow_mut().clear(); buffer.edit(vec![1..1], "B", cx); @@ -3291,9 +3291,9 @@ mod tests { assert_eq!( *events.borrow(), &[ - buffer::Event::Edited, - buffer::Event::Dirtied, - buffer::Event::Edited + language::Event::Edited, + language::Event::Dirtied, + language::Event::Edited ], ); events.borrow_mut().clear(); @@ -3305,7 +3305,7 @@ mod tests { assert!(buffer.is_dirty()); }); - assert_eq!(*events.borrow(), &[buffer::Event::Edited]); + assert_eq!(*events.borrow(), &[language::Event::Edited]); // When a file is deleted, the buffer is considered dirty. let events = Rc::new(RefCell::new(Vec::new())); @@ -3325,7 +3325,7 @@ mod tests { buffer2.condition(&cx, |b, _| b.is_dirty()).await; assert_eq!( *events.borrow(), - &[buffer::Event::Dirtied, buffer::Event::FileHandleChanged] + &[language::Event::Dirtied, language::Event::FileHandleChanged] ); // When a file is already dirty when deleted, we don't emit a Dirtied event. 
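The dirty-state bookkeeping asserted in these worktree tests follows directly from is_dirty and did_save as defined in the language crate earlier in this patch. A compressed sketch of just that round trip (a sketch only, assuming, as the tests here do, a buffer model that already has a backing file and a gpui TestAppContext named cx):

    buffer.update(&mut cx, |buffer, cx| {
        // Any edit past the saved version marks the buffer dirty...
        buffer.edit(vec![0..0], "x", cx);
        assert!(buffer.is_dirty());

        // ...and recording a save at the current version and mtime clears it again.
        buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), None, cx);
        assert!(!buffer.is_dirty());
    });
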
@@ -3351,7 +3351,7 @@ mod tests { buffer3 .condition(&cx, |_, _| !events.borrow().is_empty()) .await; - assert_eq!(*events.borrow(), &[buffer::Event::FileHandleChanged]); + assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]); cx.read(|cx| assert!(buffer3.read(cx).is_dirty())); } diff --git a/crates/server/src/rpc.rs b/crates/server/src/rpc.rs index 147a655f72e902aa8e3d437c3750bd48c60ab444..073bf5bc7ca47f5850af7d706ce1787f1817bb6c 100644 --- a/crates/server/src/rpc.rs +++ b/crates/server/src/rpc.rs @@ -976,13 +976,13 @@ mod tests { time::Duration, }; use zed::{ - buffer::LanguageRegistry, client::{ self, test::FakeHttpClient, Channel, ChannelDetails, ChannelList, Client, Credentials, EstablishConnectionError, UserStore, }, editor::{Editor, EditorSettings, Input}, fs::{FakeFs, Fs as _}, + language::LanguageRegistry, people_panel::JoinWorktree, project::{ProjectPath, Worktree}, workspace::{Workspace, WorkspaceParams}, diff --git a/crates/workspace/Cargo.toml b/crates/workspace/Cargo.toml index 3dcd3e557e28eb99cc6aac8308c0816700b09231..a96eb23aafb9ce40687611e19d1c5f59576275af 100644 --- a/crates/workspace/Cargo.toml +++ b/crates/workspace/Cargo.toml @@ -8,7 +8,7 @@ test-support = [ "client/test-support", "project/test-support", "tree-sitter", - "tree-sitter-rust" + "tree-sitter-rust", ] [dependencies] @@ -16,6 +16,7 @@ buffer = { path = "../buffer" } client = { path = "../client" } editor = { path = "../editor" } gpui = { path = "../gpui" } +language = { path = "../language" } project = { path = "../project" } theme = { path = "../theme" } anyhow = "1.0.38" diff --git a/crates/workspace/src/items.rs b/crates/workspace/src/items.rs index b62858ed51a6defc278afc06bf8769ef4760c70a..07c511602c1e015e2468f7131e2831aa4dc616f9 100644 --- a/crates/workspace/src/items.rs +++ b/crates/workspace/src/items.rs @@ -1,9 +1,9 @@ use super::{Item, ItemView}; use crate::Settings; use anyhow::Result; -use buffer::{Buffer, File as _}; use editor::{Editor, EditorSettings, Event}; use gpui::{fonts::TextStyle, AppContext, ModelHandle, Task, ViewContext}; +use language::{Buffer, File as _}; use postage::watch; use project::{ProjectPath, Worktree}; use std::path::Path; diff --git a/crates/workspace/src/lib.rs b/crates/workspace/src/lib.rs index c227ee61bd892ea4dbb2f1a0894469abe69a0178..ec1f39e48019ab90fc1826b20cea777173b4b285 100644 --- a/crates/workspace/src/lib.rs +++ b/crates/workspace/src/lib.rs @@ -5,7 +5,7 @@ pub mod settings; pub mod sidebar; use anyhow::Result; -use buffer::{Buffer, LanguageRegistry}; +use language::{Buffer, LanguageRegistry}; use client::{Authenticate, ChannelList, Client, UserStore}; use gpui::{ action, elements::*, json::to_string_pretty, keymap::Binding, platform::CursorStyle, @@ -271,8 +271,8 @@ impl WorkspaceParams { #[cfg(any(test, feature = "test-support"))] pub fn test(cx: &mut MutableAppContext) -> Self { let mut languages = LanguageRegistry::new(); - languages.add(Arc::new(buffer::Language::new( - buffer::LanguageConfig { + languages.add(Arc::new(language::Language::new( + language::LanguageConfig { name: "Rust".to_string(), path_suffixes: vec!["rs".to_string()], ..Default::default() diff --git a/crates/zed/Cargo.toml b/crates/zed/Cargo.toml index d6695709f07952b8e124983a36516321b4b31efc..53718d5a69ba7c71428e02498e6242da2852d22e 100644 --- a/crates/zed/Cargo.toml +++ b/crates/zed/Cargo.toml @@ -18,6 +18,7 @@ test-support = [ "buffer/test-support", "client/test-support", "gpui/test-support", + "language/test-support", "project/test-support", "rpc/test-support", 
"tempdir", @@ -33,6 +34,7 @@ fuzzy = { path = "../fuzzy" } editor = { path = "../editor" } file_finder = { path = "../file_finder" } gpui = { path = "../gpui" } +language = { path = "../language" } people_panel = { path = "../people_panel" } project = { path = "../project" } project_panel = { path = "../project_panel" } @@ -85,6 +87,7 @@ url = "2.2" buffer = { path = "../buffer", features = ["test-support"] } editor = { path = "../editor", features = ["test-support"] } gpui = { path = "../gpui", features = ["test-support"] } +language = { path = "../language", features = ["test-support"] } project = { path = "../project", features = ["test-support"] } rpc = { path = "../rpc", features = ["test-support"] } client = { path = "../client", features = ["test-support"] } diff --git a/crates/zed/src/language.rs b/crates/zed/src/language.rs index 5a542ffc4829fdf70db3a1bef32b152c7e7d26ea..a82f7a2cbb4c5c681b1fb9d57490fd53fc92afb7 100644 --- a/crates/zed/src/language.rs +++ b/crates/zed/src/language.rs @@ -1,4 +1,4 @@ -use buffer::{Language, LanguageRegistry}; +pub use language::{Language, LanguageRegistry}; use rust_embed::RustEmbed; use std::borrow::Cow; use std::{str, sync::Arc}; diff --git a/crates/zed/src/lib.rs b/crates/zed/src/lib.rs index f8711c7175a86f5f2582312de5c2e73b13983286..cec9e29aa817b51b2c9de9c2d67d3069b69560be 100644 --- a/crates/zed/src/lib.rs +++ b/crates/zed/src/lib.rs @@ -4,8 +4,7 @@ pub mod menus; #[cfg(any(test, feature = "test-support"))] pub mod test; -pub use buffer; -use buffer::LanguageRegistry; +use self::language::LanguageRegistry; use chat_panel::ChatPanel; pub use client; pub use editor; diff --git a/crates/zed/src/test.rs b/crates/zed/src/test.rs index 3f9161a06657932d012d73170ce5024cd277bb60..8a7a3989100bbea4bc55b862f526f3749f1a8b73 100644 --- a/crates/zed/src/test.rs +++ b/crates/zed/src/test.rs @@ -1,7 +1,7 @@ use crate::{assets::Assets, AppState}; -use buffer::LanguageRegistry; use client::{http::ServerResponse, test::FakeHttpClient, ChannelList, Client, UserStore}; use gpui::{AssetSource, MutableAppContext}; +use language::LanguageRegistry; use parking_lot::Mutex; use postage::watch; use project::fs::FakeFs; From 37eae2ba67c8251d8e2ae200d19114d66268d8e9 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Thu, 21 Oct 2021 09:40:50 +0200 Subject: [PATCH 4/7] Remove unnecessary dependencies in buffer and language crates --- Cargo.lock | 13 ------------- crates/buffer/Cargo.toml | 16 +++------------- crates/buffer/src/lib.rs | 3 ++- crates/buffer/src/point.rs | 18 ------------------ crates/editor/Cargo.toml | 6 +++++- crates/language/Cargo.toml | 5 ----- crates/language/src/lib.rs | 34 ++++++++++++++++++++++++---------- 7 files changed, 34 insertions(+), 61 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 07a6c0a1a2e7a6fa399ee22dee16fec0bdcb7045..29ae3f3cfc068f9ffa3f636ba9dac7e586357f37 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -750,21 +750,12 @@ dependencies = [ "arrayvec 0.7.1", "clock", "gpui", - "lazy_static", "log", - "parking_lot", "rand 0.8.3", "rpc", "seahash", - "serde 1.0.125", - "similar", "smallvec", - "smol", "sum_tree", - "theme", - "tree-sitter", - "tree-sitter-rust", - "unindent", ] [[package]] @@ -2822,7 +2813,6 @@ name = "language" version = "0.1.0" dependencies = [ "anyhow", - "arrayvec 0.7.1", "buffer", "clock", "gpui", @@ -2831,12 +2821,9 @@ dependencies = [ "parking_lot", "rand 0.8.3", "rpc", - "seahash", "serde 1.0.125", "similar", - "smallvec", "smol", - "sum_tree", "theme", "tree-sitter", "tree-sitter-rust", diff --git 
a/crates/buffer/Cargo.toml b/crates/buffer/Cargo.toml index 0cb283aa463ac6f7c90b17e3284d9982f88df97c..e4112c20d5a4c8ecf95d697ecdc2412a92d4b5d6 100644 --- a/crates/buffer/Cargo.toml +++ b/crates/buffer/Cargo.toml @@ -4,30 +4,20 @@ version = "0.1.0" edition = "2018" [features] -test-support = ["rand"] +test-support = ["rand", "seahash"] [dependencies] clock = { path = "../clock" } -gpui = { path = "../gpui" } rpc = { path = "../rpc" } sum_tree = { path = "../sum_tree" } -theme = { path = "../theme" } anyhow = "1.0.38" arrayvec = "0.7.1" -lazy_static = "1.4" log = "0.4" -parking_lot = "0.11.1" rand = { version = "0.8.3", optional = true } -seahash = "4.1" -serde = { version = "1", features = ["derive"] } -similar = "1.3" +seahash = { version = "4.1", optional = true } smallvec = { version = "1.6", features = ["union"] } -smol = "1.2" -tree-sitter = "0.19.5" [dev-dependencies] gpui = { path = "../gpui", features = ["test-support"] } - +seahash = "4.1" rand = "0.8.3" -tree-sitter-rust = "0.19.0" -unindent = "0.1.7" diff --git a/crates/buffer/src/lib.rs b/crates/buffer/src/lib.rs index 3e17808b68a9735bb0e03abb48209928253dd175..715cd12eb4c7ca6988e85dde96d2c3461ff1d332 100644 --- a/crates/buffer/src/lib.rs +++ b/crates/buffer/src/lib.rs @@ -29,7 +29,8 @@ use std::{ sync::Arc, time::{Duration, Instant}, }; -use sum_tree::{Bias, FilterCursor, SumTree}; +pub use sum_tree::Bias; +use sum_tree::{FilterCursor, SumTree}; #[derive(Clone, Default)] struct DeterministicState; diff --git a/crates/buffer/src/point.rs b/crates/buffer/src/point.rs index 77dd9dfe378ba348a7d64cf7e60dd1f8bad190bf..a2da4e4f6ce245a1cf7198f7fa1bae0f1d622fe6 100644 --- a/crates/buffer/src/point.rs +++ b/crates/buffer/src/point.rs @@ -109,21 +109,3 @@ impl Ord for Point { } } } - -impl Into for Point { - fn into(self) -> tree_sitter::Point { - tree_sitter::Point { - row: self.row as usize, - column: self.column as usize, - } - } -} - -impl From for Point { - fn from(point: tree_sitter::Point) -> Self { - Self { - row: point.row as u32, - column: point.column as u32, - } - } -} diff --git a/crates/editor/Cargo.toml b/crates/editor/Cargo.toml index 1baf3b56112560d6154afb75fe31b27dbbf7c25f..59ed90d460558efcfdb9c478342144ca123e66f8 100644 --- a/crates/editor/Cargo.toml +++ b/crates/editor/Cargo.toml @@ -4,7 +4,11 @@ version = "0.1.0" edition = "2018" [features] -test-support = ["buffer/test-support", "gpui/test-support"] +test-support = [ + "buffer/test-support", + "language/test-support", + "gpui/test-support", +] [dependencies] buffer = { path = "../buffer" } diff --git a/crates/language/Cargo.toml b/crates/language/Cargo.toml index 06ac60c3b24bfbe239768642a2bec6ea04d4c8c7..236c3422289aab7bf5f351f745d3bc6daa6c6185 100644 --- a/crates/language/Cargo.toml +++ b/crates/language/Cargo.toml @@ -11,25 +11,20 @@ buffer = { path = "../buffer" } clock = { path = "../clock" } gpui = { path = "../gpui" } rpc = { path = "../rpc" } -sum_tree = { path = "../sum_tree" } theme = { path = "../theme" } anyhow = "1.0.38" -arrayvec = "0.7.1" lazy_static = "1.4" log = "0.4" parking_lot = "0.11.1" rand = { version = "0.8.3", optional = true } -seahash = "4.1" serde = { version = "1", features = ["derive"] } similar = "1.3" -smallvec = { version = "1.6", features = ["union"] } smol = "1.2" tree-sitter = "0.19.5" [dev-dependencies] buffer = { path = "../buffer", features = ["test-support"] } gpui = { path = "../gpui", features = ["test-support"] } - rand = "0.8.3" tree-sitter-rust = "0.19.0" unindent = "0.1.7" diff --git a/crates/language/src/lib.rs 
b/crates/language/src/lib.rs index c7b866f003271a007dbd4598c8448add5c4071b1..d05d0811a4e094eff6e75c882c9bd8dfaef366fa 100644 --- a/crates/language/src/lib.rs +++ b/crates/language/src/lib.rs @@ -30,7 +30,6 @@ use std::{ sync::Arc, time::{Duration, Instant, SystemTime, UNIX_EPOCH}, }; -use sum_tree::Bias; use tree_sitter::{InputEdit, Parser, QueryCursor, Tree}; thread_local! { @@ -456,12 +455,12 @@ impl Buffer { start_byte: start_offset, old_end_byte: start_offset + edit.deleted_bytes(), new_end_byte: start_offset + edit.inserted_bytes(), - start_position: start_point.into(), - old_end_position: (start_point + edit.deleted_lines()).into(), + start_position: start_point.to_ts_point(), + old_end_position: (start_point + edit.deleted_lines()).to_ts_point(), new_end_position: self .as_rope() .to_point(start_offset + edit.inserted_bytes()) - .into(), + .to_ts_point(), }); delta += edit.inserted_bytes() as isize - edit.deleted_bytes() as isize; } @@ -1150,8 +1149,8 @@ impl Snapshot { let indent_capture_ix = language.indents_query.capture_index_for_name("indent"); let end_capture_ix = language.indents_query.capture_index_for_name("end"); query_cursor.set_point_range( - Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0).into() - ..Point::new(row_range.end, 0).into(), + Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0).to_ts_point() + ..Point::new(row_range.end, 0).to_ts_point(), ); let mut indentation_ranges = Vec::<(Range, &'static str)>::new(); for mat in query_cursor.matches( @@ -1165,10 +1164,10 @@ impl Snapshot { for capture in mat.captures { if Some(capture.index) == indent_capture_ix { node_kind = capture.node.kind(); - start.get_or_insert(capture.node.start_position().into()); - end.get_or_insert(capture.node.end_position().into()); + start.get_or_insert(Point::from_ts_point(capture.node.start_position())); + end.get_or_insert(Point::from_ts_point(capture.node.end_position())); } else if Some(capture.index) == end_capture_ix { - end = Some(capture.node.start_position().into()); + end = Some(Point::from_ts_point(capture.node.start_position().into())); } } @@ -1439,11 +1438,26 @@ impl Drop for QueryCursorHandle { fn drop(&mut self) { let mut cursor = self.0.take().unwrap(); cursor.set_byte_range(0..usize::MAX); - cursor.set_point_range(Point::zero().into()..Point::MAX.into()); + cursor.set_point_range(Point::zero().to_ts_point()..Point::MAX.to_ts_point()); QUERY_CURSORS.lock().push(cursor) } } +trait ToTreeSitterPoint { + fn to_ts_point(self) -> tree_sitter::Point; + fn from_ts_point(point: tree_sitter::Point) -> Self; +} + +impl ToTreeSitterPoint for Point { + fn to_ts_point(self) -> tree_sitter::Point { + tree_sitter::Point::new(self.row as usize, self.column as usize) + } + + fn from_ts_point(point: tree_sitter::Point) -> Self { + Point::new(point.row as u32, point.column as u32) + } +} + fn contiguous_ranges( values: impl IntoIterator, max_len: usize, From eea0f35d3862c38aae1a4b2b1e8513ccabf6e755 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Thu, 21 Oct 2021 11:12:19 +0200 Subject: [PATCH 5/7] Rename TextBuffer back to Buffer --- crates/buffer/src/anchor.rs | 6 ++--- crates/buffer/src/lib.rs | 21 +++++++-------- crates/buffer/src/selection.rs | 8 +++--- crates/buffer/src/tests.rs | 28 ++++++++++---------- crates/language/src/lib.rs | 48 +++++++++++++++++----------------- 5 files changed, 55 insertions(+), 56 deletions(-) diff --git a/crates/buffer/src/anchor.rs b/crates/buffer/src/anchor.rs index 
0a0d63d949cf254d4e9c74aca8615a1e393e5e1b..1ac82727df7485bb6d098a66b251ecb465cc1cc6 100644 --- a/crates/buffer/src/anchor.rs +++ b/crates/buffer/src/anchor.rs @@ -1,6 +1,6 @@ use crate::Point; -use super::{Content, TextBuffer}; +use super::{Buffer, Content}; use anyhow::Result; use std::{cmp::Ordering, ops::Range}; use sum_tree::Bias; @@ -65,7 +65,7 @@ impl Anchor { Ok(offset_comparison.then_with(|| self.bias.cmp(&other.bias))) } - pub fn bias_left(&self, buffer: &TextBuffer) -> Anchor { + pub fn bias_left(&self, buffer: &Buffer) -> Anchor { if self.bias == Bias::Left { self.clone() } else { @@ -73,7 +73,7 @@ impl Anchor { } } - pub fn bias_right(&self, buffer: &TextBuffer) -> Anchor { + pub fn bias_right(&self, buffer: &Buffer) -> Anchor { if self.bias == Bias::Right { self.clone() } else { diff --git a/crates/buffer/src/lib.rs b/crates/buffer/src/lib.rs index 715cd12eb4c7ca6988e85dde96d2c3461ff1d332..c2b1e8f6ca75dfba22065d5e2fad85d4bcae0ffe 100644 --- a/crates/buffer/src/lib.rs +++ b/crates/buffer/src/lib.rs @@ -56,7 +56,7 @@ type HashMap = std::collections::HashMap; type HashSet = std::collections::HashSet; #[derive(Clone)] -pub struct TextBuffer { +pub struct Buffer { fragments: SumTree, visible_text: Rope, deleted_text: Rope, @@ -441,8 +441,8 @@ pub struct UndoOperation { version: clock::Global, } -impl TextBuffer { - pub fn new(replica_id: u16, remote_id: u64, history: History) -> TextBuffer { +impl Buffer { + pub fn new(replica_id: u16, remote_id: u64, history: History) -> Buffer { let mut fragments = SumTree::new(); let visible_text = Rope::from(history.base_text.as_ref()); @@ -459,7 +459,7 @@ impl TextBuffer { ); } - TextBuffer { + Buffer { visible_text, deleted_text: Rope::new(), fragments, @@ -478,8 +478,7 @@ impl TextBuffer { } pub fn from_proto(replica_id: u16, message: proto::Buffer) -> Result { - let mut buffer = - TextBuffer::new(replica_id, message.id, History::new(message.content.into())); + let mut buffer = Buffer::new(replica_id, message.id, History::new(message.content.into())); let ops = message .history .into_iter() @@ -1369,7 +1368,7 @@ impl TextBuffer { } #[cfg(any(test, feature = "test-support"))] -impl TextBuffer { +impl Buffer { fn random_byte_range(&mut self, start_offset: usize, rng: &mut impl rand::Rng) -> Range { let end = self.clip_offset(rng.gen_range(start_offset..=self.len()), Bias::Right); let start = self.clip_offset(rng.gen_range(start_offset..=end), Bias::Right); @@ -1617,8 +1616,8 @@ impl<'a> From<&'a Snapshot> for Content<'a> { } } -impl<'a> From<&'a TextBuffer> for Content<'a> { - fn from(buffer: &'a TextBuffer) -> Self { +impl<'a> From<&'a Buffer> for Content<'a> { + fn from(buffer: &'a Buffer) -> Self { Self { visible_text: &buffer.visible_text, fragments: &buffer.fragments, @@ -1627,8 +1626,8 @@ impl<'a> From<&'a TextBuffer> for Content<'a> { } } -impl<'a> From<&'a mut TextBuffer> for Content<'a> { - fn from(buffer: &'a mut TextBuffer) -> Self { +impl<'a> From<&'a mut Buffer> for Content<'a> { + fn from(buffer: &'a mut Buffer) -> Self { Self { visible_text: &buffer.visible_text, fragments: &buffer.fragments, diff --git a/crates/buffer/src/selection.rs b/crates/buffer/src/selection.rs index 596c8dac56756031220e11aeb2138c2dbb442e1d..98f34865f55a4544a749ce2a8d5c9cb305ab9394 100644 --- a/crates/buffer/src/selection.rs +++ b/crates/buffer/src/selection.rs @@ -1,4 +1,4 @@ -use crate::{Anchor, Point, TextBuffer, ToOffset as _, ToPoint as _}; +use crate::{Anchor, Buffer, Point, ToOffset as _, ToPoint as _}; use std::{cmp::Ordering, mem, ops::Range}; pub 
type SelectionSetId = clock::Lamport; @@ -29,7 +29,7 @@ impl Selection { } } - pub fn set_head(&mut self, buffer: &TextBuffer, cursor: Anchor) { + pub fn set_head(&mut self, buffer: &Buffer, cursor: Anchor) { if cursor.cmp(self.tail(), buffer).unwrap() < Ordering::Equal { if !self.reversed { mem::swap(&mut self.start, &mut self.end); @@ -53,7 +53,7 @@ impl Selection { } } - pub fn point_range(&self, buffer: &TextBuffer) -> Range { + pub fn point_range(&self, buffer: &Buffer) -> Range { let start = self.start.to_point(buffer); let end = self.end.to_point(buffer); if self.reversed { @@ -63,7 +63,7 @@ impl Selection { } } - pub fn offset_range(&self, buffer: &TextBuffer) -> Range { + pub fn offset_range(&self, buffer: &Buffer) -> Range { let start = self.start.to_offset(buffer); let end = self.end.to_offset(buffer); if self.reversed { diff --git a/crates/buffer/src/tests.rs b/crates/buffer/src/tests.rs index 391dbf5ce6574f70381723d2e9f61c4359a92775..bb29f7de982257b70eaafa8ac04a0a74bec57864 100644 --- a/crates/buffer/src/tests.rs +++ b/crates/buffer/src/tests.rs @@ -10,7 +10,7 @@ use std::{ #[test] fn test_edit() { - let mut buffer = TextBuffer::new(0, 0, History::new("abc".into())); + let mut buffer = Buffer::new(0, 0, History::new("abc".into())); assert_eq!(buffer.text(), "abc"); buffer.edit(vec![3..3], "def"); assert_eq!(buffer.text(), "abcdef"); @@ -34,7 +34,7 @@ fn test_random_edits(mut rng: StdRng) { let mut reference_string = RandomCharIter::new(&mut rng) .take(reference_string_len) .collect::(); - let mut buffer = TextBuffer::new(0, 0, History::new(reference_string.clone().into())); + let mut buffer = Buffer::new(0, 0, History::new(reference_string.clone().into())); buffer.history.group_interval = Duration::from_millis(rng.gen_range(0..=200)); let mut buffer_versions = Vec::new(); log::info!( @@ -101,7 +101,7 @@ fn test_random_edits(mut rng: StdRng) { #[test] fn test_line_len() { - let mut buffer = TextBuffer::new(0, 0, History::new("".into())); + let mut buffer = Buffer::new(0, 0, History::new("".into())); buffer.edit(vec![0..0], "abcd\nefg\nhij"); buffer.edit(vec![12..12], "kl\nmno"); buffer.edit(vec![18..18], "\npqrs\n"); @@ -117,7 +117,7 @@ fn test_line_len() { #[test] fn test_text_summary_for_range() { - let buffer = TextBuffer::new(0, 0, History::new("ab\nefg\nhklm\nnopqrs\ntuvwxyz".into())); + let buffer = Buffer::new(0, 0, History::new("ab\nefg\nhklm\nnopqrs\ntuvwxyz".into())); assert_eq!( buffer.text_summary_for_range(1..3), TextSummary { @@ -177,7 +177,7 @@ fn test_text_summary_for_range() { #[test] fn test_chars_at() { - let mut buffer = TextBuffer::new(0, 0, History::new("".into())); + let mut buffer = Buffer::new(0, 0, History::new("".into())); buffer.edit(vec![0..0], "abcd\nefgh\nij"); buffer.edit(vec![12..12], "kl\nmno"); buffer.edit(vec![18..18], "\npqrs"); @@ -199,7 +199,7 @@ fn test_chars_at() { assert_eq!(chars.collect::(), "PQrs"); // Regression test: - let mut buffer = TextBuffer::new(0, 0, History::new("".into())); + let mut buffer = Buffer::new(0, 0, History::new("".into())); buffer.edit(vec![0..0], "[workspace]\nmembers = [\n \"xray_core\",\n \"xray_server\",\n \"xray_cli\",\n \"xray_wasm\",\n]\n"); buffer.edit(vec![60..60], "\n"); @@ -209,7 +209,7 @@ fn test_chars_at() { #[test] fn test_anchors() { - let mut buffer = TextBuffer::new(0, 0, History::new("".into())); + let mut buffer = Buffer::new(0, 0, History::new("".into())); buffer.edit(vec![0..0], "abc"); let left_anchor = buffer.anchor_before(2); let right_anchor = buffer.anchor_after(2); @@ -345,7 +345,7 @@ 
fn test_anchors() { #[test] fn test_anchors_at_start_and_end() { - let mut buffer = TextBuffer::new(0, 0, History::new("".into())); + let mut buffer = Buffer::new(0, 0, History::new("".into())); let before_start_anchor = buffer.anchor_before(0); let after_end_anchor = buffer.anchor_after(0); @@ -368,7 +368,7 @@ fn test_anchors_at_start_and_end() { #[test] fn test_undo_redo() { - let mut buffer = TextBuffer::new(0, 0, History::new("1234".into())); + let mut buffer = Buffer::new(0, 0, History::new("1234".into())); // Set group interval to zero so as to not group edits in the undo stack. buffer.history.group_interval = Duration::from_secs(0); @@ -405,7 +405,7 @@ fn test_undo_redo() { #[test] fn test_history() { let mut now = Instant::now(); - let mut buffer = TextBuffer::new(0, 0, History::new("123456".into())); + let mut buffer = Buffer::new(0, 0, History::new("123456".into())); let set_id = if let Operation::UpdateSelections { set_id, .. } = buffer.add_selection_set(buffer.selections_from_ranges(vec![4..4]).unwrap()) @@ -472,9 +472,9 @@ fn test_history() { fn test_concurrent_edits() { let text = "abcdef"; - let mut buffer1 = TextBuffer::new(1, 0, History::new(text.into())); - let mut buffer2 = TextBuffer::new(2, 0, History::new(text.into())); - let mut buffer3 = TextBuffer::new(3, 0, History::new(text.into())); + let mut buffer1 = Buffer::new(1, 0, History::new(text.into())); + let mut buffer2 = Buffer::new(2, 0, History::new(text.into())); + let mut buffer3 = Buffer::new(3, 0, History::new(text.into())); let buf1_op = buffer1.edit(vec![1..2], "12"); assert_eq!(buffer1.text(), "a12cdef"); @@ -513,7 +513,7 @@ fn test_random_concurrent_edits(mut rng: StdRng) { let mut network = Network::new(rng.clone()); for i in 0..peers { - let mut buffer = TextBuffer::new(i as ReplicaId, 0, History::new(base_text.clone().into())); + let mut buffer = Buffer::new(i as ReplicaId, 0, History::new(base_text.clone().into())); buffer.history.group_interval = Duration::from_millis(rng.gen_range(0..=200)); buffers.push(buffer); replica_ids.push(i as u16); diff --git a/crates/language/src/lib.rs b/crates/language/src/lib.rs index d05d0811a4e094eff6e75c882c9bd8dfaef366fa..d364513084674de71d29036351aa7d0a5a8ecfe3 100644 --- a/crates/language/src/lib.rs +++ b/crates/language/src/lib.rs @@ -8,7 +8,7 @@ pub use self::{ language::{BracketPair, Language, LanguageConfig, LanguageRegistry}, }; use anyhow::{anyhow, Result}; -pub use buffer::*; +pub use buffer::{Buffer as TextBuffer, *}; use clock::ReplicaId; use gpui::{AppContext, Entity, ModelContext, MutableAppContext, Task}; use lazy_static::lazy_static; @@ -44,7 +44,7 @@ lazy_static! 
{ const INDENT_SIZE: u32 = 4; pub struct Buffer { - buffer: TextBuffer, + text: TextBuffer, file: Option>, saved_version: clock::Global, saved_mtime: SystemTime, @@ -223,7 +223,7 @@ impl Buffer { } let mut result = Self { - buffer, + text: buffer, saved_mtime, saved_version: clock::Global::new(), file, @@ -244,7 +244,7 @@ impl Buffer { pub fn snapshot(&self) -> Snapshot { Snapshot { - text: self.buffer.snapshot(), + text: self.text.snapshot(), tree: self.syntax_tree(), is_parsing: self.parsing_in_background, language: self.language.clone(), @@ -630,9 +630,9 @@ impl Buffer { .selections .iter() .map(|selection| { - let start_point = selection.start.to_point(&self.buffer); + let start_point = selection.start.to_point(&self.text); if start_point.column == 0 { - let end_point = selection.end.to_point(&self.buffer); + let end_point = selection.end.to_point(&self.text); let delta = Point::new( 0, indent_columns.get(&start_point.row).copied().unwrap_or(0), @@ -801,7 +801,7 @@ impl Buffer { selection_set_ids: impl IntoIterator, now: Instant, ) -> Result<()> { - self.buffer.start_transaction_at(selection_set_ids, now) + self.text.start_transaction_at(selection_set_ids, now) } pub fn end_transaction( @@ -818,7 +818,7 @@ impl Buffer { now: Instant, cx: &mut ModelContext, ) -> Result<()> { - if let Some(start_version) = self.buffer.end_transaction_at(selection_set_ids, now) { + if let Some(start_version) = self.text.end_transaction_at(selection_set_ids, now) { cx.notify(); let was_dirty = start_version != self.saved_version; let edited = self.edits_since(start_version).next().is_some(); @@ -905,7 +905,7 @@ impl Buffer { let first_newline_ix = new_text.find('\n'); let new_text_len = new_text.len(); - let edit = self.buffer.edit(ranges.iter().cloned(), new_text); + let edit = self.text.edit(ranges.iter().cloned(), new_text); if let Some((before_edit, edited)) = autoindent_request { let mut inserted = None; @@ -920,7 +920,7 @@ impl Buffer { } let selection_set_ids = self - .buffer + .text .peek_undo_stack() .unwrap() .starting_selection_set_ids() @@ -949,7 +949,7 @@ impl Buffer { selections: impl Into>, cx: &mut ModelContext, ) -> SelectionSetId { - let operation = self.buffer.add_selection_set(selections); + let operation = self.text.add_selection_set(selections); if let Operation::UpdateSelections { set_id, .. 
} = &operation { let set_id = *set_id; cx.notify(); @@ -966,7 +966,7 @@ impl Buffer { selections: impl Into>, cx: &mut ModelContext, ) -> Result<()> { - let operation = self.buffer.update_selection_set(set_id, selections)?; + let operation = self.text.update_selection_set(set_id, selections)?; cx.notify(); self.send_operation(operation, cx); Ok(()) @@ -977,7 +977,7 @@ impl Buffer { set_id: Option, cx: &mut ModelContext, ) -> Result<()> { - let operation = self.buffer.set_active_selection_set(set_id)?; + let operation = self.text.set_active_selection_set(set_id)?; self.send_operation(operation, cx); Ok(()) } @@ -987,7 +987,7 @@ impl Buffer { set_id: SelectionSetId, cx: &mut ModelContext, ) -> Result<()> { - let operation = self.buffer.remove_selection_set(set_id)?; + let operation = self.text.remove_selection_set(set_id)?; cx.notify(); self.send_operation(operation, cx); Ok(()) @@ -1003,7 +1003,7 @@ impl Buffer { let was_dirty = self.is_dirty(); let old_version = self.version.clone(); - self.buffer.apply_ops(ops)?; + self.text.apply_ops(ops)?; cx.notify(); if self.edits_since(old_version).next().is_some() { @@ -1027,7 +1027,7 @@ impl Buffer { } pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut ModelContext) { - self.buffer.remove_peer(replica_id); + self.text.remove_peer(replica_id); cx.notify(); } @@ -1035,7 +1035,7 @@ impl Buffer { let was_dirty = self.is_dirty(); let old_version = self.version.clone(); - for operation in self.buffer.undo() { + for operation in self.text.undo() { self.send_operation(operation, cx); } @@ -1050,7 +1050,7 @@ impl Buffer { let was_dirty = self.is_dirty(); let old_version = self.version.clone(); - for operation in self.buffer.redo() { + for operation in self.text.redo() { self.send_operation(operation, cx); } @@ -1068,14 +1068,14 @@ impl Buffer { where T: rand::Rng, { - self.buffer.randomly_edit(rng, old_range_count); + self.text.randomly_edit(rng, old_range_count); } pub fn randomly_mutate(&mut self, rng: &mut T) where T: rand::Rng, { - self.buffer.randomly_mutate(rng); + self.text.randomly_mutate(rng); } } @@ -1092,7 +1092,7 @@ impl Entity for Buffer { impl Clone for Buffer { fn clone(&self) -> Self { Self { - buffer: self.buffer.clone(), + text: self.text.clone(), saved_version: self.saved_version.clone(), saved_mtime: self.saved_mtime, file: self.file.as_ref().map(|f| f.boxed_clone()), @@ -1114,19 +1114,19 @@ impl Deref for Buffer { type Target = TextBuffer; fn deref(&self) -> &Self::Target { - &self.buffer + &self.text } } impl<'a> From<&'a Buffer> for Content<'a> { fn from(buffer: &'a Buffer) -> Self { - Self::from(&buffer.buffer) + Self::from(&buffer.text) } } impl<'a> From<&'a mut Buffer> for Content<'a> { fn from(buffer: &'a mut Buffer) -> Self { - Self::from(&buffer.buffer) + Self::from(&buffer.text) } } From eb9d7c8660d8ee110f8574d07dea870e0bfa453e Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Thu, 21 Oct 2021 12:05:44 +0200 Subject: [PATCH 6/7] Update buffer's saved mtime when file is reloaded after on-disk change Co-Authored-By: Antonio Scandurra Co-Authored-By: Nathan Sobo --- crates/language/src/lib.rs | 71 +++++++++++----------------------- crates/project/src/worktree.rs | 36 ++++++----------- 2 files changed, 35 insertions(+), 72 deletions(-) diff --git a/crates/language/src/lib.rs b/crates/language/src/lib.rs index d364513084674de71d29036351aa7d0a5a8ecfe3..090ea8d9c8c7840e53815f0a991b93d28e990a39 100644 --- a/crates/language/src/lib.rs +++ b/crates/language/src/lib.rs @@ -300,45 +300,34 @@ impl Buffer { cx.emit(Event::Saved); 
} + pub fn file_renamed(&self, cx: &mut ModelContext) { + cx.emit(Event::FileHandleChanged); + } + pub fn file_updated( &mut self, - path: Arc, - mtime: SystemTime, - new_text: Option, + new_text: String, cx: &mut ModelContext, - ) { - let file = self.file.as_mut().unwrap(); - let mut changed = false; - if path != *file.path() { - file.set_path(path); - changed = true; - } - - if mtime != file.mtime() { - file.set_mtime(mtime); - changed = true; - if let Some(new_text) = new_text { - if self.version == self.saved_version { - cx.spawn(|this, mut cx| async move { - let diff = this - .read_with(&cx, |this, cx| this.diff(new_text.into(), cx)) - .await; - this.update(&mut cx, |this, cx| { - if this.apply_diff(diff, cx) { - this.saved_version = this.version.clone(); - this.saved_mtime = mtime; - cx.emit(Event::Reloaded); - } - }); - }) - .detach(); - } - } - } - - if changed { + ) -> Option> { + if let Some(file) = self.file.as_ref() { cx.emit(Event::FileHandleChanged); + let mtime = file.mtime(); + if self.version == self.saved_version { + return Some(cx.spawn(|this, mut cx| async move { + let diff = this + .read_with(&cx, |this, cx| this.diff(new_text.into(), cx)) + .await; + this.update(&mut cx, |this, cx| { + if this.apply_diff(diff, cx) { + this.saved_version = this.version.clone(); + this.saved_mtime = mtime; + cx.emit(Event::Reloaded); + } + }); + })); + } } + None } pub fn file_deleted(&mut self, cx: &mut ModelContext) { @@ -740,20 +729,6 @@ impl Buffer { }) } - pub fn set_text_from_disk(&self, new_text: Arc, cx: &mut ModelContext) -> Task<()> { - cx.spawn(|this, mut cx| async move { - let diff = this - .read_with(&cx, |this, cx| this.diff(new_text, cx)) - .await; - - this.update(&mut cx, |this, cx| { - if this.apply_diff(diff, cx) { - this.saved_version = this.version.clone(); - } - }); - }) - } - fn apply_diff(&mut self, diff: Diff, cx: &mut ModelContext) -> bool { if self.version == diff.base_version { self.start_transaction(None).unwrap(); diff --git a/crates/project/src/worktree.rs b/crates/project/src/worktree.rs index 6912f66120c57fbd503d8af59fbf2b6bdf7d2ba5..72bb9c405b1940c1c539cf69ea7ecb1644925fed 100644 --- a/crates/project/src/worktree.rs +++ b/crates/project/src/worktree.rs @@ -594,20 +594,12 @@ impl Worktree { let buffer_is_clean = !buffer.is_dirty(); if let Some(file) = buffer.file_mut() { - let mut file_changed = false; - if let Some(entry) = file .entry_id() .and_then(|entry_id| self.entry_for_id(entry_id)) { - if entry.path != *file.path() { - file.set_path(entry.path.clone()); - file_changed = true; - } - if entry.mtime != file.mtime() { file.set_mtime(entry.mtime); - file_changed = true; if let Some(worktree) = self.as_local() { if buffer_is_clean { let abs_path = worktree.absolutize(file.path().as_ref()); @@ -615,6 +607,11 @@ impl Worktree { } } } + + if entry.path != *file.path() { + file.set_path(entry.path.clone()); + buffer.file_renamed(cx); + } } else if let Some(entry) = self.entry_for_path(file.path().as_ref()) { file.set_entry_id(Some(entry.id)); file.set_mtime(entry.mtime); @@ -624,17 +621,9 @@ impl Worktree { refresh_buffer(abs_path, &worktree.fs, cx); } } - file_changed = true; } else if !file.is_deleted() { - if buffer_is_clean { - cx.emit(language::Event::Dirtied); - } file.set_entry_id(None); - file_changed = true; - } - - if file_changed { - cx.emit(language::Event::FileHandleChanged); + buffer.file_deleted(cx); } } }); @@ -1186,15 +1175,14 @@ fn build_gitignore(abs_path: &Path, fs: &dyn Fs) -> Result { pub fn refresh_buffer(abs_path: PathBuf, fs: &Arc, 
cx: &mut ModelContext) { let fs = fs.clone(); cx.spawn(|buffer, mut cx| async move { - let new_text = fs.load(&abs_path).await; - match new_text { + match fs.load(&abs_path).await { Err(error) => log::error!("error refreshing buffer after file changed: {}", error), Ok(new_text) => { - buffer - .update(&mut cx, |buffer, cx| { - buffer.set_text_from_disk(new_text.into(), cx) - }) - .await; + if let Some(task) = + buffer.update(&mut cx, |buffer, cx| buffer.file_updated(new_text, cx)) + { + task.await; + } } } }) From 282195b13e93e7fe72380fb8722dcec0db0b6d76 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Thu, 21 Oct 2021 13:08:11 +0200 Subject: [PATCH 7/7] Assign new file handles on buffers when their files change on disk Co-Authored-By: Antonio Scandurra Co-Authored-By: Nathan Sobo --- Cargo.lock | 2 + crates/buffer/src/lib.rs | 10 ++-- crates/language/Cargo.toml | 4 +- crates/language/src/lib.rs | 89 +++++++++++++++++++------------- crates/project/Cargo.toml | 3 +- crates/project/src/worktree.rs | 93 +++++++++++++--------------------- 6 files changed, 101 insertions(+), 100 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 29ae3f3cfc068f9ffa3f636ba9dac7e586357f37..5be439f9254678ac15919dac3faa3b98281c1c59 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2815,6 +2815,7 @@ dependencies = [ "anyhow", "buffer", "clock", + "futures", "gpui", "lazy_static", "log", @@ -2828,6 +2829,7 @@ dependencies = [ "tree-sitter", "tree-sitter-rust", "unindent", + "util", ] [[package]] diff --git a/crates/buffer/src/lib.rs b/crates/buffer/src/lib.rs index c2b1e8f6ca75dfba22065d5e2fad85d4bcae0ffe..a5771ad4c0f55508bad6a93d73b674230a627e29 100644 --- a/crates/buffer/src/lib.rs +++ b/crates/buffer/src/lib.rs @@ -17,12 +17,10 @@ pub use point::*; pub use random_char_iter::*; pub use rope::{Chunks, Rope, TextSummary}; use rpc::proto; -use seahash::SeaHasher; pub use selection::*; use std::{ cmp, convert::{TryFrom, TryInto}, - hash::BuildHasher, iter::Iterator, ops::Range, str, @@ -32,14 +30,16 @@ use std::{ pub use sum_tree::Bias; use sum_tree::{FilterCursor, SumTree}; +#[cfg(any(test, feature = "test-support"))] #[derive(Clone, Default)] struct DeterministicState; -impl BuildHasher for DeterministicState { - type Hasher = SeaHasher; +#[cfg(any(test, feature = "test-support"))] +impl std::hash::BuildHasher for DeterministicState { + type Hasher = seahash::SeaHasher; fn build_hasher(&self) -> Self::Hasher { - SeaHasher::new() + seahash::SeaHasher::new() } } diff --git a/crates/language/Cargo.toml b/crates/language/Cargo.toml index 236c3422289aab7bf5f351f745d3bc6daa6c6185..3cbfb3ae1253074092f5de0066822f3d5cd050c2 100644 --- a/crates/language/Cargo.toml +++ b/crates/language/Cargo.toml @@ -4,7 +4,7 @@ version = "0.1.0" edition = "2018" [features] -test-support = ["rand"] +test-support = ["rand", "buffer/test-support"] [dependencies] buffer = { path = "../buffer" } @@ -12,7 +12,9 @@ clock = { path = "../clock" } gpui = { path = "../gpui" } rpc = { path = "../rpc" } theme = { path = "../theme" } +util = { path = "../util" } anyhow = "1.0.38" +futures = "0.3" lazy_static = "1.4" log = "0.4" parking_lot = "0.11.1" diff --git a/crates/language/src/lib.rs b/crates/language/src/lib.rs index 090ea8d9c8c7840e53815f0a991b93d28e990a39..b80eed7e331d95b01592c8ae32459d9c7e14da5b 100644 --- a/crates/language/src/lib.rs +++ b/crates/language/src/lib.rs @@ -10,6 +10,7 @@ pub use self::{ use anyhow::{anyhow, Result}; pub use buffer::{Buffer as TextBuffer, *}; use clock::ReplicaId; +use futures::FutureExt as _; use gpui::{AppContext, 
Entity, ModelContext, MutableAppContext, Task}; use lazy_static::lazy_static; use parking_lot::Mutex; @@ -31,6 +32,7 @@ use std::{ time::{Duration, Instant, SystemTime, UNIX_EPOCH}, }; use tree_sitter::{InputEdit, Parser, QueryCursor, Tree}; +use util::TryFutureExt as _; thread_local! { static PARSER: RefCell = RefCell::new(Parser::new()); @@ -83,16 +85,10 @@ pub trait File { fn entry_id(&self) -> Option; - fn set_entry_id(&mut self, entry_id: Option); - fn mtime(&self) -> SystemTime; - fn set_mtime(&mut self, mtime: SystemTime); - fn path(&self) -> &Arc; - fn set_path(&mut self, path: Arc); - fn full_path(&self, cx: &AppContext) -> PathBuf; /// Returns the last component of this handle's absolute path. If this handle refers to the root @@ -109,6 +105,8 @@ pub trait File { cx: &mut MutableAppContext, ) -> Task>; + fn load_local(&self, cx: &AppContext) -> Option>>; + fn buffer_updated(&self, buffer_id: u64, operation: Operation, cx: &mut MutableAppContext); fn buffer_removed(&self, buffer_id: u64, cx: &mut MutableAppContext); @@ -256,10 +254,6 @@ impl Buffer { self.file.as_deref() } - pub fn file_mut(&mut self) -> Option<&mut dyn File> { - self.file.as_mut().map(|f| f.deref_mut() as &mut dyn File) - } - pub fn save( &mut self, cx: &mut ModelContext, @@ -300,41 +294,64 @@ impl Buffer { cx.emit(Event::Saved); } - pub fn file_renamed(&self, cx: &mut ModelContext) { - cx.emit(Event::FileHandleChanged); - } - pub fn file_updated( &mut self, - new_text: String, + new_file: Box, cx: &mut ModelContext, ) -> Option> { - if let Some(file) = self.file.as_ref() { - cx.emit(Event::FileHandleChanged); - let mtime = file.mtime(); - if self.version == self.saved_version { - return Some(cx.spawn(|this, mut cx| async move { - let diff = this - .read_with(&cx, |this, cx| this.diff(new_text.into(), cx)) - .await; - this.update(&mut cx, |this, cx| { - if this.apply_diff(diff, cx) { - this.saved_version = this.version.clone(); - this.saved_mtime = mtime; - cx.emit(Event::Reloaded); + let old_file = self.file.as_ref()?; + let mut file_changed = false; + let mut task = None; + + if new_file.path() != old_file.path() { + file_changed = true; + } + + if new_file.is_deleted() { + if !old_file.is_deleted() { + file_changed = true; + if !self.is_dirty() { + cx.emit(Event::Dirtied); + } + } + } else { + let new_mtime = new_file.mtime(); + if new_mtime != old_file.mtime() { + file_changed = true; + + if !self.is_dirty() { + task = Some(cx.spawn(|this, mut cx| { + async move { + let new_text = this.read_with(&cx, |this, cx| { + this.file.as_ref().and_then(|file| file.load_local(cx)) + }); + if let Some(new_text) = new_text { + let new_text = new_text.await?; + let diff = this + .read_with(&cx, |this, cx| this.diff(new_text.into(), cx)) + .await; + this.update(&mut cx, |this, cx| { + if this.apply_diff(diff, cx) { + this.saved_version = this.version.clone(); + this.saved_mtime = new_mtime; + cx.emit(Event::Reloaded); + } + }); + } + Ok(()) } - }); - })); + .log_err() + .map(drop) + })); + } } } - None - } - pub fn file_deleted(&mut self, cx: &mut ModelContext) { - if self.version == self.saved_version { - cx.emit(Event::Dirtied); + if file_changed { + cx.emit(Event::FileHandleChanged); } - cx.emit(Event::FileHandleChanged); + self.file = Some(new_file); + task } pub fn close(&mut self, cx: &mut ModelContext) { diff --git a/crates/project/Cargo.toml b/crates/project/Cargo.toml index fb63a4c21c61249ee99b99a1d80fec03a88ad6b4..158f521f291c8af7dc75c81ad61b7ae9e85b16fc 100644 --- a/crates/project/Cargo.toml +++ 
b/crates/project/Cargo.toml @@ -4,7 +4,7 @@ version = "0.1.0" edition = "2018" [features] -test-support = [] +test-support = ["language/test-support", "buffer/test-support"] [dependencies] buffer = { path = "../buffer" } @@ -34,6 +34,7 @@ toml = "0.5" [dev-dependencies] client = { path = "../client", features = ["test-support"] } +gpui = { path = "../gpui", features = ["test-support"] } language = { path = "../language", features = ["test-support"] } util = { path = "../util", features = ["test-support"] } rpc = { path = "../rpc", features = ["test-support"] } diff --git a/crates/project/src/worktree.rs b/crates/project/src/worktree.rs index 72bb9c405b1940c1c539cf69ea7ecb1644925fed..0ef60a7408556b9b7f31798fbcb3c66b5ed142cb 100644 --- a/crates/project/src/worktree.rs +++ b/crates/project/src/worktree.rs @@ -587,43 +587,40 @@ impl Worktree { } }; + let worktree_handle = cx.handle(); let mut buffers_to_delete = Vec::new(); for (buffer_id, buffer) in open_buffers { if let Some(buffer) = buffer.upgrade(cx) { buffer.update(cx, |buffer, cx| { - let buffer_is_clean = !buffer.is_dirty(); - - if let Some(file) = buffer.file_mut() { - if let Some(entry) = file + if let Some(old_file) = buffer.file() { + let new_file = if let Some(entry) = old_file .entry_id() .and_then(|entry_id| self.entry_for_id(entry_id)) { - if entry.mtime != file.mtime() { - file.set_mtime(entry.mtime); - if let Some(worktree) = self.as_local() { - if buffer_is_clean { - let abs_path = worktree.absolutize(file.path().as_ref()); - refresh_buffer(abs_path, &worktree.fs, cx); - } - } + File { + entry_id: Some(entry.id), + mtime: entry.mtime, + path: entry.path.clone(), + worktree: worktree_handle.clone(), } - - if entry.path != *file.path() { - file.set_path(entry.path.clone()); - buffer.file_renamed(cx); + } else if let Some(entry) = self.entry_for_path(old_file.path().as_ref()) { + File { + entry_id: Some(entry.id), + mtime: entry.mtime, + path: entry.path.clone(), + worktree: worktree_handle.clone(), } - } else if let Some(entry) = self.entry_for_path(file.path().as_ref()) { - file.set_entry_id(Some(entry.id)); - file.set_mtime(entry.mtime); - if let Some(worktree) = self.as_local() { - if buffer_is_clean { - let abs_path = worktree.absolutize(file.path().as_ref()); - refresh_buffer(abs_path, &worktree.fs, cx); - } + } else { + File { + entry_id: None, + path: old_file.path().clone(), + mtime: old_file.mtime(), + worktree: worktree_handle.clone(), } - } else if !file.is_deleted() { - file.set_entry_id(None); - buffer.file_deleted(cx); + }; + + if let Some(task) = buffer.file_updated(Box::new(new_file), cx) { + task.detach(); } } }); @@ -1172,23 +1169,6 @@ fn build_gitignore(abs_path: &Path, fs: &dyn Fs) -> Result { Ok(builder.build()?) 
} -pub fn refresh_buffer(abs_path: PathBuf, fs: &Arc, cx: &mut ModelContext) { - let fs = fs.clone(); - cx.spawn(|buffer, mut cx| async move { - match fs.load(&abs_path).await { - Err(error) => log::error!("error refreshing buffer after file changed: {}", error), - Ok(new_text) => { - if let Some(task) = - buffer.update(&mut cx, |buffer, cx| buffer.file_updated(new_text, cx)) - { - task.await; - } - } - } - }) - .detach() -} - impl Deref for LocalWorktree { type Target = Snapshot; @@ -1787,26 +1767,14 @@ impl language::File for File { self.entry_id } - fn set_entry_id(&mut self, entry_id: Option) { - self.entry_id = entry_id; - } - fn mtime(&self) -> SystemTime { self.mtime } - fn set_mtime(&mut self, mtime: SystemTime) { - self.mtime = mtime; - } - fn path(&self) -> &Arc { &self.path } - fn set_path(&mut self, path: Arc) { - self.path = path; - } - fn full_path(&self, cx: &AppContext) -> PathBuf { let worktree = self.worktree.read(cx); let mut full_path = PathBuf::new(); @@ -1875,6 +1843,16 @@ impl language::File for File { }) } + fn load_local(&self, cx: &AppContext) -> Option>> { + let worktree = self.worktree.read(cx).as_local()?; + let abs_path = worktree.absolutize(&self.path); + let fs = worktree.fs.clone(); + Some( + cx.background() + .spawn(async move { fs.load(&abs_path).await }), + ) + } + fn buffer_updated(&self, buffer_id: u64, operation: Operation, cx: &mut MutableAppContext) { self.worktree.update(cx, |worktree, cx| { if let Some((rpc, remote_id)) = match worktree { @@ -3430,12 +3408,13 @@ mod tests { buffer.update(&mut cx, |buffer, cx| { buffer.edit(vec![0..0], " ", cx); assert!(buffer.is_dirty()); + assert!(!buffer.has_conflict()); }); // Change the file on disk again, adding blank lines to the beginning. fs::write(&abs_path, "\n\n\nAAAA\naaa\nBB\nbbbbb\n").unwrap(); - // Becaues the buffer is modified, it doesn't reload from disk, but is + // Because the buffer is modified, it doesn't reload from disk, but is // marked as having a conflict. buffer .condition(&cx, |buffer, _| buffer.has_conflict())