diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index f3aa166a691b8a29ec8d174b1c9503afbaafc6a4..40576d90c54cd80e637c536ee990496e3fc1c396 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -140,7 +140,6 @@ use mouse_context_menu::MouseContextMenu; use movement::TextLayoutDetails; use multi_buffer::{ ExcerptInfo, ExpandExcerptDirection, MultiBufferDiffHunk, MultiBufferPoint, MultiBufferRow, - ToOffsetUtf16, }; use parking_lot::Mutex; use persistence::DB; diff --git a/crates/editor/src/items.rs b/crates/editor/src/items.rs index 28a416925672a937a163e85fcaa59066529481b1..6dab57db52700bc499376abb0ab80e9cdb45e5e9 100644 --- a/crates/editor/src/items.rs +++ b/crates/editor/src/items.rs @@ -938,8 +938,9 @@ impl Item for Editor { fn breadcrumbs(&self, variant: &Theme, cx: &App) -> Option> { let cursor = self.selections.newest_anchor().head(); let multibuffer = &self.buffer().read(cx); - let (buffer_id, symbols) = - multibuffer.symbols_containing(cursor, Some(variant.syntax()), cx)?; + let (buffer_id, symbols) = multibuffer + .read(cx) + .symbols_containing(cursor, Some(variant.syntax()))?; let buffer = multibuffer.buffer(buffer_id)?; let buffer = buffer.read(cx); diff --git a/crates/editor/src/test/editor_lsp_test_context.rs b/crates/editor/src/test/editor_lsp_test_context.rs index 5a850bf4cff924b85ea5599c3d75c2b602b4dd1d..3132e2e6d5976754d0bdb7fea312fa152d4c35ac 100644 --- a/crates/editor/src/test/editor_lsp_test_context.rs +++ b/crates/editor/src/test/editor_lsp_test_context.rs @@ -19,7 +19,6 @@ use language::{ point_to_lsp, }; use lsp::{notification, request}; -use multi_buffer::ToPointUtf16; use project::Project; use smol::stream::StreamExt; use workspace::{AppState, Workspace, WorkspaceHandle}; diff --git a/crates/language/src/buffer.rs b/crates/language/src/buffer.rs index 973add14f33a3a9554df4a20c55aff3eb3453683..6e4007fdae2ad4af4c6ab56b82bff78c196b2d73 100644 --- a/crates/language/src/buffer.rs +++ b/crates/language/src/buffer.rs @@ -2078,12 +2078,15 @@ impl Buffer { } } + /// Set the change bit for all "listeners". 
fn was_changed(&mut self) { self.change_bits.retain(|change_bit| { - change_bit.upgrade().is_some_and(|bit| { - bit.replace(true); - true - }) + change_bit + .upgrade() + .inspect(|bit| { + _ = bit.replace(true); + }) + .is_some() }); } diff --git a/crates/multi_buffer/src/anchor.rs b/crates/multi_buffer/src/anchor.rs index a2498cb02fb836c6a70af9407d2a4e520c9d3d3b..d5009172084d6d683f722a8ad2aa5b8b21ae0493 100644 --- a/crates/multi_buffer/src/anchor.rs +++ b/crates/multi_buffer/src/anchor.rs @@ -1,4 +1,4 @@ -use super::{ExcerptId, MultiBufferSnapshot, ToOffset, ToOffsetUtf16, ToPoint}; +use super::{ExcerptId, MultiBufferSnapshot, ToOffset, ToPoint}; use language::{OffsetUtf16, Point, TextDimension}; use std::{ cmp::Ordering, @@ -185,9 +185,6 @@ impl ToOffset for Anchor { fn to_offset(&self, snapshot: &MultiBufferSnapshot) -> usize { self.summary(snapshot) } -} - -impl ToOffsetUtf16 for Anchor { fn to_offset_utf16(&self, snapshot: &MultiBufferSnapshot) -> OffsetUtf16 { self.summary(snapshot) } @@ -197,6 +194,9 @@ impl ToPoint for Anchor { fn to_point<'a>(&self, snapshot: &MultiBufferSnapshot) -> Point { self.summary(snapshot) } + fn to_point_utf16(&self, snapshot: &MultiBufferSnapshot) -> rope::PointUtf16 { + self.summary(snapshot) + } } pub trait AnchorRangeExt { diff --git a/crates/multi_buffer/src/multi_buffer.rs b/crates/multi_buffer/src/multi_buffer.rs index 646d7fce825c05204c07f42619d5f9964d5cd321..0163a49c95eeea5372a61824d2754a233ec07740 100644 --- a/crates/multi_buffer/src/multi_buffer.rs +++ b/crates/multi_buffer/src/multi_buffer.rs @@ -1,7 +1,11 @@ mod anchor; #[cfg(test)] mod multi_buffer_tests; +mod path_key; mod position; +mod transaction; + +use self::transaction::History; pub use anchor::{Anchor, AnchorRangeExt, Offset}; pub use position::{TypedOffset, TypedPoint, TypedRow}; @@ -13,7 +17,7 @@ use buffer_diff::{ }; use clock::ReplicaId; use collections::{BTreeMap, Bound, HashMap, HashSet}; -use gpui::{App, AppContext as _, Context, Entity, EntityId, EventEmitter, Task}; +use gpui::{App, Context, Entity, EntityId, EventEmitter}; use itertools::Itertools; use language::{ AutoindentMode, Buffer, BufferChunks, BufferRow, BufferSnapshot, Capability, CharClassifier, @@ -24,6 +28,9 @@ use language::{ language_settings::{LanguageSettings, language_settings}, }; +#[cfg(any(test, feature = "test-support"))] +use gpui::AppContext as _; + use rope::DimensionPair; use smallvec::SmallVec; use smol::future::yield_now; @@ -40,7 +47,7 @@ use std::{ rc::Rc, str, sync::Arc, - time::{Duration, Instant}, + time::Duration, }; use sum_tree::{Bias, Cursor, Dimension, Dimensions, SumTree, Summary, TreeMap}; use text::{ @@ -49,9 +56,9 @@ use text::{ subscription::{Subscription, Topic}, }; use theme::SyntaxTheme; -use util::{post_inc, rel_path::RelPath}; +use util::post_inc; -const NEWLINES: &[u8] = &[b'\n'; rope::Chunk::MASK_BITS]; +pub use self::path_key::PathKey; #[derive(Debug, Default, Clone, Copy, Hash, PartialEq, Eq, PartialOrd, Ord)] pub struct ExcerptId(u32); @@ -163,35 +170,6 @@ impl MultiBufferDiffHunk { } } -#[derive(PartialEq, Eq, Ord, PartialOrd, Clone, Hash, Debug)] -pub struct PathKey { - // Used by the derived PartialOrd & Ord - pub sort_prefix: Option, - pub path: Arc, -} - -impl PathKey { - pub fn with_sort_prefix(sort_prefix: u64, path: Arc) -> Self { - Self { - sort_prefix: Some(sort_prefix), - path, - } - } - - pub fn for_buffer(buffer: &Entity, cx: &App) -> Self { - if let Some(file) = buffer.read(cx).file() { - Self::with_sort_prefix(file.worktree_id(cx).to_proto(), 
file.path().clone()) - } else { - Self { - sort_prefix: None, - path: RelPath::unix(&buffer.entity_id().to_string()) - .unwrap() - .into_arc(), - } - } - } -} - pub type MultiBufferPoint = Point; type ExcerptOffset = TypedOffset; type ExcerptPoint = TypedPoint; @@ -213,37 +191,13 @@ impl std::ops::Add for MultiBufferRow { } } -#[derive(Clone)] -struct History { - next_transaction_id: TransactionId, - undo_stack: Vec, - redo_stack: Vec, - transaction_depth: usize, - group_interval: Duration, -} - -#[derive(Clone)] -struct Transaction { - id: TransactionId, - buffer_transactions: HashMap, - first_edit_at: Instant, - last_edit_at: Instant, - suppress_grouping: bool, -} - pub trait ToOffset: 'static + fmt::Debug { fn to_offset(&self, snapshot: &MultiBufferSnapshot) -> usize; -} - -pub trait ToOffsetUtf16: 'static + fmt::Debug { fn to_offset_utf16(&self, snapshot: &MultiBufferSnapshot) -> OffsetUtf16; } pub trait ToPoint: 'static + fmt::Debug { fn to_point(&self, snapshot: &MultiBufferSnapshot) -> Point; -} - -pub trait ToPointUtf16: 'static + fmt::Debug { fn to_point_utf16(&self, snapshot: &MultiBufferSnapshot) -> PointUtf16; } @@ -281,24 +235,20 @@ impl DiffState { /// The contents of a [`MultiBuffer`] at a single point in time. #[derive(Clone, Default)] pub struct MultiBufferSnapshot { - singleton: bool, - /* mut */ excerpts: SumTree, - /* mut */ - excerpt_ids: SumTree, diffs: TreeMap, diff_transforms: SumTree, - /* mut */ - replaced_excerpts: TreeMap, - /* mut */ - trailing_excerpt_update_count: usize, - all_diff_hunks_expanded: bool, non_text_state_update_count: usize, edit_count: usize, - /* mut */ is_dirty: bool, has_deleted_file: bool, has_conflict: bool, + /// immutable fields + singleton: bool, + excerpt_ids: SumTree, + replaced_excerpts: TreeMap, + trailing_excerpt_update_count: usize, + all_diff_hunks_expanded: bool, show_headers: bool, } @@ -555,7 +505,7 @@ struct MultiBufferRegion<'a, D: TextDimension> { struct ExcerptChunks<'a> { excerpt_id: ExcerptId, content_chunks: BufferChunks<'a>, - footer_height: usize, + has_footer: bool, } #[derive(Debug)] @@ -660,13 +610,7 @@ impl MultiBuffer { excerpts_by_path: Default::default(), paths_by_excerpt: Default::default(), buffer_changed_since_sync: Default::default(), - history: History { - next_transaction_id: clock::Lamport::MIN, - undo_stack: Vec::new(), - redo_stack: Vec::new(), - transaction_depth: 0, - group_interval: Duration::from_millis(300), - }, + history: History::default(), } } @@ -712,6 +656,10 @@ impl MultiBuffer { } } + pub fn set_group_interval(&mut self, group_interval: Duration) { + self.history.set_group_interval(group_interval); + } + pub fn with_title(mut self, title: String) -> Self { self.title = Some(title); self @@ -770,17 +718,8 @@ impl MultiBuffer { self.buffers.is_empty() } - pub fn symbols_containing( - &self, - offset: T, - theme: Option<&SyntaxTheme>, - cx: &App, - ) -> Option<(BufferId, Vec>)> { - self.read(cx).symbols_containing(offset, theme) - } - pub fn edit( - &self, + &mut self, edits: I, autoindent_mode: Option, cx: &mut Context, @@ -789,11 +728,15 @@ impl MultiBuffer { S: ToOffset, T: Into>, { - let snapshot = self.read(cx); + if self.read_only() || self.buffers.is_empty() { + return; + } + self.sync_mut(cx); let edits = edits .into_iter() .map(|(range, new_text)| { - let mut range = range.start.to_offset(&snapshot)..range.end.to_offset(&snapshot); + let mut range = range.start.to_offset(self.snapshot.get_mut()) + ..range.end.to_offset(self.snapshot.get_mut()); if range.start > range.end { 
mem::swap(&mut range.start, &mut range.end); } @@ -801,20 +744,15 @@ impl MultiBuffer { }) .collect::>(); - return edit_internal(self, snapshot, edits, autoindent_mode, cx); + return edit_internal(self, edits, autoindent_mode, cx); // Non-generic part of edit, hoisted out to avoid blowing up LLVM IR. fn edit_internal( - this: &MultiBuffer, - snapshot: Ref, + this: &mut MultiBuffer, edits: Vec<(Range, Arc)>, mut autoindent_mode: Option, cx: &mut Context, ) { - if this.read_only() || this.buffers.is_empty() { - return; - } - let original_indent_columns = match &mut autoindent_mode { Some(AutoindentMode::Block { original_indent_columns, @@ -822,9 +760,11 @@ impl MultiBuffer { _ => Default::default(), }; - let (buffer_edits, edited_excerpt_ids) = - this.convert_edits_to_buffer_edits(edits, &snapshot, &original_indent_columns); - drop(snapshot); + let (buffer_edits, edited_excerpt_ids) = MultiBuffer::convert_edits_to_buffer_edits( + edits, + this.snapshot.get_mut(), + &original_indent_columns, + ); let mut buffer_ids = Vec::with_capacity(buffer_edits.len()); for (buffer_id, mut edits) in buffer_edits { @@ -908,7 +848,6 @@ impl MultiBuffer { } fn convert_edits_to_buffer_edits( - &self, edits: Vec<(Range, Arc)>, snapshot: &MultiBufferSnapshot, original_indent_columns: &[Option], @@ -1028,17 +967,21 @@ impl MultiBuffer { (buffer_edits, edited_excerpt_ids) } - pub fn autoindent_ranges(&self, ranges: I, cx: &mut Context) + pub fn autoindent_ranges(&mut self, ranges: I, cx: &mut Context) where I: IntoIterator>, S: ToOffset, { - let snapshot = self.read(cx); + if self.read_only() || self.buffers.is_empty() { + return; + } + self.sync_mut(cx); let empty = Arc::::from(""); let edits = ranges .into_iter() .map(|range| { - let mut range = range.start.to_offset(&snapshot)..range.end.to_offset(&snapshot); + let mut range = range.start.to_offset(self.snapshot.get_mut()) + ..range.end.to_offset(&self.snapshot.get_mut()); if range.start > range.end { mem::swap(&mut range.start, &mut range.end); } @@ -1046,21 +989,15 @@ impl MultiBuffer { }) .collect::>(); - return autoindent_ranges_internal(self, snapshot, edits, cx); + return autoindent_ranges_internal(self, edits, cx); fn autoindent_ranges_internal( - this: &MultiBuffer, - snapshot: Ref, + this: &mut MultiBuffer, edits: Vec<(Range, Arc)>, cx: &mut Context, ) { - if this.read_only() || this.buffers.is_empty() { - return; - } - let (buffer_edits, edited_excerpt_ids) = - this.convert_edits_to_buffer_edits(edits, &snapshot, &[]); - drop(snapshot); + MultiBuffer::convert_edits_to_buffer_edits(edits, this.snapshot.get_mut(), &[]); let mut buffer_ids = Vec::new(); for (buffer_id, mut edits) in buffer_edits { @@ -1090,9 +1027,9 @@ impl MultiBuffer { } } - // Inserts newlines at the given position to create an empty line, returning the start of the new line. - // You can also request the insertion of empty lines above and below the line starting at the returned point. - // Panics if the given position is invalid. + /// Inserts newlines at the given position to create an empty line, returning the start of the new line. + /// You can also request the insertion of empty lines above and below the line starting at the returned point. + /// Panics if the given position is invalid. 
pub fn insert_empty_line( &mut self, position: impl ToPoint, @@ -1110,186 +1047,6 @@ impl MultiBuffer { multibuffer_point + (empty_line_start - buffer_point) } - pub fn start_transaction(&mut self, cx: &mut Context) -> Option { - self.start_transaction_at(Instant::now(), cx) - } - - pub fn start_transaction_at( - &mut self, - now: Instant, - cx: &mut Context, - ) -> Option { - if let Some(buffer) = self.as_singleton() { - return buffer.update(cx, |buffer, _| buffer.start_transaction_at(now)); - } - - for BufferState { buffer, .. } in self.buffers.values() { - buffer.update(cx, |buffer, _| buffer.start_transaction_at(now)); - } - self.history.start_transaction(now) - } - - pub fn last_transaction_id(&self, cx: &App) -> Option { - if let Some(buffer) = self.as_singleton() { - buffer - .read(cx) - .peek_undo_stack() - .map(|history_entry| history_entry.transaction_id()) - } else { - let last_transaction = self.history.undo_stack.last()?; - Some(last_transaction.id) - } - } - - pub fn end_transaction(&mut self, cx: &mut Context) -> Option { - self.end_transaction_at(Instant::now(), cx) - } - - pub fn end_transaction_at( - &mut self, - now: Instant, - cx: &mut Context, - ) -> Option { - if let Some(buffer) = self.as_singleton() { - return buffer.update(cx, |buffer, cx| buffer.end_transaction_at(now, cx)); - } - - let mut buffer_transactions = HashMap::default(); - for BufferState { buffer, .. } in self.buffers.values() { - if let Some(transaction_id) = - buffer.update(cx, |buffer, cx| buffer.end_transaction_at(now, cx)) - { - buffer_transactions.insert(buffer.read(cx).remote_id(), transaction_id); - } - } - - if self.history.end_transaction(now, buffer_transactions) { - let transaction_id = self.history.group().unwrap(); - Some(transaction_id) - } else { - None - } - } - - pub fn edited_ranges_for_transaction( - &self, - transaction_id: TransactionId, - cx: &App, - ) -> Vec> - where - D: TextDimension + Ord + Sub, - { - let Some(transaction) = self.history.transaction(transaction_id) else { - return Vec::new(); - }; - - let mut ranges = Vec::new(); - let snapshot = self.read(cx); - let mut cursor = snapshot.excerpts.cursor::(()); - - for (buffer_id, buffer_transaction) in &transaction.buffer_transactions { - let Some(buffer_state) = self.buffers.get(buffer_id) else { - continue; - }; - - let buffer = buffer_state.buffer.read(cx); - for range in buffer.edited_ranges_for_transaction_id::(*buffer_transaction) { - for excerpt_id in &buffer_state.excerpts { - cursor.seek(excerpt_id, Bias::Left); - if let Some(excerpt) = cursor.item() - && excerpt.locator == *excerpt_id - { - let excerpt_buffer_start = excerpt.range.context.start.summary::(buffer); - let excerpt_buffer_end = excerpt.range.context.end.summary::(buffer); - let excerpt_range = excerpt_buffer_start..excerpt_buffer_end; - if excerpt_range.contains(&range.start) - && excerpt_range.contains(&range.end) - { - let excerpt_start = D::from_text_summary(&cursor.start().text); - - let mut start = excerpt_start; - start.add_assign(&(range.start - excerpt_buffer_start)); - let mut end = excerpt_start; - end.add_assign(&(range.end - excerpt_buffer_start)); - - ranges.push(start..end); - break; - } - } - } - } - } - - ranges.sort_by_key(|range| range.start); - ranges - } - - pub fn merge_transactions( - &mut self, - transaction: TransactionId, - destination: TransactionId, - cx: &mut Context, - ) { - if let Some(buffer) = self.as_singleton() { - buffer.update(cx, |buffer, _| { - buffer.merge_transactions(transaction, destination) - }); - } else if 
let Some(transaction) = self.history.forget(transaction) - && let Some(destination) = self.history.transaction_mut(destination) - { - for (buffer_id, buffer_transaction_id) in transaction.buffer_transactions { - if let Some(destination_buffer_transaction_id) = - destination.buffer_transactions.get(&buffer_id) - { - if let Some(state) = self.buffers.get(&buffer_id) { - state.buffer.update(cx, |buffer, _| { - buffer.merge_transactions( - buffer_transaction_id, - *destination_buffer_transaction_id, - ) - }); - } - } else { - destination - .buffer_transactions - .insert(buffer_id, buffer_transaction_id); - } - } - } - } - - pub fn finalize_last_transaction(&mut self, cx: &mut Context) { - self.history.finalize_last_transaction(); - for BufferState { buffer, .. } in self.buffers.values() { - buffer.update(cx, |buffer, _| { - buffer.finalize_last_transaction(); - }); - } - } - - pub fn push_transaction<'a, T>(&mut self, buffer_transactions: T, cx: &Context) - where - T: IntoIterator, &'a language::Transaction)>, - { - self.history - .push_transaction(buffer_transactions, Instant::now(), cx); - self.history.finalize_last_transaction(); - } - - pub fn group_until_transaction( - &mut self, - transaction_id: TransactionId, - cx: &mut Context, - ) { - if let Some(buffer) = self.as_singleton() { - buffer.update(cx, |buffer, _| { - buffer.group_until_transaction(transaction_id) - }); - } else { - self.history.group_until(transaction_id); - } - } - pub fn set_active_selections( &self, selections: &[Selection], @@ -1357,325 +1114,30 @@ impl MultiBuffer { } } Some(selection) - })); - buffer.set_active_selections(merged_selections, line_mode, cursor_shape, cx); - }); - } - } - - pub fn remove_active_selections(&self, cx: &mut Context) { - for buffer in self.buffers.values() { - buffer - .buffer - .update(cx, |buffer, cx| buffer.remove_active_selections(cx)); - } - } - - pub fn undo(&mut self, cx: &mut Context) -> Option { - let mut transaction_id = None; - if let Some(buffer) = self.as_singleton() { - transaction_id = buffer.update(cx, |buffer, cx| buffer.undo(cx)); - } else { - while let Some(transaction) = self.history.pop_undo() { - let mut undone = false; - for (buffer_id, buffer_transaction_id) in &mut transaction.buffer_transactions { - if let Some(BufferState { buffer, .. }) = self.buffers.get(buffer_id) { - undone |= buffer.update(cx, |buffer, cx| { - let undo_to = *buffer_transaction_id; - if let Some(entry) = buffer.peek_undo_stack() { - *buffer_transaction_id = entry.transaction_id(); - } - buffer.undo_to_transaction(undo_to, cx) - }); - } - } - - if undone { - transaction_id = Some(transaction.id); - break; - } - } - } - - if let Some(transaction_id) = transaction_id { - cx.emit(Event::TransactionUndone { transaction_id }); - } - - transaction_id - } - - pub fn redo(&mut self, cx: &mut Context) -> Option { - if let Some(buffer) = self.as_singleton() { - return buffer.update(cx, |buffer, cx| buffer.redo(cx)); - } - - while let Some(transaction) = self.history.pop_redo() { - let mut redone = false; - for (buffer_id, buffer_transaction_id) in &mut transaction.buffer_transactions { - if let Some(BufferState { buffer, .. 
}) = self.buffers.get(buffer_id) { - redone |= buffer.update(cx, |buffer, cx| { - let redo_to = *buffer_transaction_id; - if let Some(entry) = buffer.peek_redo_stack() { - *buffer_transaction_id = entry.transaction_id(); - } - buffer.redo_to_transaction(redo_to, cx) - }); - } - } - - if redone { - return Some(transaction.id); - } - } - - None - } - - pub fn undo_transaction(&mut self, transaction_id: TransactionId, cx: &mut Context) { - if let Some(buffer) = self.as_singleton() { - buffer.update(cx, |buffer, cx| buffer.undo_transaction(transaction_id, cx)); - } else if let Some(transaction) = self.history.remove_from_undo(transaction_id) { - for (buffer_id, transaction_id) in &transaction.buffer_transactions { - if let Some(BufferState { buffer, .. }) = self.buffers.get(buffer_id) { - buffer.update(cx, |buffer, cx| { - buffer.undo_transaction(*transaction_id, cx) - }); - } - } - } - } - - pub fn forget_transaction(&mut self, transaction_id: TransactionId, cx: &mut Context) { - if let Some(buffer) = self.as_singleton() { - buffer.update(cx, |buffer, _| { - buffer.forget_transaction(transaction_id); - }); - } else if let Some(transaction) = self.history.forget(transaction_id) { - for (buffer_id, buffer_transaction_id) in transaction.buffer_transactions { - if let Some(state) = self.buffers.get_mut(&buffer_id) { - state.buffer.update(cx, |buffer, _| { - buffer.forget_transaction(buffer_transaction_id); - }); - } - } - } - } - - pub fn push_excerpts( - &mut self, - buffer: Entity, - ranges: impl IntoIterator>, - cx: &mut Context, - ) -> Vec - where - O: text::ToOffset, - { - self.insert_excerpts_after(ExcerptId::max(), buffer, ranges, cx) - } - - pub fn location_for_path(&self, path: &PathKey, cx: &App) -> Option { - let excerpt_id = self.excerpts_by_path.get(path)?.first()?; - let snapshot = self.snapshot(cx); - let excerpt = snapshot.excerpt(*excerpt_id)?; - Some(Anchor::in_buffer( - *excerpt_id, - excerpt.buffer_id, - excerpt.range.context.start, - )) - } - - pub fn excerpt_paths(&self) -> impl Iterator { - self.excerpts_by_path.keys() - } - - fn expand_excerpts_with_paths( - &mut self, - ids: impl IntoIterator, - line_count: u32, - direction: ExpandExcerptDirection, - cx: &mut Context, - ) { - let grouped = ids - .into_iter() - .chunk_by(|id| self.paths_by_excerpt.get(id).cloned()) - .into_iter() - .flat_map(|(k, v)| Some((k?, v.into_iter().collect::>()))) - .collect::>(); - let snapshot = self.snapshot(cx); - - for (path, ids) in grouped.into_iter() { - let Some(excerpt_ids) = self.excerpts_by_path.get(&path) else { - continue; - }; - - let ids_to_expand = HashSet::from_iter(ids); - let expanded_ranges = excerpt_ids.iter().filter_map(|excerpt_id| { - let excerpt = snapshot.excerpt(*excerpt_id)?; - - let mut context = excerpt.range.context.to_point(&excerpt.buffer); - if ids_to_expand.contains(excerpt_id) { - match direction { - ExpandExcerptDirection::Up => { - context.start.row = context.start.row.saturating_sub(line_count); - context.start.column = 0; - } - ExpandExcerptDirection::Down => { - context.end.row = - (context.end.row + line_count).min(excerpt.buffer.max_point().row); - context.end.column = excerpt.buffer.line_len(context.end.row); - } - ExpandExcerptDirection::UpAndDown => { - context.start.row = context.start.row.saturating_sub(line_count); - context.start.column = 0; - context.end.row = - (context.end.row + line_count).min(excerpt.buffer.max_point().row); - context.end.column = excerpt.buffer.line_len(context.end.row); - } - } - } - - Some(ExcerptRange { - context, - 
primary: excerpt.range.primary.to_point(&excerpt.buffer), - }) - }); - let mut merged_ranges: Vec> = Vec::new(); - for range in expanded_ranges { - if let Some(last_range) = merged_ranges.last_mut() - && last_range.context.end >= range.context.start - { - last_range.context.end = range.context.end; - continue; - } - merged_ranges.push(range) - } - let Some(excerpt_id) = excerpt_ids.first() else { - continue; - }; - let Some(buffer_id) = &snapshot.buffer_id_for_excerpt(*excerpt_id) else { - continue; - }; - - let Some(buffer) = self.buffers.get(buffer_id).map(|b| b.buffer.clone()) else { - continue; - }; - - let buffer_snapshot = buffer.read(cx).snapshot(); - self.update_path_excerpts(path.clone(), buffer, &buffer_snapshot, merged_ranges, cx); - } - } - - /// Sets excerpts, returns `true` if at least one new excerpt was added. - pub fn set_excerpts_for_path( - &mut self, - path: PathKey, - buffer: Entity, - ranges: impl IntoIterator>, - context_line_count: u32, - cx: &mut Context, - ) -> (Vec>, bool) { - let buffer_snapshot = buffer.read(cx).snapshot(); - let excerpt_ranges = build_excerpt_ranges(ranges, context_line_count, &buffer_snapshot); - - let (new, counts) = Self::merge_excerpt_ranges(&excerpt_ranges); - self.set_merged_excerpt_ranges_for_path( - path, - buffer, - excerpt_ranges, - &buffer_snapshot, - new, - counts, - cx, - ) - } - - pub fn set_excerpt_ranges_for_path( - &mut self, - path: PathKey, - buffer: Entity, - buffer_snapshot: &BufferSnapshot, - excerpt_ranges: Vec>, - cx: &mut Context, - ) -> (Vec>, bool) { - let (new, counts) = Self::merge_excerpt_ranges(&excerpt_ranges); - self.set_merged_excerpt_ranges_for_path( - path, - buffer, - excerpt_ranges, - buffer_snapshot, - new, - counts, - cx, - ) - } - - pub fn set_anchored_excerpts_for_path( - &self, - path_key: PathKey, - buffer: Entity, - ranges: Vec>, - context_line_count: u32, - cx: &mut Context, - ) -> Task>> { - let buffer_snapshot = buffer.read(cx).snapshot(); - cx.spawn(async move |multi_buffer, cx| { - let snapshot = buffer_snapshot.clone(); - let (excerpt_ranges, new, counts) = cx - .background_spawn(async move { - let ranges = ranges.into_iter().map(|range| range.to_point(&snapshot)); - let excerpt_ranges = - build_excerpt_ranges(ranges, context_line_count, &snapshot); - let (new, counts) = Self::merge_excerpt_ranges(&excerpt_ranges); - (excerpt_ranges, new, counts) - }) - .await; - - multi_buffer - .update(cx, move |multi_buffer, cx| { - let (ranges, _) = multi_buffer.set_merged_excerpt_ranges_for_path( - path_key, - buffer, - excerpt_ranges, - &buffer_snapshot, - new, - counts, - cx, - ); - ranges - }) - .ok() - .unwrap_or_default() - }) + })); + buffer.set_active_selections(merged_selections, line_mode, cursor_shape, cx); + }); + } + } + + pub fn remove_active_selections(&self, cx: &mut Context) { + for buffer in self.buffers.values() { + buffer + .buffer + .update(cx, |buffer, cx| buffer.remove_active_selections(cx)); + } } - /// Sets excerpts, returns `true` if at least one new excerpt was added. 
- fn set_merged_excerpt_ranges_for_path( + pub fn push_excerpts( &mut self, - path: PathKey, buffer: Entity, - ranges: Vec>, - buffer_snapshot: &BufferSnapshot, - new: Vec>, - counts: Vec, + ranges: impl IntoIterator>, cx: &mut Context, - ) -> (Vec>, bool) { - let (excerpt_ids, added_a_new_excerpt) = - self.update_path_excerpts(path, buffer, buffer_snapshot, new, cx); - - let mut result = Vec::new(); - let mut ranges = ranges.into_iter(); - for (excerpt_id, range_count) in excerpt_ids.into_iter().zip(counts.into_iter()) { - for range in ranges.by_ref().take(range_count) { - let range = Anchor::range_in_buffer( - excerpt_id, - buffer_snapshot.remote_id(), - buffer_snapshot.anchor_before(&range.primary.start) - ..buffer_snapshot.anchor_after(&range.primary.end), - ); - result.push(range) - } - } - (result, added_a_new_excerpt) + ) -> Vec + where + O: text::ToOffset, + { + self.insert_excerpts_after(ExcerptId::max(), buffer, ranges, cx) } fn merge_excerpt_ranges<'a>( @@ -1703,174 +1165,6 @@ impl MultiBuffer { (merged_ranges, counts) } - fn update_path_excerpts( - &mut self, - path: PathKey, - buffer: Entity, - buffer_snapshot: &BufferSnapshot, - new: Vec>, - cx: &mut Context, - ) -> (Vec, bool) { - let mut insert_after = self - .excerpts_by_path - .range(..path.clone()) - .next_back() - .map(|(_, value)| *value.last().unwrap()) - .unwrap_or(ExcerptId::min()); - - let existing = self - .excerpts_by_path - .get(&path) - .cloned() - .unwrap_or_default(); - - let mut new_iter = new.into_iter().peekable(); - let mut existing_iter = existing.into_iter().peekable(); - - let mut excerpt_ids = Vec::new(); - let mut to_remove = Vec::new(); - let mut to_insert: Vec<(ExcerptId, ExcerptRange)> = Vec::new(); - let mut added_a_new_excerpt = false; - let snapshot = self.snapshot(cx); - - let mut next_excerpt_id = - if let Some(last_entry) = self.snapshot.borrow().excerpt_ids.last() { - last_entry.id.0 + 1 - } else { - 1 - }; - - let mut next_excerpt_id = move || ExcerptId(post_inc(&mut next_excerpt_id)); - - let mut excerpts_cursor = snapshot.excerpts.cursor::>(()); - excerpts_cursor.next(); - - loop { - let new = new_iter.peek(); - let existing = if let Some(existing_id) = existing_iter.peek() { - let locator = snapshot.excerpt_locator_for_id(*existing_id); - excerpts_cursor.seek_forward(&Some(locator), Bias::Left); - if let Some(excerpt) = excerpts_cursor.item() { - if excerpt.buffer_id != buffer_snapshot.remote_id() { - to_remove.push(*existing_id); - existing_iter.next(); - continue; - } - Some(( - *existing_id, - excerpt.range.context.to_point(buffer_snapshot), - )) - } else { - None - } - } else { - None - }; - - if let Some((last_id, last)) = to_insert.last_mut() { - if let Some(new) = new - && last.context.end >= new.context.start - { - last.context.end = last.context.end.max(new.context.end); - excerpt_ids.push(*last_id); - new_iter.next(); - continue; - } - if let Some((existing_id, existing_range)) = &existing - && last.context.end >= existing_range.start - { - last.context.end = last.context.end.max(existing_range.end); - to_remove.push(*existing_id); - self.snapshot - .get_mut() - .replaced_excerpts - .insert(*existing_id, *last_id); - existing_iter.next(); - continue; - } - } - - match (new, existing) { - (None, None) => break, - (None, Some((existing_id, _))) => { - existing_iter.next(); - to_remove.push(existing_id); - continue; - } - (Some(_), None) => { - added_a_new_excerpt = true; - let new_id = next_excerpt_id(); - excerpt_ids.push(new_id); - to_insert.push((new_id, 
new_iter.next().unwrap())); - continue; - } - (Some(new), Some((_, existing_range))) => { - if existing_range.end < new.context.start { - let existing_id = existing_iter.next().unwrap(); - to_remove.push(existing_id); - continue; - } else if existing_range.start > new.context.end { - let new_id = next_excerpt_id(); - excerpt_ids.push(new_id); - to_insert.push((new_id, new_iter.next().unwrap())); - continue; - } - - if existing_range.start == new.context.start - && existing_range.end == new.context.end - { - self.insert_excerpts_with_ids_after( - insert_after, - buffer.clone(), - mem::take(&mut to_insert), - cx, - ); - insert_after = existing_iter.next().unwrap(); - excerpt_ids.push(insert_after); - new_iter.next(); - } else { - let existing_id = existing_iter.next().unwrap(); - let new_id = next_excerpt_id(); - self.snapshot - .get_mut() - .replaced_excerpts - .insert(existing_id, new_id); - to_remove.push(existing_id); - let mut range = new_iter.next().unwrap(); - range.context.start = range.context.start.min(existing_range.start); - range.context.end = range.context.end.max(existing_range.end); - excerpt_ids.push(new_id); - to_insert.push((new_id, range)); - } - } - }; - } - - self.insert_excerpts_with_ids_after(insert_after, buffer, to_insert, cx); - self.remove_excerpts(to_remove, cx); - if excerpt_ids.is_empty() { - self.excerpts_by_path.remove(&path); - } else { - for excerpt_id in &excerpt_ids { - self.paths_by_excerpt.insert(*excerpt_id, path.clone()); - } - self.excerpts_by_path - .insert(path, excerpt_ids.iter().dedup().cloned().collect()); - } - - (excerpt_ids, added_a_new_excerpt) - } - - pub fn paths(&self) -> impl Iterator + '_ { - self.excerpts_by_path.keys().cloned() - } - - pub fn remove_excerpts_for_path(&mut self, path: PathKey, cx: &mut Context) { - if let Some(to_remove) = self.excerpts_by_path.remove(&path) { - self.remove_excerpts(to_remove, cx) - } - } - pub fn insert_excerpts_after( &mut self, prev_excerpt_id: ExcerptId, @@ -1910,13 +1204,13 @@ impl MultiBuffer { ) where O: text::ToOffset, { - assert_eq!(self.history.transaction_depth, 0); + assert_eq!(self.history.transaction_depth(), 0); let mut ranges = ranges.into_iter().peekable(); if ranges.peek().is_none() { return Default::default(); } - self.sync(cx); + self.sync_mut(cx); let buffer_snapshot = buffer.read(cx).snapshot(); let buffer_id = buffer_snapshot.remote_id(); @@ -2028,23 +1322,38 @@ impl MultiBuffer { } pub fn clear(&mut self, cx: &mut Context) { - self.sync(cx); + self.sync_mut(cx); let ids = self.excerpt_ids(); let removed_buffer_ids = self.buffers.drain().map(|(id, _)| id).collect(); self.excerpts_by_path.clear(); self.paths_by_excerpt.clear(); - let mut snapshot = self.snapshot.get_mut(); + let MultiBufferSnapshot { + excerpts, + diffs: _, + diff_transforms: _, + non_text_state_update_count: _, + edit_count: _, + is_dirty, + has_deleted_file, + has_conflict, + singleton: _, + excerpt_ids: _, + replaced_excerpts, + trailing_excerpt_update_count, + all_diff_hunks_expanded: _, + show_headers: _, + } = self.snapshot.get_mut(); let start = ExcerptOffset::new(0); - let prev_len = ExcerptOffset::new(snapshot.excerpts.summary().text.len); - snapshot.excerpts = Default::default(); - snapshot.trailing_excerpt_update_count += 1; - snapshot.is_dirty = false; - snapshot.has_deleted_file = false; - snapshot.has_conflict = false; - snapshot.replaced_excerpts.clear(); + let prev_len = ExcerptOffset::new(excerpts.summary().text.len); + *excerpts = Default::default(); + *trailing_excerpt_update_count += 1; + 
*is_dirty = false; + *has_deleted_file = false; + *has_conflict = false; + replaced_excerpts.clear(); let edits = Self::sync_diff_transforms( - &mut snapshot, + self.snapshot.get_mut(), vec![Edit { old: start..prev_len, new: start..start, @@ -2236,11 +1545,12 @@ impl MultiBuffer { excerpt_ids: impl IntoIterator, cx: &mut Context, ) { - self.sync(cx); + self.sync_mut(cx); let ids = excerpt_ids.into_iter().collect::>(); if ids.is_empty() { return; } + self.buffer_changed_since_sync.replace(true); let mut snapshot = self.snapshot.get_mut(); let mut new_excerpts = SumTree::default(); @@ -2327,7 +1637,6 @@ impl MultiBuffer { if !edits.is_empty() { self.subscriptions.publish(edits); } - self.buffer_changed_since_sync.replace(true); cx.emit(Event::Edited { edited_buffer: None, }); @@ -2408,12 +1717,10 @@ impl MultiBuffer { } fn buffer_diff_language_changed(&mut self, diff: Entity, cx: &mut Context) { - self.sync(cx); - let snapshot = self.snapshot.get_mut(); let diff = diff.read(cx); let buffer_id = diff.buffer_id; let diff = diff.snapshot(cx); - snapshot.diffs.insert(buffer_id, diff); + self.snapshot.get_mut().diffs.insert(buffer_id, diff); } fn buffer_diff_changed( @@ -2422,14 +1729,14 @@ impl MultiBuffer { range: Range, cx: &mut Context, ) { - self.sync(cx); - self.buffer_changed_since_sync.replace(true); + self.sync_mut(cx); let diff = diff.read(cx); let buffer_id = diff.buffer_id; let Some(buffer_state) = self.buffers.get(&buffer_id) else { return; }; + self.buffer_changed_since_sync.replace(true); let buffer = buffer_state.buffer.read(cx); let diff_change_range = range.to_offset(buffer); @@ -2728,7 +2035,7 @@ impl MultiBuffer { if self.snapshot.borrow().all_diff_hunks_expanded && !expand { return; } - self.sync(cx); + self.sync_mut(cx); let mut snapshot = self.snapshot.get_mut(); let mut excerpt_edits = Vec::new(); let mut last_hunk_row = None; @@ -2799,7 +2106,7 @@ impl MultiBuffer { range: Range, cx: &mut Context, ) { - self.sync(cx); + self.sync_mut(cx); let mut snapshot = self.snapshot.get_mut(); let locator = snapshot.excerpt_locator_for_id(id); @@ -2872,7 +2179,7 @@ impl MultiBuffer { if line_count == 0 { return; } - self.sync(cx); + self.sync_mut(cx); if !self.excerpts_by_path.is_empty() { self.expand_excerpts_with_paths(ids, line_count, direction, cx); return; @@ -2974,15 +2281,59 @@ impl MultiBuffer { if !changed { return; } + let edits = Self::sync_( + &mut self.snapshot.borrow_mut(), + &self.buffers, + &self.diffs, + cx, + ); + if !edits.is_empty() { + self.subscriptions.publish(edits); + } + } + + fn sync_mut(&mut self, cx: &App) { + let changed = self.buffer_changed_since_sync.replace(false); + if !changed { + return; + } + let edits = Self::sync_(self.snapshot.get_mut(), &self.buffers, &self.diffs, cx); + + if !edits.is_empty() { + self.subscriptions.publish(edits); + } + } + + fn sync_( + snapshot: &mut MultiBufferSnapshot, + buffers: &HashMap, + diffs: &HashMap, + cx: &App, + ) -> Vec> { + let MultiBufferSnapshot { + excerpts, + diffs: buffer_diff, + diff_transforms: _, + non_text_state_update_count, + edit_count, + is_dirty, + has_deleted_file, + has_conflict, + singleton: _, + excerpt_ids: _, + replaced_excerpts: _, + trailing_excerpt_update_count: _, + all_diff_hunks_expanded: _, + show_headers: _, + } = snapshot; + *is_dirty = false; + *has_deleted_file = false; + *has_conflict = false; - let mut snapshot = self.snapshot.borrow_mut(); let mut excerpts_to_edit = Vec::new(); let mut non_text_state_updated = false; - let mut is_dirty = false; - let mut has_deleted_file = 
false; - let mut has_conflict = false; let mut edited = false; - for buffer_state in self.buffers.values() { + for buffer_state in buffers.values() { let buffer = buffer_state.buffer.read(cx); let version = buffer.version(); let non_text_state_update_count = buffer.non_text_state_update_count(); @@ -3005,25 +2356,22 @@ impl MultiBuffer { edited |= buffer_edited; non_text_state_updated |= buffer_non_text_state_updated; - is_dirty |= buffer.is_dirty(); - has_deleted_file |= buffer + *is_dirty |= buffer.is_dirty(); + *has_deleted_file |= buffer .file() .is_some_and(|file| file.disk_state() == DiskState::Deleted); - has_conflict |= buffer.has_conflict(); + *has_conflict |= buffer.has_conflict(); } if edited { - snapshot.edit_count += 1; + *edit_count += 1; } if non_text_state_updated { - snapshot.non_text_state_update_count += 1; + *non_text_state_update_count += 1; } - snapshot.is_dirty = is_dirty; - snapshot.has_deleted_file = has_deleted_file; - snapshot.has_conflict = has_conflict; - for (id, diff) in self.diffs.iter() { - if snapshot.diffs.get(id).is_none() { - snapshot.diffs.insert(*id, diff.diff.read(cx).snapshot(cx)); + for (id, diff) in diffs.iter() { + if buffer_diff.get(id).is_none() { + buffer_diff.insert(*id, diff.diff.read(cx).snapshot(cx)); } } @@ -3031,9 +2379,7 @@ impl MultiBuffer { let mut edits = Vec::new(); let mut new_excerpts = SumTree::default(); - let mut cursor = snapshot - .excerpts - .cursor::, ExcerptOffset>>(()); + let mut cursor = excerpts.cursor::, ExcerptOffset>>(()); for (locator, buffer, buffer_edited) in excerpts_to_edit { new_excerpts.append(cursor.slice(&Some(locator), Bias::Left), ()); @@ -3083,12 +2429,8 @@ impl MultiBuffer { new_excerpts.append(cursor.suffix(), ()); drop(cursor); - snapshot.excerpts = new_excerpts; - - let edits = Self::sync_diff_transforms(&mut snapshot, edits, DiffChangeKind::BufferEdited); - if !edits.is_empty() { - self.subscriptions.publish(edits); - } + *excerpts = new_excerpts; + Self::sync_diff_transforms(snapshot, edits, DiffChangeKind::BufferEdited) } fn sync_diff_transforms( @@ -4365,10 +3707,18 @@ impl MultiBufferSnapshot { self.convert_dimension(point, text::BufferSnapshot::point_to_point_utf16) } + pub fn point_utf16_to_point(&self, point: PointUtf16) -> Point { + self.convert_dimension(point, text::BufferSnapshot::point_utf16_to_point) + } + pub fn point_to_offset(&self, point: Point) -> usize { self.convert_dimension(point, text::BufferSnapshot::point_to_offset) } + pub fn point_to_offset_utf16(&self, point: Point) -> OffsetUtf16 { + self.convert_dimension(point, text::BufferSnapshot::point_to_offset_utf16) + } + pub fn offset_utf16_to_offset(&self, offset: OffsetUtf16) -> usize { self.convert_dimension(offset, text::BufferSnapshot::offset_utf16_to_offset) } @@ -4381,6 +3731,10 @@ impl MultiBufferSnapshot { self.convert_dimension(point, text::BufferSnapshot::point_utf16_to_offset) } + pub fn point_utf16_to_offset_utf16(&self, point: PointUtf16) -> OffsetUtf16 { + self.convert_dimension(point, text::BufferSnapshot::point_utf16_to_offset_utf16) + } + fn clip_dimension( &self, position: D, @@ -6730,208 +6084,6 @@ where } } -impl History { - fn start_transaction(&mut self, now: Instant) -> Option { - self.transaction_depth += 1; - if self.transaction_depth == 1 { - let id = self.next_transaction_id.tick(); - self.undo_stack.push(Transaction { - id, - buffer_transactions: Default::default(), - first_edit_at: now, - last_edit_at: now, - suppress_grouping: false, - }); - Some(id) - } else { - None - } - } - - fn end_transaction( 
- &mut self, - now: Instant, - buffer_transactions: HashMap, - ) -> bool { - assert_ne!(self.transaction_depth, 0); - self.transaction_depth -= 1; - if self.transaction_depth == 0 { - if buffer_transactions.is_empty() { - self.undo_stack.pop(); - false - } else { - self.redo_stack.clear(); - let transaction = self.undo_stack.last_mut().unwrap(); - transaction.last_edit_at = now; - for (buffer_id, transaction_id) in buffer_transactions { - transaction - .buffer_transactions - .entry(buffer_id) - .or_insert(transaction_id); - } - true - } - } else { - false - } - } - - fn push_transaction<'a, T>( - &mut self, - buffer_transactions: T, - now: Instant, - cx: &Context, - ) where - T: IntoIterator, &'a language::Transaction)>, - { - assert_eq!(self.transaction_depth, 0); - let transaction = Transaction { - id: self.next_transaction_id.tick(), - buffer_transactions: buffer_transactions - .into_iter() - .map(|(buffer, transaction)| (buffer.read(cx).remote_id(), transaction.id)) - .collect(), - first_edit_at: now, - last_edit_at: now, - suppress_grouping: false, - }; - if !transaction.buffer_transactions.is_empty() { - self.undo_stack.push(transaction); - self.redo_stack.clear(); - } - } - - fn finalize_last_transaction(&mut self) { - if let Some(transaction) = self.undo_stack.last_mut() { - transaction.suppress_grouping = true; - } - } - - fn forget(&mut self, transaction_id: TransactionId) -> Option { - if let Some(ix) = self - .undo_stack - .iter() - .rposition(|transaction| transaction.id == transaction_id) - { - Some(self.undo_stack.remove(ix)) - } else if let Some(ix) = self - .redo_stack - .iter() - .rposition(|transaction| transaction.id == transaction_id) - { - Some(self.redo_stack.remove(ix)) - } else { - None - } - } - - fn transaction(&self, transaction_id: TransactionId) -> Option<&Transaction> { - self.undo_stack - .iter() - .find(|transaction| transaction.id == transaction_id) - .or_else(|| { - self.redo_stack - .iter() - .find(|transaction| transaction.id == transaction_id) - }) - } - - fn transaction_mut(&mut self, transaction_id: TransactionId) -> Option<&mut Transaction> { - self.undo_stack - .iter_mut() - .find(|transaction| transaction.id == transaction_id) - .or_else(|| { - self.redo_stack - .iter_mut() - .find(|transaction| transaction.id == transaction_id) - }) - } - - fn pop_undo(&mut self) -> Option<&mut Transaction> { - assert_eq!(self.transaction_depth, 0); - if let Some(transaction) = self.undo_stack.pop() { - self.redo_stack.push(transaction); - self.redo_stack.last_mut() - } else { - None - } - } - - fn pop_redo(&mut self) -> Option<&mut Transaction> { - assert_eq!(self.transaction_depth, 0); - if let Some(transaction) = self.redo_stack.pop() { - self.undo_stack.push(transaction); - self.undo_stack.last_mut() - } else { - None - } - } - - fn remove_from_undo(&mut self, transaction_id: TransactionId) -> Option<&Transaction> { - let ix = self - .undo_stack - .iter() - .rposition(|transaction| transaction.id == transaction_id)?; - let transaction = self.undo_stack.remove(ix); - self.redo_stack.push(transaction); - self.redo_stack.last() - } - - fn group(&mut self) -> Option { - let mut count = 0; - let mut transactions = self.undo_stack.iter(); - if let Some(mut transaction) = transactions.next_back() { - while let Some(prev_transaction) = transactions.next_back() { - if !prev_transaction.suppress_grouping - && transaction.first_edit_at - prev_transaction.last_edit_at - <= self.group_interval - { - transaction = prev_transaction; - count += 1; - } else { - break; - } - } 
- } - self.group_trailing(count) - } - - fn group_until(&mut self, transaction_id: TransactionId) { - let mut count = 0; - for transaction in self.undo_stack.iter().rev() { - if transaction.id == transaction_id { - self.group_trailing(count); - break; - } else if transaction.suppress_grouping { - break; - } else { - count += 1; - } - } - } - - fn group_trailing(&mut self, n: usize) -> Option { - let new_len = self.undo_stack.len() - n; - let (transactions_to_keep, transactions_to_merge) = self.undo_stack.split_at_mut(new_len); - if let Some(last_transaction) = transactions_to_keep.last_mut() { - if let Some(transaction) = transactions_to_merge.last() { - last_transaction.last_edit_at = transaction.last_edit_at; - } - for to_merge in transactions_to_merge { - for (buffer_id, transaction_id) in &to_merge.buffer_transactions { - last_transaction - .buffer_transactions - .entry(*buffer_id) - .or_insert(*transaction_id); - } - } - } - - self.undo_stack.truncate(new_len); - self.undo_stack.last().map(|t| t.id) - } -} - impl Excerpt { fn new( id: ExcerptId, @@ -6959,21 +6111,16 @@ impl Excerpt { let chunks_start = content_start + range.start; let chunks_end = content_start + cmp::min(range.end, self.text_summary.len); - let footer_height = if self.has_trailing_newline + let has_footer = self.has_trailing_newline && range.start <= self.text_summary.len - && range.end > self.text_summary.len - { - 1 - } else { - 0 - }; + && range.end > self.text_summary.len; let content_chunks = self.buffer.chunks(chunks_start..chunks_end, language_aware); ExcerptChunks { excerpt_id: self.id, content_chunks, - footer_height, + has_footer, } } @@ -6982,14 +6129,9 @@ impl Excerpt { let chunks_start = content_start + range.start; let chunks_end = content_start + cmp::min(range.end, self.text_summary.len); excerpt_chunks.content_chunks.seek(chunks_start..chunks_end); - excerpt_chunks.footer_height = if self.has_trailing_newline + excerpt_chunks.has_footer = self.has_trailing_newline && range.start <= self.text_summary.len - && range.end > self.text_summary.len - { - 1 - } else { - 0 - }; + && range.end > self.text_summary.len; } fn clip_anchor(&self, text_anchor: text::Anchor) -> text::Anchor { @@ -7879,12 +7021,10 @@ impl<'a> Iterator for ExcerptChunks<'a> { return Some(chunk); } - if self.footer_height > 0 { - let text = unsafe { str::from_utf8_unchecked(&NEWLINES[..self.footer_height]) }; - let chars = 1u128 - .unbounded_shl(self.footer_height as u32) - .wrapping_sub(1); - self.footer_height = 0; + if self.has_footer { + let text = "\n"; + let chars = 0b1; + self.has_footer = false; return Some(Chunk { text, chars, @@ -7900,6 +7040,9 @@ impl ToOffset for Point { fn to_offset<'a>(&self, snapshot: &MultiBufferSnapshot) -> usize { snapshot.point_to_offset(*self) } + fn to_offset_utf16(&self, snapshot: &MultiBufferSnapshot) -> OffsetUtf16 { + snapshot.point_to_offset_utf16(*self) + } } impl ToOffset for usize { @@ -7913,29 +7056,27 @@ impl ToOffset for usize { ); *self } + fn to_offset_utf16(&self, snapshot: &MultiBufferSnapshot) -> OffsetUtf16 { + snapshot.offset_to_offset_utf16(*self) + } } impl ToOffset for OffsetUtf16 { fn to_offset<'a>(&self, snapshot: &MultiBufferSnapshot) -> usize { snapshot.offset_utf16_to_offset(*self) } -} - -impl ToOffset for PointUtf16 { - fn to_offset<'a>(&self, snapshot: &MultiBufferSnapshot) -> usize { - snapshot.point_utf16_to_offset(*self) - } -} -impl ToOffsetUtf16 for OffsetUtf16 { fn to_offset_utf16(&self, _snapshot: &MultiBufferSnapshot) -> OffsetUtf16 { *self } } -impl 
ToOffsetUtf16 for usize { +impl ToOffset for PointUtf16 { + fn to_offset<'a>(&self, snapshot: &MultiBufferSnapshot) -> usize { + snapshot.point_utf16_to_offset(*self) + } fn to_offset_utf16(&self, snapshot: &MultiBufferSnapshot) -> OffsetUtf16 { - snapshot.offset_to_offset_utf16(*self) + snapshot.point_utf16_to_offset_utf16(*self) } } @@ -7943,27 +7084,24 @@ impl ToPoint for usize { fn to_point<'a>(&self, snapshot: &MultiBufferSnapshot) -> Point { snapshot.offset_to_point(*self) } + fn to_point_utf16<'a>(&self, snapshot: &MultiBufferSnapshot) -> PointUtf16 { + snapshot.offset_to_point_utf16(*self) + } } impl ToPoint for Point { fn to_point<'a>(&self, _: &MultiBufferSnapshot) -> Point { *self } -} - -impl ToPointUtf16 for usize { - fn to_point_utf16<'a>(&self, snapshot: &MultiBufferSnapshot) -> PointUtf16 { - snapshot.offset_to_point_utf16(*self) - } -} - -impl ToPointUtf16 for Point { fn to_point_utf16<'a>(&self, snapshot: &MultiBufferSnapshot) -> PointUtf16 { snapshot.point_to_point_utf16(*self) } } -impl ToPointUtf16 for PointUtf16 { +impl ToPoint for PointUtf16 { + fn to_point<'a>(&self, snapshot: &MultiBufferSnapshot) -> Point { + snapshot.point_utf16_to_point(*self) + } fn to_point_utf16<'a>(&self, _: &MultiBufferSnapshot) -> PointUtf16 { *self } diff --git a/crates/multi_buffer/src/multi_buffer_tests.rs b/crates/multi_buffer/src/multi_buffer_tests.rs index 49db1fc2e264583f90f1a96195c560f0e52e8205..a9121b9104400d88d5f22801db1bfebaeeb060d6 100644 --- a/crates/multi_buffer/src/multi_buffer_tests.rs +++ b/crates/multi_buffer/src/multi_buffer_tests.rs @@ -7,6 +7,7 @@ use parking_lot::RwLock; use rand::prelude::*; use settings::SettingsStore; use std::env; +use std::time::{Duration, Instant}; use util::RandomCharIter; use util::rel_path::rel_path; use util::test::sample_text; @@ -2984,7 +2985,7 @@ fn test_history(cx: &mut App) { }); let multibuffer = cx.new(|_| MultiBuffer::new(Capability::ReadWrite)); multibuffer.update(cx, |this, _| { - this.history.group_interval = group_interval; + this.set_group_interval(group_interval); }); multibuffer.update(cx, |multibuffer, cx| { multibuffer.push_excerpts( diff --git a/crates/multi_buffer/src/path_key.rs b/crates/multi_buffer/src/path_key.rs new file mode 100644 index 0000000000000000000000000000000000000000..b6175b7aaab4f631728bcfaf8094120068032994 --- /dev/null +++ b/crates/multi_buffer/src/path_key.rs @@ -0,0 +1,417 @@ +use std::{mem, ops::Range, sync::Arc}; + +use collections::HashSet; +use gpui::{App, AppContext, Context, Entity, Task}; +use itertools::Itertools; +use language::{Buffer, BufferSnapshot}; +use rope::Point; +use text::{Bias, OffsetRangeExt, locator::Locator}; +use util::{post_inc, rel_path::RelPath}; + +use crate::{ + Anchor, ExcerptId, ExcerptRange, ExpandExcerptDirection, MultiBuffer, build_excerpt_ranges, +}; + +#[derive(PartialEq, Eq, Ord, PartialOrd, Clone, Hash, Debug)] +pub struct PathKey { + // Used by the derived PartialOrd & Ord + pub sort_prefix: Option, + pub path: Arc, +} + +impl PathKey { + pub fn with_sort_prefix(sort_prefix: u64, path: Arc) -> Self { + Self { + sort_prefix: Some(sort_prefix), + path, + } + } + + pub fn for_buffer(buffer: &Entity, cx: &App) -> Self { + if let Some(file) = buffer.read(cx).file() { + Self::with_sort_prefix(file.worktree_id(cx).to_proto(), file.path().clone()) + } else { + Self { + sort_prefix: None, + path: RelPath::unix(&buffer.entity_id().to_string()) + .unwrap() + .into_arc(), + } + } + } +} + +impl MultiBuffer { + pub fn paths(&self) -> impl Iterator + '_ { + 
self.excerpts_by_path.keys().cloned() + } + + pub fn remove_excerpts_for_path(&mut self, path: PathKey, cx: &mut Context) { + if let Some(to_remove) = self.excerpts_by_path.remove(&path) { + self.remove_excerpts(to_remove, cx) + } + } + + pub fn location_for_path(&self, path: &PathKey, cx: &App) -> Option { + let excerpt_id = self.excerpts_by_path.get(path)?.first()?; + let snapshot = self.read(cx); + let excerpt = snapshot.excerpt(*excerpt_id)?; + Some(Anchor::in_buffer( + *excerpt_id, + excerpt.buffer_id, + excerpt.range.context.start, + )) + } + + pub fn excerpt_paths(&self) -> impl Iterator { + self.excerpts_by_path.keys() + } + + /// Sets excerpts, returns `true` if at least one new excerpt was added. + pub fn set_excerpts_for_path( + &mut self, + path: PathKey, + buffer: Entity, + ranges: impl IntoIterator>, + context_line_count: u32, + cx: &mut Context, + ) -> (Vec>, bool) { + let buffer_snapshot = buffer.read(cx).snapshot(); + let excerpt_ranges = build_excerpt_ranges(ranges, context_line_count, &buffer_snapshot); + + let (new, counts) = Self::merge_excerpt_ranges(&excerpt_ranges); + self.set_merged_excerpt_ranges_for_path( + path, + buffer, + excerpt_ranges, + &buffer_snapshot, + new, + counts, + cx, + ) + } + + pub fn set_excerpt_ranges_for_path( + &mut self, + path: PathKey, + buffer: Entity, + buffer_snapshot: &BufferSnapshot, + excerpt_ranges: Vec>, + cx: &mut Context, + ) -> (Vec>, bool) { + let (new, counts) = Self::merge_excerpt_ranges(&excerpt_ranges); + self.set_merged_excerpt_ranges_for_path( + path, + buffer, + excerpt_ranges, + buffer_snapshot, + new, + counts, + cx, + ) + } + + pub fn set_anchored_excerpts_for_path( + &self, + path_key: PathKey, + buffer: Entity, + ranges: Vec>, + context_line_count: u32, + cx: &mut Context, + ) -> Task>> { + let buffer_snapshot = buffer.read(cx).snapshot(); + cx.spawn(async move |multi_buffer, cx| { + let snapshot = buffer_snapshot.clone(); + let (excerpt_ranges, new, counts) = cx + .background_spawn(async move { + let ranges = ranges.into_iter().map(|range| range.to_point(&snapshot)); + let excerpt_ranges = + build_excerpt_ranges(ranges, context_line_count, &snapshot); + let (new, counts) = Self::merge_excerpt_ranges(&excerpt_ranges); + (excerpt_ranges, new, counts) + }) + .await; + + multi_buffer + .update(cx, move |multi_buffer, cx| { + let (ranges, _) = multi_buffer.set_merged_excerpt_ranges_for_path( + path_key, + buffer, + excerpt_ranges, + &buffer_snapshot, + new, + counts, + cx, + ); + ranges + }) + .ok() + .unwrap_or_default() + }) + } + + pub(super) fn expand_excerpts_with_paths( + &mut self, + ids: impl IntoIterator, + line_count: u32, + direction: ExpandExcerptDirection, + cx: &mut Context, + ) { + let grouped = ids + .into_iter() + .chunk_by(|id| self.paths_by_excerpt.get(id).cloned()) + .into_iter() + .flat_map(|(k, v)| Some((k?, v.into_iter().collect::>()))) + .collect::>(); + let snapshot = self.snapshot(cx); + + for (path, ids) in grouped.into_iter() { + let Some(excerpt_ids) = self.excerpts_by_path.get(&path) else { + continue; + }; + + let ids_to_expand = HashSet::from_iter(ids); + let expanded_ranges = excerpt_ids.iter().filter_map(|excerpt_id| { + let excerpt = snapshot.excerpt(*excerpt_id)?; + + let mut context = excerpt.range.context.to_point(&excerpt.buffer); + if ids_to_expand.contains(excerpt_id) { + match direction { + ExpandExcerptDirection::Up => { + context.start.row = context.start.row.saturating_sub(line_count); + context.start.column = 0; + } + ExpandExcerptDirection::Down => { + context.end.row = + 
(context.end.row + line_count).min(excerpt.buffer.max_point().row); + context.end.column = excerpt.buffer.line_len(context.end.row); + } + ExpandExcerptDirection::UpAndDown => { + context.start.row = context.start.row.saturating_sub(line_count); + context.start.column = 0; + context.end.row = + (context.end.row + line_count).min(excerpt.buffer.max_point().row); + context.end.column = excerpt.buffer.line_len(context.end.row); + } + } + } + + Some(ExcerptRange { + context, + primary: excerpt.range.primary.to_point(&excerpt.buffer), + }) + }); + let mut merged_ranges: Vec> = Vec::new(); + for range in expanded_ranges { + if let Some(last_range) = merged_ranges.last_mut() + && last_range.context.end >= range.context.start + { + last_range.context.end = range.context.end; + continue; + } + merged_ranges.push(range) + } + let Some(excerpt_id) = excerpt_ids.first() else { + continue; + }; + let Some(buffer_id) = &snapshot.buffer_id_for_excerpt(*excerpt_id) else { + continue; + }; + + let Some(buffer) = self.buffers.get(buffer_id).map(|b| b.buffer.clone()) else { + continue; + }; + + let buffer_snapshot = buffer.read(cx).snapshot(); + self.update_path_excerpts(path.clone(), buffer, &buffer_snapshot, merged_ranges, cx); + } + } + + /// Sets excerpts, returns `true` if at least one new excerpt was added. + fn set_merged_excerpt_ranges_for_path( + &mut self, + path: PathKey, + buffer: Entity, + ranges: Vec>, + buffer_snapshot: &BufferSnapshot, + new: Vec>, + counts: Vec, + cx: &mut Context, + ) -> (Vec>, bool) { + let (excerpt_ids, added_a_new_excerpt) = + self.update_path_excerpts(path, buffer, buffer_snapshot, new, cx); + + let mut result = Vec::new(); + let mut ranges = ranges.into_iter(); + for (excerpt_id, range_count) in excerpt_ids.into_iter().zip(counts.into_iter()) { + for range in ranges.by_ref().take(range_count) { + let range = Anchor::range_in_buffer( + excerpt_id, + buffer_snapshot.remote_id(), + buffer_snapshot.anchor_before(&range.primary.start) + ..buffer_snapshot.anchor_after(&range.primary.end), + ); + result.push(range) + } + } + (result, added_a_new_excerpt) + } + + fn update_path_excerpts( + &mut self, + path: PathKey, + buffer: Entity, + buffer_snapshot: &BufferSnapshot, + new: Vec>, + cx: &mut Context, + ) -> (Vec, bool) { + let mut insert_after = self + .excerpts_by_path + .range(..path.clone()) + .next_back() + .map(|(_, value)| *value.last().unwrap()) + .unwrap_or(ExcerptId::min()); + + let existing = self + .excerpts_by_path + .get(&path) + .cloned() + .unwrap_or_default(); + + let mut new_iter = new.into_iter().peekable(); + let mut existing_iter = existing.into_iter().peekable(); + + let mut excerpt_ids = Vec::new(); + let mut to_remove = Vec::new(); + let mut to_insert: Vec<(ExcerptId, ExcerptRange)> = Vec::new(); + let mut added_a_new_excerpt = false; + let snapshot = self.snapshot(cx); + + let mut next_excerpt_id = + if let Some(last_entry) = self.snapshot.borrow().excerpt_ids.last() { + last_entry.id.0 + 1 + } else { + 1 + }; + + let mut next_excerpt_id = move || ExcerptId(post_inc(&mut next_excerpt_id)); + + let mut excerpts_cursor = snapshot.excerpts.cursor::>(()); + excerpts_cursor.next(); + + loop { + let new = new_iter.peek(); + let existing = if let Some(existing_id) = existing_iter.peek() { + let locator = snapshot.excerpt_locator_for_id(*existing_id); + excerpts_cursor.seek_forward(&Some(locator), Bias::Left); + if let Some(excerpt) = excerpts_cursor.item() { + if excerpt.buffer_id != buffer_snapshot.remote_id() { + to_remove.push(*existing_id); + 
existing_iter.next(); + continue; + } + Some(( + *existing_id, + excerpt.range.context.to_point(buffer_snapshot), + )) + } else { + None + } + } else { + None + }; + + if let Some((last_id, last)) = to_insert.last_mut() { + if let Some(new) = new + && last.context.end >= new.context.start + { + last.context.end = last.context.end.max(new.context.end); + excerpt_ids.push(*last_id); + new_iter.next(); + continue; + } + if let Some((existing_id, existing_range)) = &existing + && last.context.end >= existing_range.start + { + last.context.end = last.context.end.max(existing_range.end); + to_remove.push(*existing_id); + self.snapshot + .get_mut() + .replaced_excerpts + .insert(*existing_id, *last_id); + existing_iter.next(); + continue; + } + } + + match (new, existing) { + (None, None) => break, + (None, Some((existing_id, _))) => { + existing_iter.next(); + to_remove.push(existing_id); + continue; + } + (Some(_), None) => { + added_a_new_excerpt = true; + let new_id = next_excerpt_id(); + excerpt_ids.push(new_id); + to_insert.push((new_id, new_iter.next().unwrap())); + continue; + } + (Some(new), Some((_, existing_range))) => { + if existing_range.end < new.context.start { + let existing_id = existing_iter.next().unwrap(); + to_remove.push(existing_id); + continue; + } else if existing_range.start > new.context.end { + let new_id = next_excerpt_id(); + excerpt_ids.push(new_id); + to_insert.push((new_id, new_iter.next().unwrap())); + continue; + } + + if existing_range.start == new.context.start + && existing_range.end == new.context.end + { + self.insert_excerpts_with_ids_after( + insert_after, + buffer.clone(), + mem::take(&mut to_insert), + cx, + ); + insert_after = existing_iter.next().unwrap(); + excerpt_ids.push(insert_after); + new_iter.next(); + } else { + let existing_id = existing_iter.next().unwrap(); + let new_id = next_excerpt_id(); + self.snapshot + .get_mut() + .replaced_excerpts + .insert(existing_id, new_id); + to_remove.push(existing_id); + let mut range = new_iter.next().unwrap(); + range.context.start = range.context.start.min(existing_range.start); + range.context.end = range.context.end.max(existing_range.end); + excerpt_ids.push(new_id); + to_insert.push((new_id, range)); + } + } + }; + } + + self.insert_excerpts_with_ids_after(insert_after, buffer, to_insert, cx); + self.remove_excerpts(to_remove, cx); + if excerpt_ids.is_empty() { + self.excerpts_by_path.remove(&path); + } else { + for excerpt_id in &excerpt_ids { + self.paths_by_excerpt.insert(*excerpt_id, path.clone()); + } + self.excerpts_by_path + .insert(path, excerpt_ids.iter().dedup().cloned().collect()); + } + + (excerpt_ids, added_a_new_excerpt) + } +} diff --git a/crates/multi_buffer/src/transaction.rs b/crates/multi_buffer/src/transaction.rs new file mode 100644 index 0000000000000000000000000000000000000000..062d25d8233777190113aaa3e6a7f62396cfd08f --- /dev/null +++ b/crates/multi_buffer/src/transaction.rs @@ -0,0 +1,524 @@ +use gpui::{App, Context, Entity}; +use language::{self, Buffer, TextDimension, TransactionId}; +use std::{ + collections::HashMap, + ops::{Range, Sub}, + time::{Duration, Instant}, +}; +use sum_tree::Bias; +use text::BufferId; + +use crate::BufferState; + +use super::{Event, ExcerptSummary, MultiBuffer}; + +#[derive(Clone)] +pub(super) struct History { + next_transaction_id: TransactionId, + undo_stack: Vec, + redo_stack: Vec, + transaction_depth: usize, + group_interval: Duration, +} + +impl Default for History { + fn default() -> Self { + History { + next_transaction_id: 
clock::Lamport::MIN,
+            undo_stack: Vec::new(),
+            redo_stack: Vec::new(),
+            transaction_depth: 0,
+            group_interval: Duration::from_millis(300),
+        }
+    }
+}
+
+#[derive(Clone)]
+struct Transaction {
+    id: TransactionId,
+    buffer_transactions: HashMap<BufferId, TransactionId>,
+    first_edit_at: Instant,
+    last_edit_at: Instant,
+    suppress_grouping: bool,
+}
+
+impl History {
+    fn start_transaction(&mut self, now: Instant) -> Option<TransactionId> {
+        self.transaction_depth += 1;
+        if self.transaction_depth == 1 {
+            let id = self.next_transaction_id.tick();
+            self.undo_stack.push(Transaction {
+                id,
+                buffer_transactions: Default::default(),
+                first_edit_at: now,
+                last_edit_at: now,
+                suppress_grouping: false,
+            });
+            Some(id)
+        } else {
+            None
+        }
+    }
+
+    fn end_transaction(
+        &mut self,
+        now: Instant,
+        buffer_transactions: HashMap<BufferId, TransactionId>,
+    ) -> bool {
+        assert_ne!(self.transaction_depth, 0);
+        self.transaction_depth -= 1;
+        if self.transaction_depth == 0 {
+            if buffer_transactions.is_empty() {
+                self.undo_stack.pop();
+                false
+            } else {
+                self.redo_stack.clear();
+                let transaction = self.undo_stack.last_mut().unwrap();
+                transaction.last_edit_at = now;
+                for (buffer_id, transaction_id) in buffer_transactions {
+                    transaction
+                        .buffer_transactions
+                        .entry(buffer_id)
+                        .or_insert(transaction_id);
+                }
+                true
+            }
+        } else {
+            false
+        }
+    }
+
+    fn push_transaction<'a, T>(
+        &mut self,
+        buffer_transactions: T,
+        now: Instant,
+        cx: &Context<MultiBuffer>,
+    ) where
+        T: IntoIterator<Item = (&'a Entity<Buffer>, &'a language::Transaction)>,
+    {
+        assert_eq!(self.transaction_depth, 0);
+        let transaction = Transaction {
+            id: self.next_transaction_id.tick(),
+            buffer_transactions: buffer_transactions
+                .into_iter()
+                .map(|(buffer, transaction)| (buffer.read(cx).remote_id(), transaction.id))
+                .collect(),
+            first_edit_at: now,
+            last_edit_at: now,
+            suppress_grouping: false,
+        };
+        if !transaction.buffer_transactions.is_empty() {
+            self.undo_stack.push(transaction);
+            self.redo_stack.clear();
+        }
+    }
+
+    fn finalize_last_transaction(&mut self) {
+        if let Some(transaction) = self.undo_stack.last_mut() {
+            transaction.suppress_grouping = true;
+        }
+    }
+
+    fn forget(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
+        if let Some(ix) = self
+            .undo_stack
+            .iter()
+            .rposition(|transaction| transaction.id == transaction_id)
+        {
+            Some(self.undo_stack.remove(ix))
+        } else if let Some(ix) = self
+            .redo_stack
+            .iter()
+            .rposition(|transaction| transaction.id == transaction_id)
+        {
+            Some(self.redo_stack.remove(ix))
+        } else {
+            None
+        }
+    }
+
+    fn transaction(&self, transaction_id: TransactionId) -> Option<&Transaction> {
+        self.undo_stack
+            .iter()
+            .find(|transaction| transaction.id == transaction_id)
+            .or_else(|| {
+                self.redo_stack
+                    .iter()
+                    .find(|transaction| transaction.id == transaction_id)
+            })
+    }
+
+    fn transaction_mut(&mut self, transaction_id: TransactionId) -> Option<&mut Transaction> {
+        self.undo_stack
+            .iter_mut()
+            .find(|transaction| transaction.id == transaction_id)
+            .or_else(|| {
+                self.redo_stack
+                    .iter_mut()
+                    .find(|transaction| transaction.id == transaction_id)
+            })
+    }
+
+    fn pop_undo(&mut self) -> Option<&mut Transaction> {
+        assert_eq!(self.transaction_depth, 0);
+        if let Some(transaction) = self.undo_stack.pop() {
+            self.redo_stack.push(transaction);
+            self.redo_stack.last_mut()
+        } else {
+            None
+        }
+    }
+
+    fn pop_redo(&mut self) -> Option<&mut Transaction> {
+        assert_eq!(self.transaction_depth, 0);
+        if let Some(transaction) = self.redo_stack.pop() {
+            self.undo_stack.push(transaction);
+            self.undo_stack.last_mut()
+        } else {
+            None
+        }
+    }
+
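As orientation for the grouping methods that follow (`group`, `group_until`, `group_trailing`), the sketch below restates the rule they implement: trailing undo entries merge into the entry before them when their edits fall within `group_interval` of one another and grouping has not been suppressed. `Txn` and `trailing_group_len` are illustrative stand-ins written for this note, not the crate's private `Transaction`/`History` types.

    use std::time::{Duration, Instant};

    // Simplified stand-in for the private `Transaction` type above.
    struct Txn {
        first_edit_at: Instant,
        last_edit_at: Instant,
        suppress_grouping: bool,
    }

    // How many trailing undo-stack entries would merge into the entry before
    // them, using the same "within group_interval and not suppressed" test.
    fn trailing_group_len(undo_stack: &[Txn], group_interval: Duration) -> usize {
        let mut count = 0;
        let mut iter = undo_stack.iter().rev();
        if let Some(mut txn) = iter.next() {
            for prev in iter {
                if !prev.suppress_grouping
                    && txn.first_edit_at.duration_since(prev.last_edit_at) <= group_interval
                {
                    txn = prev;
                    count += 1;
                } else {
                    break;
                }
            }
        }
        count
    }

    fn main() {
        let t0 = Instant::now();
        let txn = |offset_ms: u64, suppress: bool| Txn {
            first_edit_at: t0 + Duration::from_millis(offset_ms),
            last_edit_at: t0 + Duration::from_millis(offset_ms),
            suppress_grouping: suppress,
        };

        // Three edits in quick succession: the last two fold into the first.
        let rapid = vec![txn(0, false), txn(100, false), txn(200, false)];
        assert_eq!(trailing_group_len(&rapid, Duration::from_millis(300)), 2);

        // A pause longer than the 300 ms group interval keeps the last edit separate.
        let with_pause = vec![txn(0, false), txn(100, false), txn(1000, false)];
        assert_eq!(trailing_group_len(&with_pause, Duration::from_millis(300)), 0);
    }

In the real `History`, `group` computes this count and `group_trailing` then folds the merged entries' `buffer_transactions` into the surviving undo entry.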
fn remove_from_undo(&mut self, transaction_id: TransactionId) -> Option<&Transaction> { + let ix = self + .undo_stack + .iter() + .rposition(|transaction| transaction.id == transaction_id)?; + let transaction = self.undo_stack.remove(ix); + self.redo_stack.push(transaction); + self.redo_stack.last() + } + + fn group(&mut self) -> Option { + let mut count = 0; + let mut transactions = self.undo_stack.iter(); + if let Some(mut transaction) = transactions.next_back() { + while let Some(prev_transaction) = transactions.next_back() { + if !prev_transaction.suppress_grouping + && transaction.first_edit_at - prev_transaction.last_edit_at + <= self.group_interval + { + transaction = prev_transaction; + count += 1; + } else { + break; + } + } + } + self.group_trailing(count) + } + + fn group_until(&mut self, transaction_id: TransactionId) { + let mut count = 0; + for transaction in self.undo_stack.iter().rev() { + if transaction.id == transaction_id { + self.group_trailing(count); + break; + } else if transaction.suppress_grouping { + break; + } else { + count += 1; + } + } + } + + fn group_trailing(&mut self, n: usize) -> Option { + let new_len = self.undo_stack.len() - n; + let (transactions_to_keep, transactions_to_merge) = self.undo_stack.split_at_mut(new_len); + if let Some(last_transaction) = transactions_to_keep.last_mut() { + if let Some(transaction) = transactions_to_merge.last() { + last_transaction.last_edit_at = transaction.last_edit_at; + } + for to_merge in transactions_to_merge { + for (buffer_id, transaction_id) in &to_merge.buffer_transactions { + last_transaction + .buffer_transactions + .entry(*buffer_id) + .or_insert(*transaction_id); + } + } + } + + self.undo_stack.truncate(new_len); + self.undo_stack.last().map(|t| t.id) + } + + pub(super) fn transaction_depth(&self) -> usize { + self.transaction_depth + } + + pub fn set_group_interval(&mut self, group_interval: Duration) { + self.group_interval = group_interval; + } +} + +impl MultiBuffer { + pub fn start_transaction(&mut self, cx: &mut Context) -> Option { + self.start_transaction_at(Instant::now(), cx) + } + + pub fn start_transaction_at( + &mut self, + now: Instant, + cx: &mut Context, + ) -> Option { + if let Some(buffer) = self.as_singleton() { + return buffer.update(cx, |buffer, _| buffer.start_transaction_at(now)); + } + + for BufferState { buffer, .. } in self.buffers.values() { + buffer.update(cx, |buffer, _| buffer.start_transaction_at(now)); + } + self.history.start_transaction(now) + } + + pub fn last_transaction_id(&self, cx: &App) -> Option { + if let Some(buffer) = self.as_singleton() { + buffer + .read(cx) + .peek_undo_stack() + .map(|history_entry| history_entry.transaction_id()) + } else { + let last_transaction = self.history.undo_stack.last()?; + Some(last_transaction.id) + } + } + + pub fn end_transaction(&mut self, cx: &mut Context) -> Option { + self.end_transaction_at(Instant::now(), cx) + } + + pub fn end_transaction_at( + &mut self, + now: Instant, + cx: &mut Context, + ) -> Option { + if let Some(buffer) = self.as_singleton() { + return buffer.update(cx, |buffer, cx| buffer.end_transaction_at(now, cx)); + } + + let mut buffer_transactions = HashMap::default(); + for BufferState { buffer, .. 
} in self.buffers.values() { + if let Some(transaction_id) = + buffer.update(cx, |buffer, cx| buffer.end_transaction_at(now, cx)) + { + buffer_transactions.insert(buffer.read(cx).remote_id(), transaction_id); + } + } + + if self.history.end_transaction(now, buffer_transactions) { + let transaction_id = self.history.group().unwrap(); + Some(transaction_id) + } else { + None + } + } + + pub fn edited_ranges_for_transaction( + &self, + transaction_id: TransactionId, + cx: &App, + ) -> Vec> + where + D: TextDimension + Ord + Sub, + { + let Some(transaction) = self.history.transaction(transaction_id) else { + return Vec::new(); + }; + + let mut ranges = Vec::new(); + let snapshot = self.read(cx); + let mut cursor = snapshot.excerpts.cursor::(()); + + for (buffer_id, buffer_transaction) in &transaction.buffer_transactions { + let Some(buffer_state) = self.buffers.get(buffer_id) else { + continue; + }; + + let buffer = buffer_state.buffer.read(cx); + for range in buffer.edited_ranges_for_transaction_id::(*buffer_transaction) { + for excerpt_id in &buffer_state.excerpts { + cursor.seek(excerpt_id, Bias::Left); + if let Some(excerpt) = cursor.item() + && excerpt.locator == *excerpt_id + { + let excerpt_buffer_start = excerpt.range.context.start.summary::(buffer); + let excerpt_buffer_end = excerpt.range.context.end.summary::(buffer); + let excerpt_range = excerpt_buffer_start..excerpt_buffer_end; + if excerpt_range.contains(&range.start) + && excerpt_range.contains(&range.end) + { + let excerpt_start = D::from_text_summary(&cursor.start().text); + + let mut start = excerpt_start; + start.add_assign(&(range.start - excerpt_buffer_start)); + let mut end = excerpt_start; + end.add_assign(&(range.end - excerpt_buffer_start)); + + ranges.push(start..end); + break; + } + } + } + } + } + + ranges.sort_by_key(|range| range.start); + ranges + } + + pub fn merge_transactions( + &mut self, + transaction: TransactionId, + destination: TransactionId, + cx: &mut Context, + ) { + if let Some(buffer) = self.as_singleton() { + buffer.update(cx, |buffer, _| { + buffer.merge_transactions(transaction, destination) + }); + } else if let Some(transaction) = self.history.forget(transaction) + && let Some(destination) = self.history.transaction_mut(destination) + { + for (buffer_id, buffer_transaction_id) in transaction.buffer_transactions { + if let Some(destination_buffer_transaction_id) = + destination.buffer_transactions.get(&buffer_id) + { + if let Some(state) = self.buffers.get(&buffer_id) { + state.buffer.update(cx, |buffer, _| { + buffer.merge_transactions( + buffer_transaction_id, + *destination_buffer_transaction_id, + ) + }); + } + } else { + destination + .buffer_transactions + .insert(buffer_id, buffer_transaction_id); + } + } + } + } + + pub fn finalize_last_transaction(&mut self, cx: &mut Context) { + self.history.finalize_last_transaction(); + for BufferState { buffer, .. 
} in self.buffers.values() { + buffer.update(cx, |buffer, _| { + buffer.finalize_last_transaction(); + }); + } + } + + pub fn push_transaction<'a, T>(&mut self, buffer_transactions: T, cx: &Context) + where + T: IntoIterator, &'a language::Transaction)>, + { + self.history + .push_transaction(buffer_transactions, Instant::now(), cx); + self.history.finalize_last_transaction(); + } + + pub fn group_until_transaction( + &mut self, + transaction_id: TransactionId, + cx: &mut Context, + ) { + if let Some(buffer) = self.as_singleton() { + buffer.update(cx, |buffer, _| { + buffer.group_until_transaction(transaction_id) + }); + } else { + self.history.group_until(transaction_id); + } + } + pub fn undo(&mut self, cx: &mut Context) -> Option { + let mut transaction_id = None; + if let Some(buffer) = self.as_singleton() { + transaction_id = buffer.update(cx, |buffer, cx| buffer.undo(cx)); + } else { + while let Some(transaction) = self.history.pop_undo() { + let mut undone = false; + for (buffer_id, buffer_transaction_id) in &mut transaction.buffer_transactions { + if let Some(BufferState { buffer, .. }) = self.buffers.get(buffer_id) { + undone |= buffer.update(cx, |buffer, cx| { + let undo_to = *buffer_transaction_id; + if let Some(entry) = buffer.peek_undo_stack() { + *buffer_transaction_id = entry.transaction_id(); + } + buffer.undo_to_transaction(undo_to, cx) + }); + } + } + + if undone { + transaction_id = Some(transaction.id); + break; + } + } + } + + if let Some(transaction_id) = transaction_id { + cx.emit(Event::TransactionUndone { transaction_id }); + } + + transaction_id + } + + pub fn redo(&mut self, cx: &mut Context) -> Option { + if let Some(buffer) = self.as_singleton() { + return buffer.update(cx, |buffer, cx| buffer.redo(cx)); + } + + while let Some(transaction) = self.history.pop_redo() { + let mut redone = false; + for (buffer_id, buffer_transaction_id) in transaction.buffer_transactions.iter_mut() { + if let Some(BufferState { buffer, .. }) = self.buffers.get(buffer_id) { + redone |= buffer.update(cx, |buffer, cx| { + let redo_to = *buffer_transaction_id; + if let Some(entry) = buffer.peek_redo_stack() { + *buffer_transaction_id = entry.transaction_id(); + } + buffer.redo_to_transaction(redo_to, cx) + }); + } + } + + if redone { + return Some(transaction.id); + } + } + + None + } + + pub fn undo_transaction(&mut self, transaction_id: TransactionId, cx: &mut Context) { + if let Some(buffer) = self.as_singleton() { + buffer.update(cx, |buffer, cx| buffer.undo_transaction(transaction_id, cx)); + } else if let Some(transaction) = self.history.remove_from_undo(transaction_id) { + for (buffer_id, transaction_id) in &transaction.buffer_transactions { + if let Some(BufferState { buffer, .. 
}) = self.buffers.get(buffer_id) { + buffer.update(cx, |buffer, cx| { + buffer.undo_transaction(*transaction_id, cx) + }); + } + } + } + } + + pub fn forget_transaction(&mut self, transaction_id: TransactionId, cx: &mut Context) { + if let Some(buffer) = self.as_singleton() { + buffer.update(cx, |buffer, _| { + buffer.forget_transaction(transaction_id); + }); + } else if let Some(transaction) = self.history.forget(transaction_id) { + for (buffer_id, buffer_transaction_id) in transaction.buffer_transactions { + if let Some(state) = self.buffers.get_mut(&buffer_id) { + state.buffer.update(cx, |buffer, _| { + buffer.forget_transaction(buffer_transaction_id); + }); + } + } + } + } +} diff --git a/crates/rope/src/chunk.rs b/crates/rope/src/chunk.rs index d0be336c9faf2c5834182387307a7775ba00db38..2fa6112dd439a5835891db813dc9ce12cb22809d 100644 --- a/crates/rope/src/chunk.rs +++ b/crates/rope/src/chunk.rs @@ -448,6 +448,19 @@ impl<'a> ChunkSlice<'a> { } } + #[inline(always)] + pub fn point_to_offset_utf16(&self, point: Point) -> OffsetUtf16 { + if point.row > self.lines().row { + debug_panic!( + "point {:?} extends beyond rows for string {:?}", + point, + self.text + ); + return self.len_utf16(); + } + self.offset_to_offset_utf16(self.point_to_offset(point)) + } + #[inline(always)] pub fn offset_to_offset_utf16(&self, offset: usize) -> OffsetUtf16 { let mask = (1 as Bitmap).unbounded_shl(offset as u32).wrapping_sub(1); diff --git a/crates/rope/src/rope.rs b/crates/rope/src/rope.rs index 5a43e22ea5ef43c5b31aeb63d52dcecdea72f5fe..0195f61dcb30bdc85ae3dbe541fa5fba5f76a2c9 100644 --- a/crates/rope/src/rope.rs +++ b/crates/rope/src/rope.rs @@ -440,6 +440,21 @@ impl Rope { }) } + pub fn point_utf16_to_point(&self, point: PointUtf16) -> Point { + if point >= self.summary().lines_utf16() { + return self.summary().lines; + } + let mut cursor = self.chunks.cursor::>(()); + cursor.seek(&point, Bias::Left); + let overshoot = point - cursor.start().0; + cursor.start().1 + + cursor.item().map_or(Point::zero(), |chunk| { + chunk + .as_slice() + .offset_to_point(chunk.as_slice().point_utf16_to_offset(overshoot, false)) + }) + } + pub fn point_to_offset(&self, point: Point) -> usize { if point >= self.summary().lines { return self.summary().len; @@ -451,10 +466,27 @@ impl Rope { start.1 + item.map_or(0, |chunk| chunk.as_slice().point_to_offset(overshoot)) } + pub fn point_to_offset_utf16(&self, point: Point) -> OffsetUtf16 { + if point >= self.summary().lines { + return self.summary().len_utf16; + } + let mut cursor = self.chunks.cursor::>(()); + cursor.seek(&point, Bias::Left); + let overshoot = point - cursor.start().0; + cursor.start().1 + + cursor.item().map_or(OffsetUtf16(0), |chunk| { + chunk.as_slice().point_to_offset_utf16(overshoot) + }) + } + pub fn point_utf16_to_offset(&self, point: PointUtf16) -> usize { self.point_utf16_to_offset_impl(point, false) } + pub fn point_utf16_to_offset_utf16(&self, point: PointUtf16) -> OffsetUtf16 { + self.point_utf16_to_offset_utf16_impl(point, false) + } + pub fn unclipped_point_utf16_to_offset(&self, point: Unclipped) -> usize { self.point_utf16_to_offset_impl(point.0, true) } @@ -473,6 +505,23 @@ impl Rope { }) } + fn point_utf16_to_offset_utf16_impl(&self, point: PointUtf16, clip: bool) -> OffsetUtf16 { + if point >= self.summary().lines_utf16() { + return self.summary().len_utf16; + } + let mut cursor = self + .chunks + .cursor::>(()); + cursor.seek(&point, Bias::Left); + let overshoot = point - cursor.start().0; + cursor.start().1 + + 
cursor.item().map_or(OffsetUtf16(0), |chunk| {
+                chunk
+                    .as_slice()
+                    .offset_to_offset_utf16(chunk.as_slice().point_utf16_to_offset(overshoot, clip))
+            })
+    }
+
     pub fn unclipped_point_utf16_to_point(&self, point: Unclipped<PointUtf16>) -> Point {
         if point.0 >= self.summary().lines_utf16() {
             return self.summary().lines;
diff --git a/crates/text/src/text.rs b/crates/text/src/text.rs
index d30a3dca0d5a3a5809440b816b9491f7f1d940c8..0634212c8ca41de539c9791193321cce77c9263e 100644
--- a/crates/text/src/text.rs
+++ b/crates/text/src/text.rs
@@ -2051,6 +2051,14 @@ impl BufferSnapshot {
         self.visible_text.point_to_offset(point)
     }
 
+    pub fn point_to_offset_utf16(&self, point: Point) -> OffsetUtf16 {
+        self.visible_text.point_to_offset_utf16(point)
+    }
+
+    pub fn point_utf16_to_offset_utf16(&self, point: PointUtf16) -> OffsetUtf16 {
+        self.visible_text.point_utf16_to_offset_utf16(point)
+    }
+
     pub fn point_utf16_to_offset(&self, point: PointUtf16) -> usize {
         self.visible_text.point_utf16_to_offset(point)
     }
@@ -2083,6 +2091,10 @@ impl BufferSnapshot {
         self.visible_text.point_to_point_utf16(point)
     }
 
+    pub fn point_utf16_to_point(&self, point: PointUtf16) -> Point {
+        self.visible_text.point_utf16_to_point(point)
+    }
+
     pub fn version(&self) -> &clock::Global {
         &self.version
     }
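For reference, the UTF-16 conversions added above to `ChunkSlice`, `Rope`, and `BufferSnapshot` all come down to counting UTF-16 code units (a character outside the Basic Multilingual Plane counts as two) rather than UTF-8 bytes. The standalone sketch below illustrates that arithmetic using only `std`; the function names merely echo the new methods and are not the `rope` crate's API.

    // UTF-16 offsets count code units, while plain offsets count UTF-8 bytes:
    // "é" is 2 bytes but 1 UTF-16 unit, "🦀" is 4 bytes but 2 UTF-16 units.
    // `byte_offset` must lie on a char boundary, as rope offsets do.
    fn offset_to_offset_utf16(text: &str, byte_offset: usize) -> usize {
        text[..byte_offset].chars().map(char::len_utf16).sum()
    }

    // Mirrors the idea behind `point_to_offset_utf16`: skip `row` lines, then
    // convert the byte column on that row into UTF-16 units from the start of the text.
    fn point_to_offset_utf16(text: &str, row: usize, column_bytes: usize) -> usize {
        let row_start: usize = text.split_inclusive('\n').take(row).map(str::len).sum();
        offset_to_offset_utf16(text, row_start + column_bytes)
    }

    fn main() {
        let text = "aé\n🦀b";
        // Byte offset 4 is the start of row 1; the crab emoji spans bytes 4..8.
        assert_eq!(offset_to_offset_utf16(text, 4), 3); // 'a' + 'é' + '\n'
        assert_eq!(point_to_offset_utf16(text, 1, 4), 5); // ... plus '🦀' as two units
    }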