mod anchor;
pub mod locator;
#[cfg(any(test, feature = "test-support"))]
pub mod network;
pub mod operation_queue;
mod patch;
mod selection;
pub mod subscription;
#[cfg(test)]
mod tests;
mod undo_map;

pub use anchor::*;
use anyhow::{Context as _, Result};
use clock::Lamport;
pub use clock::ReplicaId;
use collections::{HashMap, HashSet};
use gpui::BackgroundExecutor;
use locator::Locator;
use operation_queue::OperationQueue;
pub use patch::Patch;
use postage::{oneshot, prelude::*};

use regex::Regex;
pub use rope::*;
pub use selection::*;
use std::{
    borrow::Cow,
    cmp::{self, Ordering, Reverse},
    fmt::Display,
    future::Future,
    iter::Iterator,
    num::NonZeroU64,
    ops::{self, Deref, Range, Sub},
    str,
    sync::{Arc, LazyLock},
    time::{Duration, Instant},
};
pub use subscription::*;
pub use sum_tree::Bias;
use sum_tree::{Dimensions, FilterCursor, SumTree, TreeMap, TreeSet};
use undo_map::UndoMap;

#[cfg(any(test, feature = "test-support"))]
use util::RandomCharIter;

static LINE_SEPARATORS_REGEX: LazyLock<Regex> =
    LazyLock::new(|| Regex::new(r"\r\n|\r").expect("Failed to create LINE_SEPARATORS_REGEX"));
49
50pub type TransactionId = clock::Lamport;
51
52pub struct Buffer {
53 snapshot: BufferSnapshot,
54 history: History,
55 deferred_ops: OperationQueue<Operation>,
56 deferred_replicas: HashSet<ReplicaId>,
57 pub lamport_clock: clock::Lamport,
58 subscriptions: Topic,
59 edit_id_resolvers: HashMap<clock::Lamport, Vec<oneshot::Sender<()>>>,
60 wait_for_version_txs: Vec<(clock::Global, oneshot::Sender<()>)>,
61}
62
63#[repr(transparent)]
64#[derive(Clone, Copy, Debug, Hash, PartialEq, PartialOrd, Ord, Eq)]
65pub struct BufferId(NonZeroU64);
66
67impl Display for BufferId {
68 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
69 write!(f, "{}", self.0)
70 }
71}
72
73impl From<NonZeroU64> for BufferId {
74 fn from(id: NonZeroU64) -> Self {
75 BufferId(id)
76 }
77}
78
79impl BufferId {
    /// Returns an error if `id` is zero and therefore outside the `BufferId` domain.
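    ///
    /// A minimal usage sketch (illustrative, not compiled as a doctest):
    ///
    /// ```ignore
    /// assert!(BufferId::new(0).is_err());
    /// assert_eq!(BufferId::new(7).unwrap().to_proto(), 7);
    /// ```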
81 pub fn new(id: u64) -> anyhow::Result<Self> {
82 let id = NonZeroU64::new(id).context("Buffer id cannot be 0.")?;
83 Ok(Self(id))
84 }
85
    /// Increments this buffer id and returns the previous value, making it
    /// effectively a post-increment operator.
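    ///
    /// A minimal sketch of the post-increment behavior (illustrative, not
    /// compiled as a doctest):
    ///
    /// ```ignore
    /// let mut id = BufferId::new(1).unwrap();
    /// assert_eq!(id.next().to_proto(), 1); // returns the old value
    /// assert_eq!(id.to_proto(), 2); // the id itself has advanced
    /// ```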
88 pub fn next(&mut self) -> Self {
89 let old = *self;
90 self.0 = self.0.saturating_add(1);
91 old
92 }
93
94 pub fn to_proto(self) -> u64 {
95 self.into()
96 }
97}
98
99impl From<BufferId> for u64 {
100 fn from(id: BufferId) -> Self {
101 id.0.get()
102 }
103}
104
105#[derive(Clone)]
106pub struct BufferSnapshot {
107 replica_id: ReplicaId,
108 remote_id: BufferId,
109 visible_text: Rope,
110 deleted_text: Rope,
111 line_ending: LineEnding,
112 undo_map: UndoMap,
113 fragments: SumTree<Fragment>,
114 insertions: SumTree<InsertionFragment>,
115 insertion_slices: TreeSet<InsertionSlice>,
116 pub version: clock::Global,
117}
118
119#[derive(Clone, Debug)]
120pub struct HistoryEntry {
121 transaction: Transaction,
122 first_edit_at: Instant,
123 last_edit_at: Instant,
124 suppress_grouping: bool,
125}
126
127#[derive(Clone, Debug)]
128pub struct Transaction {
129 pub id: TransactionId,
130 pub edit_ids: Vec<clock::Lamport>,
131 pub start: clock::Global,
132}
133
134impl Transaction {
135 pub fn merge_in(&mut self, other: Transaction) {
136 self.edit_ids.extend(other.edit_ids);
137 }
138}
139
140impl HistoryEntry {
141 pub fn transaction_id(&self) -> TransactionId {
142 self.transaction.id
143 }
144}
145
146struct History {
147 base_text: Rope,
148 operations: TreeMap<clock::Lamport, Operation>,
149 undo_stack: Vec<HistoryEntry>,
150 redo_stack: Vec<HistoryEntry>,
151 transaction_depth: usize,
152 group_interval: Duration,
153}
154
155#[derive(Clone, Debug, Eq, PartialEq)]
156struct InsertionSlice {
157 edit_id: clock::Lamport,
158 insertion_id: clock::Lamport,
159 range: Range<usize>,
160}
161
162impl Ord for InsertionSlice {
163 fn cmp(&self, other: &Self) -> Ordering {
164 self.edit_id
165 .cmp(&other.edit_id)
166 .then_with(|| self.insertion_id.cmp(&other.insertion_id))
167 .then_with(|| self.range.start.cmp(&other.range.start))
168 .then_with(|| self.range.end.cmp(&other.range.end))
169 }
170}
171
172impl PartialOrd for InsertionSlice {
173 fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
174 Some(self.cmp(other))
175 }
176}
177
178impl InsertionSlice {
179 fn from_fragment(edit_id: clock::Lamport, fragment: &Fragment) -> Self {
180 Self {
181 edit_id,
182 insertion_id: fragment.timestamp,
183 range: fragment.insertion_offset..fragment.insertion_offset + fragment.len,
184 }
185 }
186}
187
188impl History {
189 pub fn new(base_text: Rope) -> Self {
190 Self {
191 base_text,
192 operations: Default::default(),
193 undo_stack: Vec::new(),
194 redo_stack: Vec::new(),
195 transaction_depth: 0,
196 // Don't group transactions in tests unless we opt in, because it's a footgun.
197 #[cfg(any(test, feature = "test-support"))]
198 group_interval: Duration::ZERO,
199 #[cfg(not(any(test, feature = "test-support")))]
200 group_interval: Duration::from_millis(300),
201 }
202 }
203
204 fn push(&mut self, op: Operation) {
205 self.operations.insert(op.timestamp(), op);
206 }
207
208 fn start_transaction(
209 &mut self,
210 start: clock::Global,
211 now: Instant,
212 clock: &mut clock::Lamport,
213 ) -> Option<TransactionId> {
214 self.transaction_depth += 1;
215 if self.transaction_depth == 1 {
216 let id = clock.tick();
217 self.undo_stack.push(HistoryEntry {
218 transaction: Transaction {
219 id,
220 start,
221 edit_ids: Default::default(),
222 },
223 first_edit_at: now,
224 last_edit_at: now,
225 suppress_grouping: false,
226 });
227 Some(id)
228 } else {
229 None
230 }
231 }
232
233 fn end_transaction(&mut self, now: Instant) -> Option<&HistoryEntry> {
234 assert_ne!(self.transaction_depth, 0);
235 self.transaction_depth -= 1;
236 if self.transaction_depth == 0 {
237 if self
238 .undo_stack
239 .last()
240 .unwrap()
241 .transaction
242 .edit_ids
243 .is_empty()
244 {
245 self.undo_stack.pop();
246 None
247 } else {
248 self.redo_stack.clear();
249 let entry = self.undo_stack.last_mut().unwrap();
250 entry.last_edit_at = now;
251 Some(entry)
252 }
253 } else {
254 None
255 }
256 }
257
258 fn group(&mut self) -> Option<TransactionId> {
259 let mut count = 0;
260 let mut entries = self.undo_stack.iter();
261 if let Some(mut entry) = entries.next_back() {
262 while let Some(prev_entry) = entries.next_back() {
263 if !prev_entry.suppress_grouping
264 && entry.first_edit_at - prev_entry.last_edit_at < self.group_interval
265 {
266 entry = prev_entry;
267 count += 1;
268 } else {
269 break;
270 }
271 }
272 }
273 self.group_trailing(count)
274 }
275
276 fn group_until(&mut self, transaction_id: TransactionId) {
277 let mut count = 0;
278 for entry in self.undo_stack.iter().rev() {
279 if entry.transaction_id() == transaction_id {
280 self.group_trailing(count);
281 break;
282 } else if entry.suppress_grouping {
283 break;
284 } else {
285 count += 1;
286 }
287 }
288 }
289
290 fn group_trailing(&mut self, n: usize) -> Option<TransactionId> {
291 let new_len = self.undo_stack.len() - n;
292 let (entries_to_keep, entries_to_merge) = self.undo_stack.split_at_mut(new_len);
293 if let Some(last_entry) = entries_to_keep.last_mut() {
294 for entry in &*entries_to_merge {
295 for edit_id in &entry.transaction.edit_ids {
296 last_entry.transaction.edit_ids.push(*edit_id);
297 }
298 }
299
300 if let Some(entry) = entries_to_merge.last_mut() {
301 last_entry.last_edit_at = entry.last_edit_at;
302 }
303 }
304
305 self.undo_stack.truncate(new_len);
306 self.undo_stack.last().map(|e| e.transaction.id)
307 }
308
309 fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
310 self.undo_stack.last_mut().map(|entry| {
311 entry.suppress_grouping = true;
312 &entry.transaction
313 })
314 }
315
316 fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
317 assert_eq!(self.transaction_depth, 0);
318 self.undo_stack.push(HistoryEntry {
319 transaction,
320 first_edit_at: now,
321 last_edit_at: now,
322 suppress_grouping: false,
323 });
324 }
325
326 /// Differs from `push_transaction` in that it does not clear the redo
327 /// stack. Intended to be used to create a parent transaction to merge
328 /// potential child transactions into.
329 ///
330 /// The caller is responsible for removing it from the undo history using
331 /// `forget_transaction` if no edits are merged into it. Otherwise, if edits
332 /// are merged into this transaction, the caller is responsible for ensuring
333 /// the redo stack is cleared. The easiest way to ensure the redo stack is
    /// cleared is to create transactions with the usual `start_transaction` and
    /// `end_transaction` methods and then merge the resulting transactions into
    /// the transaction created by this method.
337 fn push_empty_transaction(
338 &mut self,
339 start: clock::Global,
340 now: Instant,
341 clock: &mut clock::Lamport,
342 ) -> TransactionId {
343 assert_eq!(self.transaction_depth, 0);
344 let id = clock.tick();
345 let transaction = Transaction {
346 id,
347 start,
348 edit_ids: Vec::new(),
349 };
350 self.undo_stack.push(HistoryEntry {
351 transaction,
352 first_edit_at: now,
353 last_edit_at: now,
354 suppress_grouping: false,
355 });
356 id
357 }
358
359 fn push_undo(&mut self, op_id: clock::Lamport) {
360 assert_ne!(self.transaction_depth, 0);
361 if let Some(Operation::Edit(_)) = self.operations.get(&op_id) {
362 let last_transaction = self.undo_stack.last_mut().unwrap();
363 last_transaction.transaction.edit_ids.push(op_id);
364 }
365 }
366
367 fn pop_undo(&mut self) -> Option<&HistoryEntry> {
368 assert_eq!(self.transaction_depth, 0);
369 if let Some(entry) = self.undo_stack.pop() {
370 self.redo_stack.push(entry);
371 self.redo_stack.last()
372 } else {
373 None
374 }
375 }
376
377 fn remove_from_undo(&mut self, transaction_id: TransactionId) -> Option<&HistoryEntry> {
378 assert_eq!(self.transaction_depth, 0);
379
380 let entry_ix = self
381 .undo_stack
382 .iter()
383 .rposition(|entry| entry.transaction.id == transaction_id)?;
384 let entry = self.undo_stack.remove(entry_ix);
385 self.redo_stack.push(entry);
386 self.redo_stack.last()
387 }
388
389 fn remove_from_undo_until(&mut self, transaction_id: TransactionId) -> &[HistoryEntry] {
390 assert_eq!(self.transaction_depth, 0);
391
392 let redo_stack_start_len = self.redo_stack.len();
393 if let Some(entry_ix) = self
394 .undo_stack
395 .iter()
396 .rposition(|entry| entry.transaction.id == transaction_id)
397 {
398 self.redo_stack
399 .extend(self.undo_stack.drain(entry_ix..).rev());
400 }
401 &self.redo_stack[redo_stack_start_len..]
402 }
403
404 fn forget(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
405 assert_eq!(self.transaction_depth, 0);
406 if let Some(entry_ix) = self
407 .undo_stack
408 .iter()
409 .rposition(|entry| entry.transaction.id == transaction_id)
410 {
411 Some(self.undo_stack.remove(entry_ix).transaction)
412 } else if let Some(entry_ix) = self
413 .redo_stack
414 .iter()
415 .rposition(|entry| entry.transaction.id == transaction_id)
416 {
417 Some(self.redo_stack.remove(entry_ix).transaction)
418 } else {
419 None
420 }
421 }
422
423 fn transaction(&self, transaction_id: TransactionId) -> Option<&Transaction> {
424 let entry = self
425 .undo_stack
426 .iter()
427 .rfind(|entry| entry.transaction.id == transaction_id)
428 .or_else(|| {
429 self.redo_stack
430 .iter()
431 .rfind(|entry| entry.transaction.id == transaction_id)
432 })?;
433 Some(&entry.transaction)
434 }
435
436 fn transaction_mut(&mut self, transaction_id: TransactionId) -> Option<&mut Transaction> {
437 let entry = self
438 .undo_stack
439 .iter_mut()
440 .rfind(|entry| entry.transaction.id == transaction_id)
441 .or_else(|| {
442 self.redo_stack
443 .iter_mut()
444 .rfind(|entry| entry.transaction.id == transaction_id)
445 })?;
446 Some(&mut entry.transaction)
447 }
448
449 fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
450 if let Some(transaction) = self.forget(transaction)
451 && let Some(destination) = self.transaction_mut(destination)
452 {
453 destination.edit_ids.extend(transaction.edit_ids);
454 }
455 }
456
457 fn pop_redo(&mut self) -> Option<&HistoryEntry> {
458 assert_eq!(self.transaction_depth, 0);
459 if let Some(entry) = self.redo_stack.pop() {
460 self.undo_stack.push(entry);
461 self.undo_stack.last()
462 } else {
463 None
464 }
465 }
466
467 fn remove_from_redo(&mut self, transaction_id: TransactionId) -> &[HistoryEntry] {
468 assert_eq!(self.transaction_depth, 0);
469
470 let undo_stack_start_len = self.undo_stack.len();
471 if let Some(entry_ix) = self
472 .redo_stack
473 .iter()
474 .rposition(|entry| entry.transaction.id == transaction_id)
475 {
476 self.undo_stack
477 .extend(self.redo_stack.drain(entry_ix..).rev());
478 }
479 &self.undo_stack[undo_stack_start_len..]
480 }
481}
482
483struct Edits<'a, D: TextDimension, F: FnMut(&FragmentSummary) -> bool> {
484 visible_cursor: rope::Cursor<'a>,
485 deleted_cursor: rope::Cursor<'a>,
486 fragments_cursor: Option<FilterCursor<'a, 'static, F, Fragment, FragmentTextSummary>>,
487 undos: &'a UndoMap,
488 since: &'a clock::Global,
489 old_end: D,
490 new_end: D,
491 range: Range<(&'a Locator, usize)>,
492 buffer_id: BufferId,
493}
494
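/// A single contiguous change, described by the range it occupied in the old
/// text (`old`) and the range it occupies in the new text (`new`), both
/// measured in the same dimension `D` (for example, byte offsets).
///
/// A minimal sketch of these semantics (illustrative, not compiled as a
/// doctest):
///
/// ```ignore
/// // Replacing the three bytes at 2..5 with a single byte:
/// let edit = Edit { old: 2..5, new: 2..3 };
/// assert_eq!(edit.old_len(), 3);
/// assert_eq!(edit.new_len(), 1);
/// assert!(!edit.is_empty());
/// ```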
495#[derive(Clone, Debug, Default, Eq, PartialEq)]
496pub struct Edit<D> {
497 pub old: Range<D>,
498 pub new: Range<D>,
499}
500
501impl<D> Edit<D>
502where
503 D: Sub<D, Output = D> + PartialEq + Copy,
504{
505 pub fn old_len(&self) -> D {
506 self.old.end - self.old.start
507 }
508
509 pub fn new_len(&self) -> D {
510 self.new.end - self.new.start
511 }
512
513 pub fn is_empty(&self) -> bool {
514 self.old.start == self.old.end && self.new.start == self.new.end
515 }
516}
517
518impl<D1, D2> Edit<(D1, D2)> {
519 pub fn flatten(self) -> (Edit<D1>, Edit<D2>) {
520 (
521 Edit {
522 old: self.old.start.0..self.old.end.0,
523 new: self.new.start.0..self.new.end.0,
524 },
525 Edit {
526 old: self.old.start.1..self.old.end.1,
527 new: self.new.start.1..self.new.end.1,
528 },
529 )
530 }
531}
532
533#[derive(Eq, PartialEq, Clone, Debug)]
534pub struct Fragment {
535 pub id: Locator,
536 pub timestamp: clock::Lamport,
537 pub insertion_offset: usize,
538 pub len: usize,
539 pub visible: bool,
540 pub deletions: HashSet<clock::Lamport>,
541 pub max_undos: clock::Global,
542}
543
544#[derive(Eq, PartialEq, Clone, Debug)]
545pub struct FragmentSummary {
546 text: FragmentTextSummary,
547 max_id: Locator,
548 max_version: clock::Global,
549 min_insertion_version: clock::Global,
550 max_insertion_version: clock::Global,
551}
552
553#[derive(Copy, Default, Clone, Debug, PartialEq, Eq)]
554struct FragmentTextSummary {
555 visible: usize,
556 deleted: usize,
557}
558
559impl<'a> sum_tree::Dimension<'a, FragmentSummary> for FragmentTextSummary {
560 fn zero(_: &Option<clock::Global>) -> Self {
561 Default::default()
562 }
563
564 fn add_summary(&mut self, summary: &'a FragmentSummary, _: &Option<clock::Global>) {
565 self.visible += summary.text.visible;
566 self.deleted += summary.text.deleted;
567 }
568}
569
570#[derive(Eq, PartialEq, Clone, Debug)]
571struct InsertionFragment {
572 timestamp: clock::Lamport,
573 split_offset: usize,
574 fragment_id: Locator,
575}
576
577#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)]
578struct InsertionFragmentKey {
579 timestamp: clock::Lamport,
580 split_offset: usize,
581}
582
583#[derive(Clone, Debug, Eq, PartialEq)]
584pub enum Operation {
585 Edit(EditOperation),
586 Undo(UndoOperation),
587}
588
589#[derive(Clone, Debug, Eq, PartialEq)]
590pub struct EditOperation {
591 pub timestamp: clock::Lamport,
592 pub version: clock::Global,
593 pub ranges: Vec<Range<FullOffset>>,
594 pub new_text: Vec<Arc<str>>,
595}
596
597#[derive(Clone, Debug, Eq, PartialEq)]
598pub struct UndoOperation {
599 pub timestamp: clock::Lamport,
600 pub version: clock::Global,
601 pub counts: HashMap<clock::Lamport, u32>,
602}
603
604/// Stores information about the indentation of a line (tabs and spaces).
605#[derive(Clone, Copy, Debug, Eq, PartialEq)]
606pub struct LineIndent {
607 pub tabs: u32,
608 pub spaces: u32,
609 pub line_blank: bool,
610}
611
612impl LineIndent {
613 pub fn from_chunks(chunks: &mut Chunks) -> Self {
614 let mut tabs = 0;
615 let mut spaces = 0;
616 let mut line_blank = true;
617
618 'outer: while let Some(chunk) = chunks.peek() {
619 for ch in chunk.chars() {
620 if ch == '\t' {
621 tabs += 1;
622 } else if ch == ' ' {
623 spaces += 1;
624 } else {
625 if ch != '\n' {
626 line_blank = false;
627 }
628 break 'outer;
629 }
630 }
631
632 chunks.next();
633 }
634
635 Self {
636 tabs,
637 spaces,
638 line_blank,
639 }
640 }
641
642 /// Constructs a new `LineIndent` which only contains spaces.
643 pub fn spaces(spaces: u32) -> Self {
644 Self {
645 tabs: 0,
646 spaces,
647 line_blank: true,
648 }
649 }
650
651 /// Constructs a new `LineIndent` which only contains tabs.
652 pub fn tabs(tabs: u32) -> Self {
653 Self {
654 tabs,
655 spaces: 0,
656 line_blank: true,
657 }
658 }
659
660 /// Indicates whether the line is empty.
661 pub fn is_line_empty(&self) -> bool {
662 self.tabs == 0 && self.spaces == 0 && self.line_blank
663 }
664
665 /// Indicates whether the line is blank (contains only whitespace).
666 pub fn is_line_blank(&self) -> bool {
667 self.line_blank
668 }
669
670 /// Returns the number of indentation characters (tabs or spaces).
671 pub fn raw_len(&self) -> u32 {
672 self.tabs + self.spaces
673 }
674
675 /// Returns the number of indentation characters (tabs or spaces), taking tab size into account.
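    ///
    /// For example (illustrative, not compiled as a doctest):
    ///
    /// ```ignore
    /// let indent = LineIndent::from("\t  let x = 1;");
    /// assert_eq!(indent.tabs, 1);
    /// assert_eq!(indent.spaces, 2);
    /// assert_eq!(indent.raw_len(), 3);
    /// assert_eq!(indent.len(4), 6); // one tab at width 4, plus two spaces
    /// ```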
676 pub fn len(&self, tab_size: u32) -> u32 {
677 self.tabs * tab_size + self.spaces
678 }
679}
680
681impl From<&str> for LineIndent {
682 fn from(value: &str) -> Self {
683 Self::from_iter(value.chars())
684 }
685}
686
687impl FromIterator<char> for LineIndent {
688 fn from_iter<T: IntoIterator<Item = char>>(chars: T) -> Self {
689 let mut tabs = 0;
690 let mut spaces = 0;
691 let mut line_blank = true;
692 for c in chars {
693 if c == '\t' {
694 tabs += 1;
695 } else if c == ' ' {
696 spaces += 1;
697 } else {
698 if c != '\n' {
699 line_blank = false;
700 }
701 break;
702 }
703 }
704 Self {
705 tabs,
706 spaces,
707 line_blank,
708 }
709 }
710}
711
712impl Buffer {
713 /// Create a new buffer from a string.
714 pub fn new(
715 replica_id: ReplicaId,
716 remote_id: BufferId,
717 base_text: impl Into<String>,
718 executor: &BackgroundExecutor,
719 ) -> Buffer {
720 let mut base_text = base_text.into();
721 let line_ending = LineEnding::detect(&base_text);
722 LineEnding::normalize(&mut base_text);
723 Self::new_normalized(
724 replica_id,
725 remote_id,
726 line_ending,
727 Rope::from_str(&base_text, executor),
728 )
729 }
730
731 /// Create a new buffer from a string.
732 ///
    /// Unlike [`Buffer::new`], this does not construct the backing rope in
    /// parallel, even when the text is large enough to benefit from it.
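    ///
    /// A minimal construction sketch (illustrative, not compiled as a doctest):
    ///
    /// ```ignore
    /// let buffer = Buffer::new_slow(
    ///     ReplicaId::LOCAL,
    ///     BufferId::new(1).unwrap(),
    ///     "hello\r\nworld",
    /// );
    /// // Line endings are detected and normalized to "\n" internally.
    /// assert_eq!(buffer.text(), "hello\nworld");
    /// ```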
734 pub fn new_slow(
735 replica_id: ReplicaId,
736 remote_id: BufferId,
737 base_text: impl Into<String>,
738 ) -> Buffer {
739 let mut base_text = base_text.into();
740 let line_ending = LineEnding::detect(&base_text);
741 LineEnding::normalize(&mut base_text);
742 Self::new_normalized(
743 replica_id,
744 remote_id,
745 line_ending,
746 Rope::from_str_small(&base_text),
747 )
748 }
749
750 pub fn new_normalized(
751 replica_id: ReplicaId,
752 remote_id: BufferId,
753 line_ending: LineEnding,
754 normalized: Rope,
755 ) -> Buffer {
756 let history = History::new(normalized);
757 let mut fragments = SumTree::new(&None);
758 let mut insertions = SumTree::default();
759
760 let mut lamport_clock = clock::Lamport::new(replica_id);
761 let mut version = clock::Global::new();
762
763 let visible_text = history.base_text.clone();
764 if !visible_text.is_empty() {
765 let insertion_timestamp = clock::Lamport::new(ReplicaId::LOCAL);
766 lamport_clock.observe(insertion_timestamp);
767 version.observe(insertion_timestamp);
768 let fragment_id = Locator::between(&Locator::min(), &Locator::max());
769 let fragment = Fragment {
770 id: fragment_id,
771 timestamp: insertion_timestamp,
772 insertion_offset: 0,
773 len: visible_text.len(),
774 visible: true,
775 deletions: Default::default(),
776 max_undos: Default::default(),
777 };
778 insertions.push(InsertionFragment::new(&fragment), ());
779 fragments.push(fragment, &None);
780 }
781
782 Buffer {
783 snapshot: BufferSnapshot {
784 replica_id,
785 remote_id,
786 visible_text,
787 deleted_text: Rope::new(),
788 line_ending,
789 fragments,
790 insertions,
791 version,
792 undo_map: Default::default(),
793 insertion_slices: Default::default(),
794 },
795 history,
796 deferred_ops: OperationQueue::new(),
797 deferred_replicas: HashSet::default(),
798 lamport_clock,
799 subscriptions: Default::default(),
800 edit_id_resolvers: Default::default(),
801 wait_for_version_txs: Default::default(),
802 }
803 }
804
805 pub fn version(&self) -> clock::Global {
806 self.version.clone()
807 }
808
809 pub fn snapshot(&self) -> BufferSnapshot {
810 self.snapshot.clone()
811 }
812
813 pub fn branch(&self) -> Self {
814 Self {
815 snapshot: self.snapshot.clone(),
816 history: History::new(self.base_text().clone()),
817 deferred_ops: OperationQueue::new(),
818 deferred_replicas: HashSet::default(),
819 lamport_clock: clock::Lamport::new(ReplicaId::LOCAL_BRANCH),
820 subscriptions: Default::default(),
821 edit_id_resolvers: Default::default(),
822 wait_for_version_txs: Default::default(),
823 }
824 }
825
826 pub fn replica_id(&self) -> ReplicaId {
827 self.lamport_clock.replica_id
828 }
829
830 pub fn remote_id(&self) -> BufferId {
831 self.remote_id
832 }
833
834 pub fn deferred_ops_len(&self) -> usize {
835 self.deferred_ops.len()
836 }
837
838 pub fn transaction_group_interval(&self) -> Duration {
839 self.history.group_interval
840 }
841
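    /// Applies a batch of local edits, each given as a range in the current
    /// text plus its replacement, and returns the resulting `Operation` so the
    /// caller can forward it to other replicas.
    ///
    /// A minimal sketch (illustrative, not compiled as a doctest; `executor`
    /// is assumed to be a `gpui::BackgroundExecutor` available to the caller):
    ///
    /// ```ignore
    /// let mut buffer = Buffer::new(ReplicaId::LOCAL, BufferId::new(1).unwrap(), "", &executor);
    /// let op = buffer.edit([(0..0, "hello")], &executor);
    /// assert_eq!(buffer.text(), "hello");
    /// // `op` can be applied on other replicas via `apply_ops`.
    /// ```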
842 pub fn edit<R, I, S, T>(&mut self, edits: R, cx: &BackgroundExecutor) -> Operation
843 where
844 R: IntoIterator<IntoIter = I>,
845 I: ExactSizeIterator<Item = (Range<S>, T)>,
846 S: ToOffset,
847 T: Into<Arc<str>>,
848 {
849 let edits = edits
850 .into_iter()
851 .map(|(range, new_text)| (range, new_text.into()));
852
853 self.start_transaction();
854 let timestamp = self.lamport_clock.tick();
855 let operation = Operation::Edit(self.apply_local_edit(edits, timestamp, cx));
856
857 self.history.push(operation.clone());
858 self.history.push_undo(operation.timestamp());
859 self.snapshot.version.observe(operation.timestamp());
860 self.end_transaction();
861 operation
862 }
863
864 fn apply_local_edit<S: ToOffset, T: Into<Arc<str>>>(
865 &mut self,
866 edits: impl ExactSizeIterator<Item = (Range<S>, T)>,
867 timestamp: clock::Lamport,
868 executor: &BackgroundExecutor,
869 ) -> EditOperation {
870 let mut edits_patch = Patch::default();
871 let mut edit_op = EditOperation {
872 timestamp,
873 version: self.version(),
874 ranges: Vec::with_capacity(edits.len()),
875 new_text: Vec::with_capacity(edits.len()),
876 };
877 let mut new_insertions = Vec::new();
878 let mut insertion_offset = 0;
879 let mut insertion_slices = Vec::new();
880
881 let mut edits = edits
882 .map(|(range, new_text)| (range.to_offset(&*self), new_text))
883 .peekable();
884
885 let mut new_ropes =
886 RopeBuilder::new(self.visible_text.cursor(0), self.deleted_text.cursor(0));
887 let mut old_fragments = self.fragments.cursor::<FragmentTextSummary>(&None);
888 let mut new_fragments = old_fragments.slice(&edits.peek().unwrap().0.start, Bias::Right);
889 new_ropes.append(new_fragments.summary().text);
890
891 let mut fragment_start = old_fragments.start().visible;
892 for (range, new_text) in edits {
893 let new_text = LineEnding::normalize_arc(new_text.into());
894 let fragment_end = old_fragments.end().visible;
895
896 // If the current fragment ends before this range, then jump ahead to the first fragment
897 // that extends past the start of this range, reusing any intervening fragments.
898 if fragment_end < range.start {
899 // If the current fragment has been partially consumed, then consume the rest of it
900 // and advance to the next fragment before slicing.
901 if fragment_start > old_fragments.start().visible {
902 if fragment_end > fragment_start {
903 let mut suffix = old_fragments.item().unwrap().clone();
904 suffix.len = fragment_end - fragment_start;
905 suffix.insertion_offset += fragment_start - old_fragments.start().visible;
906 new_insertions.push(InsertionFragment::insert_new(&suffix));
907 new_ropes.push_fragment(&suffix, suffix.visible);
908 new_fragments.push(suffix, &None);
909 }
910 old_fragments.next();
911 }
912
913 let slice = old_fragments.slice(&range.start, Bias::Right);
914 new_ropes.append(slice.summary().text);
915 new_fragments.append(slice, &None);
916 fragment_start = old_fragments.start().visible;
917 }
918
919 let full_range_start = FullOffset(range.start + old_fragments.start().deleted);
920
921 // Preserve any portion of the current fragment that precedes this range.
922 if fragment_start < range.start {
923 let mut prefix = old_fragments.item().unwrap().clone();
924 prefix.len = range.start - fragment_start;
925 prefix.insertion_offset += fragment_start - old_fragments.start().visible;
926 prefix.id = Locator::between(&new_fragments.summary().max_id, &prefix.id);
927 new_insertions.push(InsertionFragment::insert_new(&prefix));
928 new_ropes.push_fragment(&prefix, prefix.visible);
929 new_fragments.push(prefix, &None);
930 fragment_start = range.start;
931 }
932
933 // Insert the new text before any existing fragments within the range.
934 if !new_text.is_empty() {
935 let new_start = new_fragments.summary().text.visible;
936
937 let fragment = Fragment {
938 id: Locator::between(
939 &new_fragments.summary().max_id,
940 old_fragments
941 .item()
942 .map_or(&Locator::max(), |old_fragment| &old_fragment.id),
943 ),
944 timestamp,
945 insertion_offset,
946 len: new_text.len(),
947 deletions: Default::default(),
948 max_undos: Default::default(),
949 visible: true,
950 };
951 edits_patch.push(Edit {
952 old: fragment_start..fragment_start,
953 new: new_start..new_start + new_text.len(),
954 });
955 insertion_slices.push(InsertionSlice::from_fragment(timestamp, &fragment));
956 new_insertions.push(InsertionFragment::insert_new(&fragment));
957 new_ropes.push_str(new_text.as_ref(), executor);
958 new_fragments.push(fragment, &None);
959 insertion_offset += new_text.len();
960 }
961
962 // Advance through every fragment that intersects this range, marking the intersecting
963 // portions as deleted.
964 while fragment_start < range.end {
965 let fragment = old_fragments.item().unwrap();
966 let fragment_end = old_fragments.end().visible;
967 let mut intersection = fragment.clone();
968 let intersection_end = cmp::min(range.end, fragment_end);
969 if fragment.visible {
970 intersection.len = intersection_end - fragment_start;
971 intersection.insertion_offset += fragment_start - old_fragments.start().visible;
972 intersection.id =
973 Locator::between(&new_fragments.summary().max_id, &intersection.id);
974 intersection.deletions.insert(timestamp);
975 intersection.visible = false;
976 }
977 if intersection.len > 0 {
978 if fragment.visible && !intersection.visible {
979 let new_start = new_fragments.summary().text.visible;
980 edits_patch.push(Edit {
981 old: fragment_start..intersection_end,
982 new: new_start..new_start,
983 });
984 insertion_slices
985 .push(InsertionSlice::from_fragment(timestamp, &intersection));
986 }
987 new_insertions.push(InsertionFragment::insert_new(&intersection));
988 new_ropes.push_fragment(&intersection, fragment.visible);
989 new_fragments.push(intersection, &None);
990 fragment_start = intersection_end;
991 }
992 if fragment_end <= range.end {
993 old_fragments.next();
994 }
995 }
996
997 let full_range_end = FullOffset(range.end + old_fragments.start().deleted);
998 edit_op.ranges.push(full_range_start..full_range_end);
999 edit_op.new_text.push(new_text);
1000 }
1001
1002 // If the current fragment has been partially consumed, then consume the rest of it
1003 // and advance to the next fragment before slicing.
1004 if fragment_start > old_fragments.start().visible {
1005 let fragment_end = old_fragments.end().visible;
1006 if fragment_end > fragment_start {
1007 let mut suffix = old_fragments.item().unwrap().clone();
1008 suffix.len = fragment_end - fragment_start;
1009 suffix.insertion_offset += fragment_start - old_fragments.start().visible;
1010 new_insertions.push(InsertionFragment::insert_new(&suffix));
1011 new_ropes.push_fragment(&suffix, suffix.visible);
1012 new_fragments.push(suffix, &None);
1013 }
1014 old_fragments.next();
1015 }
1016
1017 let suffix = old_fragments.suffix();
1018 new_ropes.append(suffix.summary().text);
1019 new_fragments.append(suffix, &None);
1020 let (visible_text, deleted_text) = new_ropes.finish();
1021 drop(old_fragments);
1022
1023 self.snapshot.fragments = new_fragments;
1024 self.snapshot.insertions.edit(new_insertions, ());
1025 self.snapshot.visible_text = visible_text;
1026 self.snapshot.deleted_text = deleted_text;
1027 self.subscriptions.publish_mut(&edits_patch);
1028 self.snapshot.insertion_slices.extend(insertion_slices);
1029 edit_op
1030 }
1031
1032 pub fn set_line_ending(&mut self, line_ending: LineEnding) {
1033 self.snapshot.line_ending = line_ending;
1034 }
1035
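    /// Applies operations received from other replicas. Operations whose
    /// causal dependencies have not been observed yet are deferred and applied
    /// automatically once the missing operations arrive.
    ///
    /// A minimal sketch (illustrative, not compiled as a doctest; `remote_ops`
    /// is assumed to be a `Vec<Operation>` received from a collaborator):
    ///
    /// ```ignore
    /// buffer.apply_ops(remote_ops, Some(&executor));
    /// ```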
1036 pub fn apply_ops<I: IntoIterator<Item = Operation>>(
1037 &mut self,
1038 ops: I,
1039 executor: Option<&BackgroundExecutor>,
1040 ) {
1041 let mut deferred_ops = Vec::new();
1042 for op in ops {
1043 self.history.push(op.clone());
1044 if self.can_apply_op(&op) {
1045 self.apply_op(op, executor);
1046 } else {
1047 self.deferred_replicas.insert(op.replica_id());
1048 deferred_ops.push(op);
1049 }
1050 }
1051 self.deferred_ops.insert(deferred_ops);
1052 self.flush_deferred_ops(executor);
1053 }
1054
1055 fn apply_op(&mut self, op: Operation, executor: Option<&BackgroundExecutor>) {
1056 match op {
1057 Operation::Edit(edit) => {
1058 if !self.version.observed(edit.timestamp) {
1059 self.apply_remote_edit(
1060 &edit.version,
1061 &edit.ranges,
1062 &edit.new_text,
1063 edit.timestamp,
1064 executor,
1065 );
1066 self.snapshot.version.observe(edit.timestamp);
1067 self.lamport_clock.observe(edit.timestamp);
1068 self.resolve_edit(edit.timestamp);
1069 }
1070 }
1071 Operation::Undo(undo) => {
1072 if !self.version.observed(undo.timestamp) {
1073 self.apply_undo(&undo);
1074 self.snapshot.version.observe(undo.timestamp);
1075 self.lamport_clock.observe(undo.timestamp);
1076 }
1077 }
1078 }
1079 self.wait_for_version_txs.retain_mut(|(version, tx)| {
1080 if self.snapshot.version().observed_all(version) {
1081 tx.try_send(()).ok();
1082 false
1083 } else {
1084 true
1085 }
1086 });
1087 }
1088
1089 fn apply_remote_edit(
1090 &mut self,
1091 version: &clock::Global,
1092 ranges: &[Range<FullOffset>],
1093 new_text: &[Arc<str>],
1094 timestamp: clock::Lamport,
1095 executor: Option<&BackgroundExecutor>,
1096 ) {
1097 if ranges.is_empty() {
1098 return;
1099 }
1100
1101 let edits = ranges.iter().zip(new_text.iter());
1102 let mut edits_patch = Patch::default();
1103 let mut insertion_slices = Vec::new();
1104 let cx = Some(version.clone());
1105 let mut new_insertions = Vec::new();
1106 let mut insertion_offset = 0;
1107 let mut new_ropes =
1108 RopeBuilder::new(self.visible_text.cursor(0), self.deleted_text.cursor(0));
1109 let mut old_fragments = self
1110 .fragments
1111 .cursor::<Dimensions<VersionedFullOffset, usize>>(&cx);
1112 let mut new_fragments =
1113 old_fragments.slice(&VersionedFullOffset::Offset(ranges[0].start), Bias::Left);
1114 new_ropes.append(new_fragments.summary().text);
1115
1116 let mut fragment_start = old_fragments.start().0.full_offset();
1117 for (range, new_text) in edits {
1118 let fragment_end = old_fragments.end().0.full_offset();
1119
1120 // If the current fragment ends before this range, then jump ahead to the first fragment
1121 // that extends past the start of this range, reusing any intervening fragments.
1122 if fragment_end < range.start {
1123 // If the current fragment has been partially consumed, then consume the rest of it
1124 // and advance to the next fragment before slicing.
1125 if fragment_start > old_fragments.start().0.full_offset() {
1126 if fragment_end > fragment_start {
1127 let mut suffix = old_fragments.item().unwrap().clone();
1128 suffix.len = fragment_end.0 - fragment_start.0;
1129 suffix.insertion_offset +=
1130 fragment_start - old_fragments.start().0.full_offset();
1131 new_insertions.push(InsertionFragment::insert_new(&suffix));
1132 new_ropes.push_fragment(&suffix, suffix.visible);
1133 new_fragments.push(suffix, &None);
1134 }
1135 old_fragments.next();
1136 }
1137
1138 let slice =
1139 old_fragments.slice(&VersionedFullOffset::Offset(range.start), Bias::Left);
1140 new_ropes.append(slice.summary().text);
1141 new_fragments.append(slice, &None);
1142 fragment_start = old_fragments.start().0.full_offset();
1143 }
1144
1145 // If we are at the end of a non-concurrent fragment, advance to the next one.
1146 let fragment_end = old_fragments.end().0.full_offset();
1147 if fragment_end == range.start && fragment_end > fragment_start {
1148 let mut fragment = old_fragments.item().unwrap().clone();
1149 fragment.len = fragment_end.0 - fragment_start.0;
1150 fragment.insertion_offset += fragment_start - old_fragments.start().0.full_offset();
1151 new_insertions.push(InsertionFragment::insert_new(&fragment));
1152 new_ropes.push_fragment(&fragment, fragment.visible);
1153 new_fragments.push(fragment, &None);
1154 old_fragments.next();
1155 fragment_start = old_fragments.start().0.full_offset();
1156 }
1157
1158 // Skip over insertions that are concurrent to this edit, but have a lower lamport
1159 // timestamp.
1160 while let Some(fragment) = old_fragments.item() {
1161 if fragment_start == range.start && fragment.timestamp > timestamp {
1162 new_ropes.push_fragment(fragment, fragment.visible);
1163 new_fragments.push(fragment.clone(), &None);
1164 old_fragments.next();
1165 debug_assert_eq!(fragment_start, range.start);
1166 } else {
1167 break;
1168 }
1169 }
1170 debug_assert!(fragment_start <= range.start);
1171
1172 // Preserve any portion of the current fragment that precedes this range.
1173 if fragment_start < range.start {
1174 let mut prefix = old_fragments.item().unwrap().clone();
1175 prefix.len = range.start.0 - fragment_start.0;
1176 prefix.insertion_offset += fragment_start - old_fragments.start().0.full_offset();
1177 prefix.id = Locator::between(&new_fragments.summary().max_id, &prefix.id);
1178 new_insertions.push(InsertionFragment::insert_new(&prefix));
1179 fragment_start = range.start;
1180 new_ropes.push_fragment(&prefix, prefix.visible);
1181 new_fragments.push(prefix, &None);
1182 }
1183
1184 // Insert the new text before any existing fragments within the range.
1185 if !new_text.is_empty() {
1186 let mut old_start = old_fragments.start().1;
1187 if old_fragments.item().is_some_and(|f| f.visible) {
1188 old_start += fragment_start.0 - old_fragments.start().0.full_offset().0;
1189 }
1190 let new_start = new_fragments.summary().text.visible;
1191 let fragment = Fragment {
1192 id: Locator::between(
1193 &new_fragments.summary().max_id,
1194 old_fragments
1195 .item()
1196 .map_or(&Locator::max(), |old_fragment| &old_fragment.id),
1197 ),
1198 timestamp,
1199 insertion_offset,
1200 len: new_text.len(),
1201 deletions: Default::default(),
1202 max_undos: Default::default(),
1203 visible: true,
1204 };
1205 edits_patch.push(Edit {
1206 old: old_start..old_start,
1207 new: new_start..new_start + new_text.len(),
1208 });
1209 insertion_slices.push(InsertionSlice::from_fragment(timestamp, &fragment));
1210 new_insertions.push(InsertionFragment::insert_new(&fragment));
1211 match executor {
1212 Some(executor) => new_ropes.push_str(new_text, executor),
1213 None => new_ropes.push_str_small(new_text),
1214 }
1215 new_fragments.push(fragment, &None);
1216 insertion_offset += new_text.len();
1217 }
1218
1219 // Advance through every fragment that intersects this range, marking the intersecting
1220 // portions as deleted.
1221 while fragment_start < range.end {
1222 let fragment = old_fragments.item().unwrap();
1223 let fragment_end = old_fragments.end().0.full_offset();
1224 let mut intersection = fragment.clone();
1225 let intersection_end = cmp::min(range.end, fragment_end);
1226 if fragment.was_visible(version, &self.undo_map) {
1227 intersection.len = intersection_end.0 - fragment_start.0;
1228 intersection.insertion_offset +=
1229 fragment_start - old_fragments.start().0.full_offset();
1230 intersection.id =
1231 Locator::between(&new_fragments.summary().max_id, &intersection.id);
1232 intersection.deletions.insert(timestamp);
1233 intersection.visible = false;
1234 insertion_slices.push(InsertionSlice::from_fragment(timestamp, &intersection));
1235 }
1236 if intersection.len > 0 {
1237 if fragment.visible && !intersection.visible {
1238 let old_start = old_fragments.start().1
1239 + (fragment_start.0 - old_fragments.start().0.full_offset().0);
1240 let new_start = new_fragments.summary().text.visible;
1241 edits_patch.push(Edit {
1242 old: old_start..old_start + intersection.len,
1243 new: new_start..new_start,
1244 });
1245 }
1246 new_insertions.push(InsertionFragment::insert_new(&intersection));
1247 new_ropes.push_fragment(&intersection, fragment.visible);
1248 new_fragments.push(intersection, &None);
1249 fragment_start = intersection_end;
1250 }
1251 if fragment_end <= range.end {
1252 old_fragments.next();
1253 }
1254 }
1255 }
1256
1257 // If the current fragment has been partially consumed, then consume the rest of it
1258 // and advance to the next fragment before slicing.
1259 if fragment_start > old_fragments.start().0.full_offset() {
1260 let fragment_end = old_fragments.end().0.full_offset();
1261 if fragment_end > fragment_start {
1262 let mut suffix = old_fragments.item().unwrap().clone();
1263 suffix.len = fragment_end.0 - fragment_start.0;
1264 suffix.insertion_offset += fragment_start - old_fragments.start().0.full_offset();
1265 new_insertions.push(InsertionFragment::insert_new(&suffix));
1266 new_ropes.push_fragment(&suffix, suffix.visible);
1267 new_fragments.push(suffix, &None);
1268 }
1269 old_fragments.next();
1270 }
1271
1272 let suffix = old_fragments.suffix();
1273 new_ropes.append(suffix.summary().text);
1274 new_fragments.append(suffix, &None);
1275 let (visible_text, deleted_text) = new_ropes.finish();
1276 drop(old_fragments);
1277
1278 self.snapshot.fragments = new_fragments;
1279 self.snapshot.visible_text = visible_text;
1280 self.snapshot.deleted_text = deleted_text;
1281 self.snapshot.insertions.edit(new_insertions, ());
1282 self.snapshot.insertion_slices.extend(insertion_slices);
1283 self.subscriptions.publish_mut(&edits_patch)
1284 }
1285
1286 fn fragment_ids_for_edits<'a>(
1287 &'a self,
1288 edit_ids: impl Iterator<Item = &'a clock::Lamport>,
1289 ) -> Vec<&'a Locator> {
1290 // Get all of the insertion slices changed by the given edits.
1291 let mut insertion_slices = Vec::new();
1292 for edit_id in edit_ids {
1293 let insertion_slice = InsertionSlice {
1294 edit_id: *edit_id,
1295 insertion_id: clock::Lamport::MIN,
1296 range: 0..0,
1297 };
1298 let slices = self
1299 .snapshot
1300 .insertion_slices
1301 .iter_from(&insertion_slice)
1302 .take_while(|slice| slice.edit_id == *edit_id);
1303 insertion_slices.extend(slices)
1304 }
1305 insertion_slices
1306 .sort_unstable_by_key(|s| (s.insertion_id, s.range.start, Reverse(s.range.end)));
1307
1308 // Get all of the fragments corresponding to these insertion slices.
1309 let mut fragment_ids = Vec::new();
1310 let mut insertions_cursor = self.insertions.cursor::<InsertionFragmentKey>(());
1311 for insertion_slice in &insertion_slices {
1312 if insertion_slice.insertion_id != insertions_cursor.start().timestamp
1313 || insertion_slice.range.start > insertions_cursor.start().split_offset
1314 {
1315 insertions_cursor.seek_forward(
1316 &InsertionFragmentKey {
1317 timestamp: insertion_slice.insertion_id,
1318 split_offset: insertion_slice.range.start,
1319 },
1320 Bias::Left,
1321 );
1322 }
1323 while let Some(item) = insertions_cursor.item() {
1324 if item.timestamp != insertion_slice.insertion_id
1325 || item.split_offset >= insertion_slice.range.end
1326 {
1327 break;
1328 }
1329 fragment_ids.push(&item.fragment_id);
1330 insertions_cursor.next();
1331 }
1332 }
1333 fragment_ids.sort_unstable();
1334 fragment_ids
1335 }
1336
1337 fn apply_undo(&mut self, undo: &UndoOperation) {
1338 self.snapshot.undo_map.insert(undo);
1339
1340 let mut edits = Patch::default();
1341 let mut old_fragments = self
1342 .fragments
1343 .cursor::<Dimensions<Option<&Locator>, usize>>(&None);
1344 let mut new_fragments = SumTree::new(&None);
1345 let mut new_ropes =
1346 RopeBuilder::new(self.visible_text.cursor(0), self.deleted_text.cursor(0));
1347
1348 for fragment_id in self.fragment_ids_for_edits(undo.counts.keys()) {
1349 let preceding_fragments = old_fragments.slice(&Some(fragment_id), Bias::Left);
1350 new_ropes.append(preceding_fragments.summary().text);
1351 new_fragments.append(preceding_fragments, &None);
1352
1353 if let Some(fragment) = old_fragments.item() {
1354 let mut fragment = fragment.clone();
1355 let fragment_was_visible = fragment.visible;
1356
1357 fragment.visible = fragment.is_visible(&self.undo_map);
1358 fragment.max_undos.observe(undo.timestamp);
1359
1360 let old_start = old_fragments.start().1;
1361 let new_start = new_fragments.summary().text.visible;
1362 if fragment_was_visible && !fragment.visible {
1363 edits.push(Edit {
1364 old: old_start..old_start + fragment.len,
1365 new: new_start..new_start,
1366 });
1367 } else if !fragment_was_visible && fragment.visible {
1368 edits.push(Edit {
1369 old: old_start..old_start,
1370 new: new_start..new_start + fragment.len,
1371 });
1372 }
1373 new_ropes.push_fragment(&fragment, fragment_was_visible);
1374 new_fragments.push(fragment, &None);
1375
1376 old_fragments.next();
1377 }
1378 }
1379
1380 let suffix = old_fragments.suffix();
1381 new_ropes.append(suffix.summary().text);
1382 new_fragments.append(suffix, &None);
1383
1384 drop(old_fragments);
1385 let (visible_text, deleted_text) = new_ropes.finish();
1386 self.snapshot.fragments = new_fragments;
1387 self.snapshot.visible_text = visible_text;
1388 self.snapshot.deleted_text = deleted_text;
1389 self.subscriptions.publish_mut(&edits);
1390 }
1391
1392 fn flush_deferred_ops(&mut self, executor: Option<&BackgroundExecutor>) {
1393 self.deferred_replicas.clear();
1394 let mut deferred_ops = Vec::new();
1395 for op in self.deferred_ops.drain().iter().cloned() {
1396 if self.can_apply_op(&op) {
1397 self.apply_op(op, executor);
1398 } else {
1399 self.deferred_replicas.insert(op.replica_id());
1400 deferred_ops.push(op);
1401 }
1402 }
1403 self.deferred_ops.insert(deferred_ops);
1404 }
1405
1406 fn can_apply_op(&self, op: &Operation) -> bool {
1407 if self.deferred_replicas.contains(&op.replica_id()) {
1408 false
1409 } else {
1410 self.version.observed_all(match op {
1411 Operation::Edit(edit) => &edit.version,
1412 Operation::Undo(undo) => &undo.version,
1413 })
1414 }
1415 }
1416
1417 pub fn has_deferred_ops(&self) -> bool {
1418 !self.deferred_ops.is_empty()
1419 }
1420
1421 pub fn peek_undo_stack(&self) -> Option<&HistoryEntry> {
1422 self.history.undo_stack.last()
1423 }
1424
1425 pub fn peek_redo_stack(&self) -> Option<&HistoryEntry> {
1426 self.history.redo_stack.last()
1427 }
1428
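    /// Starts an undo transaction: all edits made before the matching
    /// `end_transaction` call are grouped into a single undo entry.
    ///
    /// A minimal sketch (illustrative, not compiled as a doctest; `executor`
    /// is assumed to be a `gpui::BackgroundExecutor`):
    ///
    /// ```ignore
    /// buffer.start_transaction();
    /// buffer.edit([(0..0, "a")], &executor);
    /// buffer.edit([(1..1, "b")], &executor);
    /// let (transaction_id, _start_version) = buffer.end_transaction().unwrap();
    /// // Undoing the transaction later removes both edits together.
    /// buffer.undo_transaction(transaction_id);
    /// ```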
1429 pub fn start_transaction(&mut self) -> Option<TransactionId> {
1430 self.start_transaction_at(Instant::now())
1431 }
1432
1433 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
1434 self.history
1435 .start_transaction(self.version.clone(), now, &mut self.lamport_clock)
1436 }
1437
1438 pub fn end_transaction(&mut self) -> Option<(TransactionId, clock::Global)> {
1439 self.end_transaction_at(Instant::now())
1440 }
1441
1442 pub fn end_transaction_at(&mut self, now: Instant) -> Option<(TransactionId, clock::Global)> {
1443 if let Some(entry) = self.history.end_transaction(now) {
1444 let since = entry.transaction.start.clone();
1445 let id = self.history.group().unwrap();
1446 Some((id, since))
1447 } else {
1448 None
1449 }
1450 }
1451
1452 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
1453 self.history.finalize_last_transaction()
1454 }
1455
1456 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
1457 self.history.group_until(transaction_id);
1458 }
1459
1460 pub fn base_text(&self) -> &Rope {
1461 &self.history.base_text
1462 }
1463
1464 pub fn operations(&self) -> &TreeMap<clock::Lamport, Operation> {
1465 &self.history.operations
1466 }
1467
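    /// Undoes the most recent transaction on the undo stack, returning its id
    /// and the `Operation` that other replicas need in order to converge.
    ///
    /// A minimal sketch (illustrative; `broadcast` is a hypothetical stand-in
    /// for whatever transport the caller uses to distribute operations):
    ///
    /// ```ignore
    /// if let Some((transaction_id, operation)) = buffer.undo() {
    ///     broadcast(operation);
    /// }
    /// ```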
1468 pub fn undo(&mut self) -> Option<(TransactionId, Operation)> {
1469 if let Some(entry) = self.history.pop_undo() {
1470 let transaction = entry.transaction.clone();
1471 let transaction_id = transaction.id;
1472 let op = self.undo_or_redo(transaction);
1473 Some((transaction_id, op))
1474 } else {
1475 None
1476 }
1477 }
1478
1479 pub fn undo_transaction(&mut self, transaction_id: TransactionId) -> Option<Operation> {
1480 let transaction = self
1481 .history
1482 .remove_from_undo(transaction_id)?
1483 .transaction
1484 .clone();
1485 Some(self.undo_or_redo(transaction))
1486 }
1487
1488 pub fn undo_to_transaction(&mut self, transaction_id: TransactionId) -> Vec<Operation> {
1489 let transactions = self
1490 .history
1491 .remove_from_undo_until(transaction_id)
1492 .iter()
1493 .map(|entry| entry.transaction.clone())
1494 .collect::<Vec<_>>();
1495
1496 transactions
1497 .into_iter()
1498 .map(|transaction| self.undo_or_redo(transaction))
1499 .collect()
1500 }
1501
1502 pub fn forget_transaction(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
1503 self.history.forget(transaction_id)
1504 }
1505
1506 pub fn get_transaction(&self, transaction_id: TransactionId) -> Option<&Transaction> {
1507 self.history.transaction(transaction_id)
1508 }
1509
1510 pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
1511 self.history.merge_transactions(transaction, destination);
1512 }
1513
1514 pub fn redo(&mut self) -> Option<(TransactionId, Operation)> {
1515 if let Some(entry) = self.history.pop_redo() {
1516 let transaction = entry.transaction.clone();
1517 let transaction_id = transaction.id;
1518 let op = self.undo_or_redo(transaction);
1519 Some((transaction_id, op))
1520 } else {
1521 None
1522 }
1523 }
1524
1525 pub fn redo_to_transaction(&mut self, transaction_id: TransactionId) -> Vec<Operation> {
1526 let transactions = self
1527 .history
1528 .remove_from_redo(transaction_id)
1529 .iter()
1530 .map(|entry| entry.transaction.clone())
1531 .collect::<Vec<_>>();
1532
1533 transactions
1534 .into_iter()
1535 .map(|transaction| self.undo_or_redo(transaction))
1536 .collect()
1537 }
1538
1539 fn undo_or_redo(&mut self, transaction: Transaction) -> Operation {
1540 let mut counts = HashMap::default();
1541 for edit_id in transaction.edit_ids {
1542 counts.insert(edit_id, self.undo_map.undo_count(edit_id).saturating_add(1));
1543 }
1544
1545 let operation = self.undo_operations(counts);
1546 self.history.push(operation.clone());
1547 operation
1548 }
1549
1550 pub fn undo_operations(&mut self, counts: HashMap<clock::Lamport, u32>) -> Operation {
1551 let timestamp = self.lamport_clock.tick();
1552 let version = self.version();
1553 self.snapshot.version.observe(timestamp);
1554 let undo = UndoOperation {
1555 timestamp,
1556 version,
1557 counts,
1558 };
1559 self.apply_undo(&undo);
1560 Operation::Undo(undo)
1561 }
1562
1563 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
1564 self.history.push_transaction(transaction, now);
1565 }
1566
    /// Differs from `push_transaction` in that it does not clear the redo
    /// stack. Intended to be used to create a parent transaction to merge
    /// potential child transactions into.
    ///
    /// The caller is responsible for removing it from the undo history using
    /// `forget_transaction` if no edits are merged into it. Otherwise, if edits
    /// are merged into this transaction, the caller is responsible for ensuring
    /// the redo stack is cleared. The easiest way to ensure the redo stack is
    /// cleared is to create transactions with the usual `start_transaction` and
    /// `end_transaction` methods and then merge the resulting transactions into
    /// the transaction created by this method.
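    ///
    /// A minimal sketch of the intended call pattern (illustrative, not
    /// compiled as a doctest):
    ///
    /// ```ignore
    /// let parent = buffer.push_empty_transaction(Instant::now());
    /// // `child` is assumed to be an `Option<TransactionId>` from a later
    /// // `start_transaction`/`end_transaction` pair, if any edits were made.
    /// match child {
    ///     Some(child) => buffer.merge_transactions(child, parent),
    ///     None => {
    ///         buffer.forget_transaction(parent);
    ///     }
    /// }
    /// ```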
1580 pub fn push_empty_transaction(&mut self, now: Instant) -> TransactionId {
1581 self.history
1582 .push_empty_transaction(self.version.clone(), now, &mut self.lamport_clock)
1583 }
1584
1585 pub fn edited_ranges_for_transaction_id<D>(
1586 &self,
1587 transaction_id: TransactionId,
1588 ) -> impl '_ + Iterator<Item = Range<D>>
1589 where
1590 D: TextDimension,
1591 {
1592 self.history
1593 .transaction(transaction_id)
1594 .into_iter()
1595 .flat_map(|transaction| self.edited_ranges_for_transaction(transaction))
1596 }
1597
1598 pub fn edited_ranges_for_edit_ids<'a, D>(
1599 &'a self,
1600 edit_ids: impl IntoIterator<Item = &'a clock::Lamport>,
1601 ) -> impl 'a + Iterator<Item = Range<D>>
1602 where
1603 D: TextDimension,
1604 {
1605 // get fragment ranges
1606 let mut cursor = self
1607 .fragments
1608 .cursor::<Dimensions<Option<&Locator>, usize>>(&None);
1609 let offset_ranges = self
1610 .fragment_ids_for_edits(edit_ids.into_iter())
1611 .into_iter()
1612 .filter_map(move |fragment_id| {
1613 cursor.seek_forward(&Some(fragment_id), Bias::Left);
1614 let fragment = cursor.item()?;
1615 let start_offset = cursor.start().1;
1616 let end_offset = start_offset + if fragment.visible { fragment.len } else { 0 };
1617 Some(start_offset..end_offset)
1618 });
1619
1620 // combine adjacent ranges
1621 let mut prev_range: Option<Range<usize>> = None;
1622 let disjoint_ranges = offset_ranges
1623 .map(Some)
1624 .chain([None])
1625 .filter_map(move |range| {
1626 if let Some((range, prev_range)) = range.as_ref().zip(prev_range.as_mut())
1627 && prev_range.end == range.start
1628 {
1629 prev_range.end = range.end;
1630 return None;
1631 }
1632 let result = prev_range.clone();
1633 prev_range = range;
1634 result
1635 });
1636
1637 // convert to the desired text dimension.
1638 let mut position = D::zero(());
1639 let mut rope_cursor = self.visible_text.cursor(0);
1640 disjoint_ranges.map(move |range| {
1641 position.add_assign(&rope_cursor.summary(range.start));
1642 let start = position;
1643 position.add_assign(&rope_cursor.summary(range.end));
1644 let end = position;
1645 start..end
1646 })
1647 }
1648
1649 pub fn edited_ranges_for_transaction<'a, D>(
1650 &'a self,
1651 transaction: &'a Transaction,
1652 ) -> impl 'a + Iterator<Item = Range<D>>
1653 where
1654 D: TextDimension,
1655 {
1656 self.edited_ranges_for_edit_ids(&transaction.edit_ids)
1657 }
1658
1659 pub fn subscribe(&mut self) -> Subscription {
1660 self.subscriptions.subscribe()
1661 }
1662
1663 pub fn wait_for_edits<It: IntoIterator<Item = clock::Lamport>>(
1664 &mut self,
1665 edit_ids: It,
1666 ) -> impl 'static + Future<Output = Result<()>> + use<It> {
1667 let mut futures = Vec::new();
1668 for edit_id in edit_ids {
1669 if !self.version.observed(edit_id) {
1670 let (tx, rx) = oneshot::channel();
1671 self.edit_id_resolvers.entry(edit_id).or_default().push(tx);
1672 futures.push(rx);
1673 }
1674 }
1675
1676 async move {
1677 for mut future in futures {
1678 if future.recv().await.is_none() {
1679 anyhow::bail!("gave up waiting for edits");
1680 }
1681 }
1682 Ok(())
1683 }
1684 }
1685
1686 pub fn wait_for_anchors<It: IntoIterator<Item = Anchor>>(
1687 &mut self,
1688 anchors: It,
1689 ) -> impl 'static + Future<Output = Result<()>> + use<It> {
1690 let mut futures = Vec::new();
1691 for anchor in anchors {
1692 if !self.version.observed(anchor.timestamp)
1693 && anchor != Anchor::MAX
1694 && anchor != Anchor::MIN
1695 {
1696 let (tx, rx) = oneshot::channel();
1697 self.edit_id_resolvers
1698 .entry(anchor.timestamp)
1699 .or_default()
1700 .push(tx);
1701 futures.push(rx);
1702 }
1703 }
1704
1705 async move {
1706 for mut future in futures {
1707 if future.recv().await.is_none() {
1708 anyhow::bail!("gave up waiting for anchors");
1709 }
1710 }
1711 Ok(())
1712 }
1713 }
1714
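    /// Returns a future that resolves once this buffer has observed every
    /// operation in `version`, and fails if the buffer gives up waiting.
    ///
    /// A minimal sketch (illustrative, not compiled as a doctest;
    /// `remote_version` is assumed to be a `clock::Global` received from a
    /// collaborator):
    ///
    /// ```ignore
    /// let wait = buffer.wait_for_version(remote_version.clone());
    /// // ...keep applying incoming operations...
    /// wait.await?;
    /// ```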
1715 pub fn wait_for_version(
1716 &mut self,
1717 version: clock::Global,
1718 ) -> impl Future<Output = Result<()>> + use<> {
1719 let mut rx = None;
1720 if !self.snapshot.version.observed_all(&version) {
1721 let channel = oneshot::channel();
1722 self.wait_for_version_txs.push((version, channel.0));
1723 rx = Some(channel.1);
1724 }
1725 async move {
1726 if let Some(mut rx) = rx
1727 && rx.recv().await.is_none()
1728 {
1729 anyhow::bail!("gave up waiting for version");
1730 }
1731 Ok(())
1732 }
1733 }
1734
1735 pub fn give_up_waiting(&mut self) {
1736 self.edit_id_resolvers.clear();
1737 self.wait_for_version_txs.clear();
1738 }
1739
1740 fn resolve_edit(&mut self, edit_id: clock::Lamport) {
1741 for mut tx in self
1742 .edit_id_resolvers
1743 .remove(&edit_id)
1744 .into_iter()
1745 .flatten()
1746 {
1747 tx.try_send(()).ok();
1748 }
1749 }
1750}
1751
1752#[cfg(any(test, feature = "test-support"))]
1753impl Buffer {
1754 #[track_caller]
1755 pub fn edit_via_marked_text(&mut self, marked_string: &str, cx: &BackgroundExecutor) {
1756 let edits = self.edits_for_marked_text(marked_string);
1757 self.edit(edits, cx);
1758 }
1759
1760 #[track_caller]
1761 pub fn edits_for_marked_text(&self, marked_string: &str) -> Vec<(Range<usize>, String)> {
1762 let old_text = self.text();
1763 let (new_text, mut ranges) = util::test::marked_text_ranges(marked_string, false);
1764 if ranges.is_empty() {
1765 ranges.push(0..new_text.len());
1766 }
1767
1768 assert_eq!(
1769 old_text[..ranges[0].start],
1770 new_text[..ranges[0].start],
1771 "invalid edit"
1772 );
1773
1774 let mut delta = 0;
1775 let mut edits = Vec::new();
1776 let mut ranges = ranges.into_iter().peekable();
1777
1778 while let Some(inserted_range) = ranges.next() {
1779 let new_start = inserted_range.start;
1780 let old_start = (new_start as isize - delta) as usize;
1781
1782 let following_text = if let Some(next_range) = ranges.peek() {
1783 &new_text[inserted_range.end..next_range.start]
1784 } else {
1785 &new_text[inserted_range.end..]
1786 };
1787
1788 let inserted_len = inserted_range.len();
1789 let deleted_len = old_text[old_start..]
1790 .find(following_text)
1791 .expect("invalid edit");
1792
1793 let old_range = old_start..old_start + deleted_len;
1794 edits.push((old_range, new_text[inserted_range].to_string()));
1795 delta += inserted_len as isize - deleted_len as isize;
1796 }
1797
1798 assert_eq!(
1799 old_text.len() as isize + delta,
1800 new_text.len() as isize,
1801 "invalid edit"
1802 );
1803
1804 edits
1805 }
1806
1807 pub fn check_invariants(&self) {
1808 // Ensure every fragment is ordered by locator in the fragment tree and corresponds
1809 // to an insertion fragment in the insertions tree.
1810 let mut prev_fragment_id = Locator::min();
1811 for fragment in self.snapshot.fragments.items(&None) {
1812 assert!(fragment.id > prev_fragment_id);
1813 prev_fragment_id = fragment.id.clone();
1814
1815 let insertion_fragment = self
1816 .snapshot
1817 .insertions
1818 .get(
1819 &InsertionFragmentKey {
1820 timestamp: fragment.timestamp,
1821 split_offset: fragment.insertion_offset,
1822 },
1823 (),
1824 )
1825 .unwrap();
1826 assert_eq!(
1827 insertion_fragment.fragment_id, fragment.id,
1828 "fragment: {:?}\ninsertion: {:?}",
1829 fragment, insertion_fragment
1830 );
1831 }
1832
1833 let mut cursor = self.snapshot.fragments.cursor::<Option<&Locator>>(&None);
1834 for insertion_fragment in self.snapshot.insertions.cursor::<()>(()) {
1835 cursor.seek(&Some(&insertion_fragment.fragment_id), Bias::Left);
1836 let fragment = cursor.item().unwrap();
1837 assert_eq!(insertion_fragment.fragment_id, fragment.id);
1838 assert_eq!(insertion_fragment.split_offset, fragment.insertion_offset);
1839 }
1840
1841 let fragment_summary = self.snapshot.fragments.summary();
1842 assert_eq!(
1843 fragment_summary.text.visible,
1844 self.snapshot.visible_text.len()
1845 );
1846 assert_eq!(
1847 fragment_summary.text.deleted,
1848 self.snapshot.deleted_text.len()
1849 );
1850
1851 assert!(!self.text().contains("\r\n"));
1852 }
1853
1854 pub fn set_group_interval(&mut self, group_interval: Duration) {
1855 self.history.group_interval = group_interval;
1856 }
1857
1858 pub fn random_byte_range(&self, start_offset: usize, rng: &mut impl rand::Rng) -> Range<usize> {
1859 let end = self.clip_offset(rng.random_range(start_offset..=self.len()), Bias::Right);
1860 let start = self.clip_offset(rng.random_range(start_offset..=end), Bias::Right);
1861 start..end
1862 }
1863
1864 pub fn get_random_edits<T>(
1865 &self,
1866 rng: &mut T,
1867 edit_count: usize,
1868 ) -> Vec<(Range<usize>, Arc<str>)>
1869 where
1870 T: rand::Rng,
1871 {
1872 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
1873 let mut last_end = None;
1874 for _ in 0..edit_count {
1875 if last_end.is_some_and(|last_end| last_end >= self.len()) {
1876 break;
1877 }
1878 let new_start = last_end.map_or(0, |last_end| last_end + 1);
1879 let range = self.random_byte_range(new_start, rng);
1880 last_end = Some(range.end);
1881
1882 let new_text_len = rng.random_range(0..10);
1883 let new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
1884
1885 edits.push((range, new_text.into()));
1886 }
1887 edits
1888 }
1889
1890 pub fn randomly_edit<T>(
1891 &mut self,
1892 rng: &mut T,
1893 edit_count: usize,
1894 executor: &BackgroundExecutor,
1895 ) -> (Vec<(Range<usize>, Arc<str>)>, Operation)
1896 where
1897 T: rand::Rng,
1898 {
1899 let mut edits = self.get_random_edits(rng, edit_count);
1900 log::info!("mutating buffer {:?} with {:?}", self.replica_id, edits);
1901
1902 let op = self.edit(edits.iter().cloned(), executor);
1903 if let Operation::Edit(edit) = &op {
1904 assert_eq!(edits.len(), edit.new_text.len());
1905 for (edit, new_text) in edits.iter_mut().zip(&edit.new_text) {
1906 edit.1 = new_text.clone();
1907 }
1908 } else {
1909 unreachable!()
1910 }
1911
1912 (edits, op)
1913 }
1914
1915 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng) -> Vec<Operation> {
1916 use rand::prelude::*;
1917
1918 let mut ops = Vec::new();
1919 for _ in 0..rng.random_range(1..=5) {
1920 if let Some(entry) = self.history.undo_stack.choose(rng) {
1921 let transaction = entry.transaction.clone();
1922 log::info!(
1923 "undoing buffer {:?} transaction {:?}",
1924 self.replica_id,
1925 transaction
1926 );
1927 ops.push(self.undo_or_redo(transaction));
1928 }
1929 }
1930 ops
1931 }
1932}
1933
1934impl Deref for Buffer {
1935 type Target = BufferSnapshot;
1936
1937 fn deref(&self) -> &Self::Target {
1938 &self.snapshot
1939 }
1940}
1941
1942impl BufferSnapshot {
1943 pub fn as_rope(&self) -> &Rope {
1944 &self.visible_text
1945 }
1946
1947 pub fn rope_for_version(&self, version: &clock::Global) -> Rope {
1948 let mut rope = Rope::new();
1949
1950 let mut cursor = self
1951 .fragments
1952 .filter::<_, FragmentTextSummary>(&None, move |summary| {
1953 !version.observed_all(&summary.max_version)
1954 });
1955 cursor.next();
1956
1957 let mut visible_cursor = self.visible_text.cursor(0);
1958 let mut deleted_cursor = self.deleted_text.cursor(0);
1959
1960 while let Some(fragment) = cursor.item() {
1961 if cursor.start().visible > visible_cursor.offset() {
1962 let text = visible_cursor.slice(cursor.start().visible);
1963 rope.append(text);
1964 }
1965
1966 if fragment.was_visible(version, &self.undo_map) {
1967 if fragment.visible {
1968 let text = visible_cursor.slice(cursor.end().visible);
1969 rope.append(text);
1970 } else {
1971 deleted_cursor.seek_forward(cursor.start().deleted);
1972 let text = deleted_cursor.slice(cursor.end().deleted);
1973 rope.append(text);
1974 }
1975 } else if fragment.visible {
1976 visible_cursor.seek_forward(cursor.end().visible);
1977 }
1978
1979 cursor.next();
1980 }
1981
1982 if cursor.start().visible > visible_cursor.offset() {
1983 let text = visible_cursor.slice(cursor.start().visible);
1984 rope.append(text);
1985 }
1986
1987 rope
1988 }
1989
1990 pub fn remote_id(&self) -> BufferId {
1991 self.remote_id
1992 }
1993
1994 pub fn replica_id(&self) -> ReplicaId {
1995 self.replica_id
1996 }
1997
1998 pub fn row_count(&self) -> u32 {
1999 self.max_point().row + 1
2000 }
2001
2002 pub fn len(&self) -> usize {
2003 self.visible_text.len()
2004 }
2005
2006 pub fn is_empty(&self) -> bool {
2007 self.len() == 0
2008 }
2009
2010 pub fn chars(&self) -> impl Iterator<Item = char> + '_ {
2011 self.chars_at(0)
2012 }
2013
2014 pub fn chars_for_range<T: ToOffset>(&self, range: Range<T>) -> impl Iterator<Item = char> + '_ {
2015 self.text_for_range(range).flat_map(str::chars)
2016 }
2017
2018 pub fn reversed_chars_for_range<T: ToOffset>(
2019 &self,
2020 range: Range<T>,
2021 ) -> impl Iterator<Item = char> + '_ {
2022 self.reversed_chunks_in_range(range)
2023 .flat_map(|chunk| chunk.chars().rev())
2024 }
2025
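    /// Returns whether the buffer text starting at `position` begins with `needle`.
    ///
    /// A minimal sketch (hypothetical `snapshot` containing "hello world"):
    ///
    /// ```ignore
    /// assert!(snapshot.contains_str_at(6usize, "world"));
    /// assert!(!snapshot.contains_str_at(0usize, "world"));
    /// ```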
2026 pub fn contains_str_at<T>(&self, position: T, needle: &str) -> bool
2027 where
2028 T: ToOffset,
2029 {
2030 let position = position.to_offset(self);
2031 position == self.clip_offset(position, Bias::Left)
2032 && self
2033 .bytes_in_range(position..self.len())
2034 .flatten()
2035 .copied()
2036 .take(needle.len())
2037 .eq(needle.bytes())
2038 }
2039
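    /// Returns the range of buffer text ending at `position` that matches a prefix of
    /// `needle`, compared case-insensitively; the longest such prefix wins.
    ///
    /// A minimal sketch (hypothetical `snapshot` containing "prefix", caret at offset 3):
    ///
    /// ```ignore
    /// // "pre" matches the first three characters of "PREFIX" case-insensitively.
    /// assert_eq!(snapshot.common_prefix_at(3usize, "PREFIX"), 0..3);
    /// ```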
2040 pub fn common_prefix_at<T>(&self, position: T, needle: &str) -> Range<T>
2041 where
2042 T: ToOffset + TextDimension,
2043 {
2044 let offset = position.to_offset(self);
2045 let common_prefix_len = needle
2046 .char_indices()
2047 .map(|(index, _)| index)
2048 .chain([needle.len()])
2049 .take_while(|&len| len <= offset)
2050 .filter(|&len| {
2051 let left = self
2052 .chars_for_range(offset - len..offset)
2053 .flat_map(char::to_lowercase);
2054 let right = needle[..len].chars().flat_map(char::to_lowercase);
2055 left.eq(right)
2056 })
2057 .last()
2058 .unwrap_or(0);
2059 let start_offset = offset - common_prefix_len;
2060 let start = self.text_summary_for_range(0..start_offset);
2061 start..position
2062 }
2063
2064 pub fn text(&self) -> String {
2065 self.visible_text.to_string()
2066 }
2067
2068 pub fn line_ending(&self) -> LineEnding {
2069 self.line_ending
2070 }
2071
2072 pub fn deleted_text(&self) -> String {
2073 self.deleted_text.to_string()
2074 }
2075
2076 pub fn fragments(&self) -> impl Iterator<Item = &Fragment> {
2077 self.fragments.iter()
2078 }
2079
2080 pub fn text_summary(&self) -> TextSummary {
2081 self.visible_text.summary()
2082 }
2083
2084 pub fn max_point(&self) -> Point {
2085 self.visible_text.max_point()
2086 }
2087
2088 pub fn max_point_utf16(&self) -> PointUtf16 {
2089 self.visible_text.max_point_utf16()
2090 }
2091
2092 pub fn point_to_offset(&self, point: Point) -> usize {
2093 self.visible_text.point_to_offset(point)
2094 }
2095
2096 pub fn point_to_offset_utf16(&self, point: Point) -> OffsetUtf16 {
2097 self.visible_text.point_to_offset_utf16(point)
2098 }
2099
2100 pub fn point_utf16_to_offset_utf16(&self, point: PointUtf16) -> OffsetUtf16 {
2101 self.visible_text.point_utf16_to_offset_utf16(point)
2102 }
2103
2104 pub fn point_utf16_to_offset(&self, point: PointUtf16) -> usize {
2105 self.visible_text.point_utf16_to_offset(point)
2106 }
2107
2108 pub fn unclipped_point_utf16_to_offset(&self, point: Unclipped<PointUtf16>) -> usize {
2109 self.visible_text.unclipped_point_utf16_to_offset(point)
2110 }
2111
2112 pub fn unclipped_point_utf16_to_point(&self, point: Unclipped<PointUtf16>) -> Point {
2113 self.visible_text.unclipped_point_utf16_to_point(point)
2114 }
2115
2116 pub fn offset_utf16_to_offset(&self, offset: OffsetUtf16) -> usize {
2117 self.visible_text.offset_utf16_to_offset(offset)
2118 }
2119
2120 pub fn offset_to_offset_utf16(&self, offset: usize) -> OffsetUtf16 {
2121 self.visible_text.offset_to_offset_utf16(offset)
2122 }
2123
2124 pub fn offset_to_point(&self, offset: usize) -> Point {
2125 self.visible_text.offset_to_point(offset)
2126 }
2127
2128 pub fn offset_to_point_utf16(&self, offset: usize) -> PointUtf16 {
2129 self.visible_text.offset_to_point_utf16(offset)
2130 }
2131
2132 pub fn point_to_point_utf16(&self, point: Point) -> PointUtf16 {
2133 self.visible_text.point_to_point_utf16(point)
2134 }
2135
2136 pub fn point_utf16_to_point(&self, point: PointUtf16) -> Point {
2137 self.visible_text.point_utf16_to_point(point)
2138 }
2139
2140 pub fn version(&self) -> &clock::Global {
2141 &self.version
2142 }
2143
2144 pub fn chars_at<T: ToOffset>(&self, position: T) -> impl Iterator<Item = char> + '_ {
2145 let offset = position.to_offset(self);
2146 self.visible_text.chars_at(offset)
2147 }
2148
2149 pub fn reversed_chars_at<T: ToOffset>(&self, position: T) -> impl Iterator<Item = char> + '_ {
2150 let offset = position.to_offset(self);
2151 self.visible_text.reversed_chars_at(offset)
2152 }
2153
2154 pub fn reversed_chunks_in_range<T: ToOffset>(&self, range: Range<T>) -> rope::Chunks<'_> {
2155 let range = range.start.to_offset(self)..range.end.to_offset(self);
2156 self.visible_text.reversed_chunks_in_range(range)
2157 }
2158
2159 pub fn bytes_in_range<T: ToOffset>(&self, range: Range<T>) -> rope::Bytes<'_> {
2160 let start = range.start.to_offset(self);
2161 let end = range.end.to_offset(self);
2162 self.visible_text.bytes_in_range(start..end)
2163 }
2164
2165 pub fn reversed_bytes_in_range<T: ToOffset>(&self, range: Range<T>) -> rope::Bytes<'_> {
2166 let start = range.start.to_offset(self);
2167 let end = range.end.to_offset(self);
2168 self.visible_text.reversed_bytes_in_range(start..end)
2169 }
2170
2171 pub fn text_for_range<T: ToOffset>(&self, range: Range<T>) -> Chunks<'_> {
2172 let start = range.start.to_offset(self);
2173 let end = range.end.to_offset(self);
2174 self.visible_text.chunks_in_range(start..end)
2175 }
2176
2177 pub fn line_len(&self, row: u32) -> u32 {
2178 let row_start_offset = Point::new(row, 0).to_offset(self);
2179 let row_end_offset = if row >= self.max_point().row {
2180 self.len()
2181 } else {
2182 Point::new(row + 1, 0).to_previous_offset(self)
2183 };
2184 (row_end_offset - row_start_offset) as u32
2185 }
2186
2187 pub fn line_indents_in_row_range(
2188 &self,
2189 row_range: Range<u32>,
2190 ) -> impl Iterator<Item = (u32, LineIndent)> + '_ {
2191 let start = Point::new(row_range.start, 0).to_offset(self);
2192 let end = Point::new(row_range.end, self.line_len(row_range.end)).to_offset(self);
2193
2194 let mut chunks = self.as_rope().chunks_in_range(start..end);
2195 let mut row = row_range.start;
2196 let mut done = false;
2197 std::iter::from_fn(move || {
2198 if done {
2199 None
2200 } else {
2201 let indent = (row, LineIndent::from_chunks(&mut chunks));
2202 done = !chunks.next_line();
2203 row += 1;
2204 Some(indent)
2205 }
2206 })
2207 }
2208
    /// Returns the line indents in the given row range, exclusive of the end row,
    /// in reverse order (last row first).
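    ///
    /// A rough sketch of the iteration order (hypothetical `snapshot` holding the three
    /// lines "    a", "  b", and "c"):
    ///
    /// ```ignore
    /// let rows: Vec<u32> = snapshot
    ///     .reversed_line_indents_in_row_range(0..3)
    ///     .map(|(row, _indent)| row)
    ///     .collect();
    /// assert_eq!(rows, vec![2, 1, 0]);
    /// ```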
2210 pub fn reversed_line_indents_in_row_range(
2211 &self,
2212 row_range: Range<u32>,
2213 ) -> impl Iterator<Item = (u32, LineIndent)> + '_ {
2214 let start = Point::new(row_range.start, 0).to_offset(self);
2215
2216 let end_point;
2217 let end;
2218 if row_range.end > row_range.start {
2219 end_point = Point::new(row_range.end - 1, self.line_len(row_range.end - 1));
2220 end = end_point.to_offset(self);
2221 } else {
2222 end_point = Point::new(row_range.start, 0);
2223 end = start;
        }
2225
2226 let mut chunks = self.as_rope().chunks_in_range(start..end);
2227 // Move the cursor to the start of the last line if it's not empty.
2228 chunks.seek(end);
2229 if end_point.column > 0 {
2230 chunks.prev_line();
2231 }
2232
2233 let mut row = end_point.row;
2234 let mut done = false;
2235 std::iter::from_fn(move || {
2236 if done {
2237 None
2238 } else {
2239 let initial_offset = chunks.offset();
2240 let indent = (row, LineIndent::from_chunks(&mut chunks));
2241 if chunks.offset() > initial_offset {
2242 chunks.prev_line();
2243 }
2244 done = !chunks.prev_line();
2245 if !done {
2246 row -= 1;
2247 }
2248
2249 Some(indent)
2250 }
2251 })
2252 }
2253
2254 pub fn line_indent_for_row(&self, row: u32) -> LineIndent {
2255 LineIndent::from_iter(self.chars_at(Point::new(row, 0)))
2256 }
2257
2258 pub fn is_line_blank(&self, row: u32) -> bool {
2259 self.text_for_range(Point::new(row, 0)..Point::new(row, self.line_len(row)))
2260 .all(|chunk| chunk.matches(|c: char| !c.is_whitespace()).next().is_none())
2261 }
2262
2263 pub fn text_summary_for_range<D, O: ToOffset>(&self, range: Range<O>) -> D
2264 where
2265 D: TextDimension,
2266 {
2267 self.visible_text
2268 .cursor(range.start.to_offset(self))
2269 .summary(range.end.to_offset(self))
2270 }
2271
2272 pub fn summaries_for_anchors<'a, D, A>(&'a self, anchors: A) -> impl 'a + Iterator<Item = D>
2273 where
2274 D: 'a + TextDimension,
2275 A: 'a + IntoIterator<Item = &'a Anchor>,
2276 {
2277 let anchors = anchors.into_iter();
2278 self.summaries_for_anchors_with_payload::<D, _, ()>(anchors.map(|a| (a, ())))
2279 .map(|d| d.0)
2280 }
2281
2282 pub fn summaries_for_anchors_with_payload<'a, D, A, T>(
2283 &'a self,
2284 anchors: A,
2285 ) -> impl 'a + Iterator<Item = (D, T)>
2286 where
2287 D: 'a + TextDimension,
2288 A: 'a + IntoIterator<Item = (&'a Anchor, T)>,
2289 {
2290 let anchors = anchors.into_iter();
2291 let mut insertion_cursor = self.insertions.cursor::<InsertionFragmentKey>(());
2292 let mut fragment_cursor = self
2293 .fragments
2294 .cursor::<Dimensions<Option<&Locator>, usize>>(&None);
2295 let mut text_cursor = self.visible_text.cursor(0);
2296 let mut position = D::zero(());
2297
2298 anchors.map(move |(anchor, payload)| {
2299 if *anchor == Anchor::MIN {
2300 return (D::zero(()), payload);
2301 } else if *anchor == Anchor::MAX {
2302 return (D::from_text_summary(&self.visible_text.summary()), payload);
2303 }
2304
2305 let anchor_key = InsertionFragmentKey {
2306 timestamp: anchor.timestamp,
2307 split_offset: anchor.offset,
2308 };
2309 insertion_cursor.seek(&anchor_key, anchor.bias);
2310 if let Some(insertion) = insertion_cursor.item() {
2311 let comparison = sum_tree::KeyedItem::key(insertion).cmp(&anchor_key);
2312 if comparison == Ordering::Greater
2313 || (anchor.bias == Bias::Left
2314 && comparison == Ordering::Equal
2315 && anchor.offset > 0)
2316 {
2317 insertion_cursor.prev();
2318 }
2319 } else {
2320 insertion_cursor.prev();
2321 }
2322 let insertion = insertion_cursor.item().expect("invalid insertion");
2323 assert_eq!(
2324 insertion.timestamp,
2325 anchor.timestamp,
2326 "invalid insertion for buffer {} with anchor {:?}",
2327 self.remote_id(),
2328 anchor
2329 );
2330
2331 fragment_cursor.seek_forward(&Some(&insertion.fragment_id), Bias::Left);
2332 let fragment = fragment_cursor.item().unwrap();
2333 let mut fragment_offset = fragment_cursor.start().1;
2334 if fragment.visible {
2335 fragment_offset += anchor.offset - insertion.split_offset;
2336 }
2337
2338 position.add_assign(&text_cursor.summary(fragment_offset));
            (position.clone(), payload)
2340 })
2341 }
2342
2343 pub fn summary_for_anchor<D>(&self, anchor: &Anchor) -> D
2344 where
2345 D: TextDimension,
2346 {
2347 self.text_summary_for_range(0..self.offset_for_anchor(anchor))
2348 }
2349
2350 pub fn offset_for_anchor(&self, anchor: &Anchor) -> usize {
2351 if *anchor == Anchor::MIN {
2352 0
2353 } else if *anchor == Anchor::MAX {
2354 self.visible_text.len()
2355 } else {
2356 debug_assert!(anchor.buffer_id == Some(self.remote_id));
2357 debug_assert!(self.version.observed(anchor.timestamp));
2358 let anchor_key = InsertionFragmentKey {
2359 timestamp: anchor.timestamp,
2360 split_offset: anchor.offset,
2361 };
2362 let mut insertion_cursor = self.insertions.cursor::<InsertionFragmentKey>(());
2363 insertion_cursor.seek(&anchor_key, anchor.bias);
2364 if let Some(insertion) = insertion_cursor.item() {
2365 let comparison = sum_tree::KeyedItem::key(insertion).cmp(&anchor_key);
2366 if comparison == Ordering::Greater
2367 || (anchor.bias == Bias::Left
2368 && comparison == Ordering::Equal
2369 && anchor.offset > 0)
2370 {
2371 insertion_cursor.prev();
2372 }
2373 } else {
2374 insertion_cursor.prev();
2375 }
2376
2377 let Some(insertion) = insertion_cursor
2378 .item()
2379 .filter(|insertion| insertion.timestamp == anchor.timestamp)
2380 else {
2381 self.panic_bad_anchor(anchor);
2382 };
2383
2384 let (start, _, item) = self
2385 .fragments
2386 .find::<Dimensions<Option<&Locator>, usize>, _>(
2387 &None,
2388 &Some(&insertion.fragment_id),
2389 Bias::Left,
2390 );
2391 let fragment = item.unwrap();
2392 let mut fragment_offset = start.1;
2393 if fragment.visible {
2394 fragment_offset += anchor.offset - insertion.split_offset;
2395 }
2396 fragment_offset
2397 }
2398 }
2399
2400 #[cold]
2401 fn panic_bad_anchor(&self, anchor: &Anchor) -> ! {
2402 if anchor.buffer_id.is_some_and(|id| id != self.remote_id) {
2403 panic!(
2404 "invalid anchor - buffer id does not match: anchor {anchor:?}; buffer id: {}, version: {:?}",
2405 self.remote_id, self.version
2406 );
2407 } else if !self.version.observed(anchor.timestamp) {
2408 panic!(
2409 "invalid anchor - snapshot has not observed lamport: {:?}; version: {:?}",
2410 anchor, self.version
2411 );
2412 } else {
2413 panic!(
2414 "invalid anchor {:?}. buffer id: {}, version: {:?}",
2415 anchor, self.remote_id, self.version
2416 );
2417 }
2418 }
2419
2420 fn fragment_id_for_anchor(&self, anchor: &Anchor) -> &Locator {
2421 self.try_fragment_id_for_anchor(anchor)
2422 .unwrap_or_else(|| self.panic_bad_anchor(anchor))
2423 }
2424
2425 fn try_fragment_id_for_anchor(&self, anchor: &Anchor) -> Option<&Locator> {
2426 if *anchor == Anchor::MIN {
2427 Some(Locator::min_ref())
2428 } else if *anchor == Anchor::MAX {
2429 Some(Locator::max_ref())
2430 } else {
2431 let anchor_key = InsertionFragmentKey {
2432 timestamp: anchor.timestamp,
2433 split_offset: anchor.offset,
2434 };
2435 let mut insertion_cursor = self.insertions.cursor::<InsertionFragmentKey>(());
2436 insertion_cursor.seek(&anchor_key, anchor.bias);
2437 if let Some(insertion) = insertion_cursor.item() {
2438 let comparison = sum_tree::KeyedItem::key(insertion).cmp(&anchor_key);
2439 if comparison == Ordering::Greater
2440 || (anchor.bias == Bias::Left
2441 && comparison == Ordering::Equal
2442 && anchor.offset > 0)
2443 {
2444 insertion_cursor.prev();
2445 }
2446 } else {
2447 insertion_cursor.prev();
2448 }
2449
2450 insertion_cursor
2451 .item()
2452 .filter(|insertion| {
2453 !cfg!(debug_assertions) || insertion.timestamp == anchor.timestamp
2454 })
2455 .map(|insertion| &insertion.fragment_id)
2456 }
2457 }
2458
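    /// Returns an anchor biased to the left of `position` (`Bias::Left`), so it keeps
    /// resolving to the same spot in the text as earlier parts of the buffer are edited.
    ///
    /// A hedged sketch (the edit API itself is elided):
    ///
    /// ```ignore
    /// let anchor = buffer.anchor_before(5usize);
    /// // ... insert three characters at offset 0 ...
    /// assert_eq!(anchor.to_offset(&buffer), 8);
    /// ```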
2459 pub fn anchor_before<T: ToOffset>(&self, position: T) -> Anchor {
2460 self.anchor_at(position, Bias::Left)
2461 }
2462
2463 pub fn anchor_after<T: ToOffset>(&self, position: T) -> Anchor {
2464 self.anchor_at(position, Bias::Right)
2465 }
2466
2467 pub fn anchor_at<T: ToOffset>(&self, position: T, bias: Bias) -> Anchor {
2468 self.anchor_at_offset(position.to_offset(self), bias)
2469 }
2470
2471 fn anchor_at_offset(&self, offset: usize, bias: Bias) -> Anchor {
2472 if bias == Bias::Left && offset == 0 {
2473 Anchor::MIN
2474 } else if bias == Bias::Right && offset == self.len() {
2475 Anchor::MAX
2476 } else {
2477 if offset > self.visible_text.len() {
2478 panic!("offset {} is out of bounds", offset)
2479 }
2480 self.visible_text.assert_char_boundary(offset);
2481 let (start, _, item) = self.fragments.find::<usize, _>(&None, &offset, bias);
2482 let fragment = item.unwrap();
2483 let overshoot = offset - start;
2484 Anchor {
2485 timestamp: fragment.timestamp,
2486 offset: fragment.insertion_offset + overshoot,
2487 bias,
2488 buffer_id: Some(self.remote_id),
2489 }
2490 }
2491 }
2492
2493 pub fn can_resolve(&self, anchor: &Anchor) -> bool {
2494 *anchor == Anchor::MIN
2495 || *anchor == Anchor::MAX
2496 || (Some(self.remote_id) == anchor.buffer_id && self.version.observed(anchor.timestamp))
2497 }
2498
2499 pub fn clip_offset(&self, offset: usize, bias: Bias) -> usize {
2500 self.visible_text.clip_offset(offset, bias)
2501 }
2502
2503 pub fn clip_point(&self, point: Point, bias: Bias) -> Point {
2504 self.visible_text.clip_point(point, bias)
2505 }
2506
2507 pub fn clip_offset_utf16(&self, offset: OffsetUtf16, bias: Bias) -> OffsetUtf16 {
2508 self.visible_text.clip_offset_utf16(offset, bias)
2509 }
2510
2511 pub fn clip_point_utf16(&self, point: Unclipped<PointUtf16>, bias: Bias) -> PointUtf16 {
2512 self.visible_text.clip_point_utf16(point, bias)
2513 }
2514
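    /// Returns an iterator over the edits made since the given version, with positions
    /// expressed in dimension `D` (for example `usize` offsets or `Point` coordinates).
    ///
    /// A hedged sketch (assumes `since` was captured and a single "!" was then inserted
    /// at offset 5):
    ///
    /// ```ignore
    /// let edit = buffer.edits_since::<usize>(&since).next().unwrap();
    /// assert_eq!(edit.old, 5..5);
    /// assert_eq!(edit.new, 5..6);
    /// ```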
2515 pub fn edits_since<'a, D>(
2516 &'a self,
2517 since: &'a clock::Global,
2518 ) -> impl 'a + Iterator<Item = Edit<D>>
2519 where
2520 D: TextDimension + Ord,
2521 {
2522 self.edits_since_in_range(since, Anchor::MIN..Anchor::MAX)
2523 }
2524
2525 pub fn anchored_edits_since<'a, D>(
2526 &'a self,
2527 since: &'a clock::Global,
2528 ) -> impl 'a + Iterator<Item = (Edit<D>, Range<Anchor>)>
2529 where
2530 D: TextDimension + Ord,
2531 {
2532 self.anchored_edits_since_in_range(since, Anchor::MIN..Anchor::MAX)
2533 }
2534
2535 pub fn edits_since_in_range<'a, D>(
2536 &'a self,
2537 since: &'a clock::Global,
2538 range: Range<Anchor>,
2539 ) -> impl 'a + Iterator<Item = Edit<D>>
2540 where
2541 D: TextDimension + Ord,
2542 {
2543 self.anchored_edits_since_in_range(since, range)
2544 .map(|item| item.0)
2545 }
2546
2547 pub fn anchored_edits_since_in_range<'a, D>(
2548 &'a self,
2549 since: &'a clock::Global,
2550 range: Range<Anchor>,
2551 ) -> impl 'a + Iterator<Item = (Edit<D>, Range<Anchor>)>
2552 where
2553 D: TextDimension + Ord,
2554 {
2555 let fragments_cursor = if *since == self.version {
2556 None
2557 } else {
2558 let mut cursor = self.fragments.filter(&None, move |summary| {
2559 !since.observed_all(&summary.max_version)
2560 });
2561 cursor.next();
2562 Some(cursor)
2563 };
2564 let start_fragment_id = self.fragment_id_for_anchor(&range.start);
2565 let (start, _, item) = self
2566 .fragments
2567 .find::<Dimensions<Option<&Locator>, FragmentTextSummary>, _>(
2568 &None,
2569 &Some(start_fragment_id),
2570 Bias::Left,
2571 );
2572 let mut visible_start = start.1.visible;
2573 let mut deleted_start = start.1.deleted;
2574 if let Some(fragment) = item {
2575 let overshoot = range.start.offset - fragment.insertion_offset;
2576 if fragment.visible {
2577 visible_start += overshoot;
2578 } else {
2579 deleted_start += overshoot;
2580 }
2581 }
2582 let end_fragment_id = self.fragment_id_for_anchor(&range.end);
2583
2584 Edits {
2585 visible_cursor: self.visible_text.cursor(visible_start),
2586 deleted_cursor: self.deleted_text.cursor(deleted_start),
2587 fragments_cursor,
2588 undos: &self.undo_map,
2589 since,
2590 old_end: D::zero(()),
2591 new_end: D::zero(()),
2592 range: (start_fragment_id, range.start.offset)..(end_fragment_id, range.end.offset),
2593 buffer_id: self.remote_id,
2594 }
2595 }
2596
2597 pub fn has_edits_since_in_range(&self, since: &clock::Global, range: Range<Anchor>) -> bool {
2598 if *since != self.version {
2599 let start_fragment_id = self.fragment_id_for_anchor(&range.start);
2600 let end_fragment_id = self.fragment_id_for_anchor(&range.end);
2601 let mut cursor = self.fragments.filter::<_, usize>(&None, move |summary| {
2602 !since.observed_all(&summary.max_version)
2603 });
2604 cursor.next();
2605 while let Some(fragment) = cursor.item() {
2606 if fragment.id > *end_fragment_id {
2607 break;
2608 }
2609 if fragment.id > *start_fragment_id {
2610 let was_visible = fragment.was_visible(since, &self.undo_map);
2611 let is_visible = fragment.visible;
2612 if was_visible != is_visible {
2613 return true;
2614 }
2615 }
2616 cursor.next();
2617 }
2618 }
2619 false
2620 }
2621
2622 pub fn has_edits_since(&self, since: &clock::Global) -> bool {
2623 if *since != self.version {
2624 let mut cursor = self.fragments.filter::<_, usize>(&None, move |summary| {
2625 !since.observed_all(&summary.max_version)
2626 });
2627 cursor.next();
2628 while let Some(fragment) = cursor.item() {
2629 let was_visible = fragment.was_visible(since, &self.undo_map);
2630 let is_visible = fragment.visible;
2631 if was_visible != is_visible {
2632 return true;
2633 }
2634 cursor.next();
2635 }
2636 }
2637 false
2638 }
2639
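    /// Converts `range` from offsets in the current text to the corresponding offsets at
    /// a prior version of this buffer.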
2640 pub fn range_to_version(&self, range: Range<usize>, version: &clock::Global) -> Range<usize> {
2641 let mut offsets = self.offsets_to_version([range.start, range.end], version);
2642 offsets.next().unwrap()..offsets.next().unwrap()
2643 }
2644
    /// Converts the given sequence of offsets into their corresponding offsets
    /// at a prior version of this buffer. The offsets must be provided in
    /// ascending order.
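    ///
    /// A hedged sketch (assumes `version` was captured while the buffer read "hello",
    /// after which "!" was inserted at offset 5):
    ///
    /// ```ignore
    /// let old: Vec<usize> = buffer.offsets_to_version([0, 6], &version).collect();
    /// // Offset 0 is untouched by the edit; offset 6 maps back to 5.
    /// assert_eq!(old, vec![0, 5]);
    /// ```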
2647 pub fn offsets_to_version<'a>(
2648 &'a self,
2649 offsets: impl 'a + IntoIterator<Item = usize>,
2650 version: &'a clock::Global,
2651 ) -> impl 'a + Iterator<Item = usize> {
2652 let mut edits = self.edits_since(version).peekable();
2653 let mut last_old_end = 0;
2654 let mut last_new_end = 0;
2655 offsets.into_iter().map(move |new_offset| {
2656 while let Some(edit) = edits.peek() {
2657 if edit.new.start > new_offset {
2658 break;
2659 }
2660
2661 if edit.new.end <= new_offset {
2662 last_new_end = edit.new.end;
2663 last_old_end = edit.old.end;
2664 edits.next();
2665 continue;
2666 }
2667
2668 let overshoot = new_offset - edit.new.start;
2669 return (edit.old.start + overshoot).min(edit.old.end);
2670 }
2671
2672 last_old_end + new_offset.saturating_sub(last_new_end)
2673 })
2674 }
2675
    /// Visually annotates a position or range with the `Debug` representation of a value.
    /// The call site of this function is used as the key, so previous annotations from the
    /// same call site are replaced.
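    ///
    /// A minimal, hypothetical usage sketch (assumes the buffer is at least 20 bytes long):
    ///
    /// ```ignore
    /// // Annotate a byte range with a label; calling this again from the same call
    /// // site replaces the previous annotation.
    /// snapshot.debug(&(10..20usize), "lookup hit");
    /// ```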
2678 #[cfg(debug_assertions)]
2679 #[track_caller]
2680 pub fn debug<R, V>(&self, ranges: &R, value: V)
2681 where
2682 R: debug::ToDebugRanges,
2683 V: std::fmt::Debug,
2684 {
2685 self.debug_with_key(std::panic::Location::caller(), ranges, value);
2686 }
2687
2688 /// Visually annotates a position or range with the `Debug` representation of a value. Previous
2689 /// debug annotations with the same key will be removed. The key is also used to determine the
2690 /// annotation's color.
2691 #[cfg(debug_assertions)]
2692 pub fn debug_with_key<K, R, V>(&self, key: &K, ranges: &R, value: V)
2693 where
2694 K: std::hash::Hash + 'static,
2695 R: debug::ToDebugRanges,
2696 V: std::fmt::Debug,
2697 {
2698 let ranges = ranges
2699 .to_debug_ranges(self)
2700 .into_iter()
2701 .map(|range| self.anchor_after(range.start)..self.anchor_before(range.end))
2702 .collect();
2703 debug::GlobalDebugRanges::with_locked(|debug_ranges| {
2704 debug_ranges.insert(key, ranges, format!("{value:?}").into());
2705 });
2706 }
2707}
2708
2709struct RopeBuilder<'a> {
2710 old_visible_cursor: rope::Cursor<'a>,
2711 old_deleted_cursor: rope::Cursor<'a>,
2712 new_visible: Rope,
2713 new_deleted: Rope,
2714}
2715
2716impl<'a> RopeBuilder<'a> {
2717 fn new(old_visible_cursor: rope::Cursor<'a>, old_deleted_cursor: rope::Cursor<'a>) -> Self {
2718 Self {
2719 old_visible_cursor,
2720 old_deleted_cursor,
2721 new_visible: Rope::new(),
2722 new_deleted: Rope::new(),
2723 }
2724 }
2725
2726 fn append(&mut self, len: FragmentTextSummary) {
2727 self.push(len.visible, true, true);
2728 self.push(len.deleted, false, false);
2729 }
2730
2731 fn push_fragment(&mut self, fragment: &Fragment, was_visible: bool) {
2732 debug_assert!(fragment.len > 0);
2733 self.push(fragment.len, was_visible, fragment.visible)
2734 }
2735
2736 fn push(&mut self, len: usize, was_visible: bool, is_visible: bool) {
2737 let text = if was_visible {
2738 self.old_visible_cursor
2739 .slice(self.old_visible_cursor.offset() + len)
2740 } else {
2741 self.old_deleted_cursor
2742 .slice(self.old_deleted_cursor.offset() + len)
2743 };
2744 if is_visible {
2745 self.new_visible.append(text);
2746 } else {
2747 self.new_deleted.append(text);
2748 }
2749 }
2750
2751 fn push_str(&mut self, text: &str, cx: &BackgroundExecutor) {
2752 self.new_visible.push(text, cx);
2753 }
2754
2755 fn push_str_small(&mut self, text: &str) {
2756 self.new_visible.push_small(text);
2757 }
2758
2759 fn finish(mut self) -> (Rope, Rope) {
2760 self.new_visible.append(self.old_visible_cursor.suffix());
2761 self.new_deleted.append(self.old_deleted_cursor.suffix());
2762 (self.new_visible, self.new_deleted)
2763 }
2764}
2765
2766impl<D: TextDimension + Ord, F: FnMut(&FragmentSummary) -> bool> Iterator for Edits<'_, D, F> {
2767 type Item = (Edit<D>, Range<Anchor>);
2768
2769 fn next(&mut self) -> Option<Self::Item> {
2770 let mut pending_edit: Option<Self::Item> = None;
2771 let cursor = self.fragments_cursor.as_mut()?;
2772
2773 while let Some(fragment) = cursor.item() {
2774 if fragment.id < *self.range.start.0 {
2775 cursor.next();
2776 continue;
2777 } else if fragment.id > *self.range.end.0 {
2778 break;
2779 }
2780
2781 if cursor.start().visible > self.visible_cursor.offset() {
2782 let summary = self.visible_cursor.summary(cursor.start().visible);
2783 self.old_end.add_assign(&summary);
2784 self.new_end.add_assign(&summary);
2785 }
2786
2787 if pending_edit
2788 .as_ref()
2789 .is_some_and(|(change, _)| change.new.end < self.new_end)
2790 {
2791 break;
2792 }
2793
2794 let start_anchor = Anchor {
2795 timestamp: fragment.timestamp,
2796 offset: fragment.insertion_offset,
2797 bias: Bias::Right,
2798 buffer_id: Some(self.buffer_id),
2799 };
2800 let end_anchor = Anchor {
2801 timestamp: fragment.timestamp,
2802 offset: fragment.insertion_offset + fragment.len,
2803 bias: Bias::Left,
2804 buffer_id: Some(self.buffer_id),
2805 };
2806
2807 if !fragment.was_visible(self.since, self.undos) && fragment.visible {
2808 let mut visible_end = cursor.end().visible;
2809 if fragment.id == *self.range.end.0 {
2810 visible_end = cmp::min(
2811 visible_end,
2812 cursor.start().visible + (self.range.end.1 - fragment.insertion_offset),
2813 );
2814 }
2815
2816 let fragment_summary = self.visible_cursor.summary(visible_end);
                let mut new_end = self.new_end.clone();
2818 new_end.add_assign(&fragment_summary);
2819 if let Some((edit, range)) = pending_edit.as_mut() {
                    edit.new.end = new_end.clone();
2821 range.end = end_anchor;
2822 } else {
2823 pending_edit = Some((
2824 Edit {
                            old: self.old_end.clone()..self.old_end.clone(),
                            new: self.new_end.clone()..new_end.clone(),
2827 },
2828 start_anchor..end_anchor,
2829 ));
2830 }
2831
2832 self.new_end = new_end;
2833 } else if fragment.was_visible(self.since, self.undos) && !fragment.visible {
2834 let mut deleted_end = cursor.end().deleted;
2835 if fragment.id == *self.range.end.0 {
2836 deleted_end = cmp::min(
2837 deleted_end,
2838 cursor.start().deleted + (self.range.end.1 - fragment.insertion_offset),
2839 );
2840 }
2841
2842 if cursor.start().deleted > self.deleted_cursor.offset() {
2843 self.deleted_cursor.seek_forward(cursor.start().deleted);
2844 }
2845 let fragment_summary = self.deleted_cursor.summary(deleted_end);
                let mut old_end = self.old_end.clone();
2847 old_end.add_assign(&fragment_summary);
2848 if let Some((edit, range)) = pending_edit.as_mut() {
                    edit.old.end = old_end.clone();
2850 range.end = end_anchor;
2851 } else {
2852 pending_edit = Some((
2853 Edit {
                            old: self.old_end.clone()..old_end.clone(),
                            new: self.new_end.clone()..self.new_end.clone(),
2856 },
2857 start_anchor..end_anchor,
2858 ));
2859 }
2860
2861 self.old_end = old_end;
2862 }
2863
2864 cursor.next();
2865 }
2866
2867 pending_edit
2868 }
2869}
2870
2871impl Fragment {
2872 fn is_visible(&self, undos: &UndoMap) -> bool {
2873 !undos.is_undone(self.timestamp) && self.deletions.iter().all(|d| undos.is_undone(*d))
2874 }
2875
2876 fn was_visible(&self, version: &clock::Global, undos: &UndoMap) -> bool {
2877 (version.observed(self.timestamp) && !undos.was_undone(self.timestamp, version))
2878 && self
2879 .deletions
2880 .iter()
2881 .all(|d| !version.observed(*d) || undos.was_undone(*d, version))
2882 }
2883}
2884
2885impl sum_tree::Item for Fragment {
2886 type Summary = FragmentSummary;
2887
2888 fn summary(&self, _cx: &Option<clock::Global>) -> Self::Summary {
2889 let mut max_version = clock::Global::new();
2890 max_version.observe(self.timestamp);
2891 for deletion in &self.deletions {
2892 max_version.observe(*deletion);
2893 }
2894 max_version.join(&self.max_undos);
2895
2896 let mut min_insertion_version = clock::Global::new();
2897 min_insertion_version.observe(self.timestamp);
2898 let max_insertion_version = min_insertion_version.clone();
2899 if self.visible {
2900 FragmentSummary {
2901 max_id: self.id.clone(),
2902 text: FragmentTextSummary {
2903 visible: self.len,
2904 deleted: 0,
2905 },
2906 max_version,
2907 min_insertion_version,
2908 max_insertion_version,
2909 }
2910 } else {
2911 FragmentSummary {
2912 max_id: self.id.clone(),
2913 text: FragmentTextSummary {
2914 visible: 0,
2915 deleted: self.len,
2916 },
2917 max_version,
2918 min_insertion_version,
2919 max_insertion_version,
2920 }
2921 }
2922 }
2923}
2924
2925impl sum_tree::Summary for FragmentSummary {
2926 type Context<'a> = &'a Option<clock::Global>;
2927
2928 fn zero(_cx: Self::Context<'_>) -> Self {
2929 Default::default()
2930 }
2931
2932 fn add_summary(&mut self, other: &Self, _: Self::Context<'_>) {
2933 self.max_id.assign(&other.max_id);
2934 self.text.visible += &other.text.visible;
2935 self.text.deleted += &other.text.deleted;
2936 self.max_version.join(&other.max_version);
2937 self.min_insertion_version
2938 .meet(&other.min_insertion_version);
2939 self.max_insertion_version
2940 .join(&other.max_insertion_version);
2941 }
2942}
2943
2944impl Default for FragmentSummary {
2945 fn default() -> Self {
2946 FragmentSummary {
2947 max_id: Locator::min(),
2948 text: FragmentTextSummary::default(),
2949 max_version: clock::Global::new(),
2950 min_insertion_version: clock::Global::new(),
2951 max_insertion_version: clock::Global::new(),
2952 }
2953 }
2954}
2955
2956impl sum_tree::Item for InsertionFragment {
2957 type Summary = InsertionFragmentKey;
2958
2959 fn summary(&self, _cx: ()) -> Self::Summary {
2960 InsertionFragmentKey {
2961 timestamp: self.timestamp,
2962 split_offset: self.split_offset,
2963 }
2964 }
2965}
2966
2967impl sum_tree::KeyedItem for InsertionFragment {
2968 type Key = InsertionFragmentKey;
2969
2970 fn key(&self) -> Self::Key {
2971 sum_tree::Item::summary(self, ())
2972 }
2973}
2974
2975impl InsertionFragment {
2976 fn new(fragment: &Fragment) -> Self {
2977 Self {
2978 timestamp: fragment.timestamp,
2979 split_offset: fragment.insertion_offset,
2980 fragment_id: fragment.id.clone(),
2981 }
2982 }
2983
2984 fn insert_new(fragment: &Fragment) -> sum_tree::Edit<Self> {
2985 sum_tree::Edit::Insert(Self::new(fragment))
2986 }
2987}
2988
2989impl sum_tree::ContextLessSummary for InsertionFragmentKey {
2990 fn zero() -> Self {
2991 InsertionFragmentKey {
2992 timestamp: Lamport::MIN,
2993 split_offset: 0,
2994 }
2995 }
2996
2997 fn add_summary(&mut self, summary: &Self) {
2998 *self = *summary;
2999 }
3000}
3001
3002#[derive(Copy, Clone, Debug, Default, PartialEq, Eq, PartialOrd, Ord, Hash)]
3003pub struct FullOffset(pub usize);
3004
3005impl ops::AddAssign<usize> for FullOffset {
3006 fn add_assign(&mut self, rhs: usize) {
3007 self.0 += rhs;
3008 }
3009}
3010
3011impl ops::Add<usize> for FullOffset {
3012 type Output = Self;
3013
3014 fn add(mut self, rhs: usize) -> Self::Output {
3015 self += rhs;
3016 self
3017 }
3018}
3019
3020impl ops::Sub for FullOffset {
3021 type Output = usize;
3022
3023 fn sub(self, rhs: Self) -> Self::Output {
3024 self.0 - rhs.0
3025 }
3026}
3027
3028impl sum_tree::Dimension<'_, FragmentSummary> for usize {
3029 fn zero(_: &Option<clock::Global>) -> Self {
3030 Default::default()
3031 }
3032
3033 fn add_summary(&mut self, summary: &FragmentSummary, _: &Option<clock::Global>) {
3034 *self += summary.text.visible;
3035 }
3036}
3037
3038impl sum_tree::Dimension<'_, FragmentSummary> for FullOffset {
3039 fn zero(_: &Option<clock::Global>) -> Self {
3040 Default::default()
3041 }
3042
3043 fn add_summary(&mut self, summary: &FragmentSummary, _: &Option<clock::Global>) {
3044 self.0 += summary.text.visible + summary.text.deleted;
3045 }
3046}
3047
3048impl<'a> sum_tree::Dimension<'a, FragmentSummary> for Option<&'a Locator> {
3049 fn zero(_: &Option<clock::Global>) -> Self {
3050 Default::default()
3051 }
3052
3053 fn add_summary(&mut self, summary: &'a FragmentSummary, _: &Option<clock::Global>) {
3054 *self = Some(&summary.max_id);
3055 }
3056}
3057
3058impl sum_tree::SeekTarget<'_, FragmentSummary, FragmentTextSummary> for usize {
3059 fn cmp(
3060 &self,
3061 cursor_location: &FragmentTextSummary,
3062 _: &Option<clock::Global>,
3063 ) -> cmp::Ordering {
3064 Ord::cmp(self, &cursor_location.visible)
3065 }
3066}
3067
3068#[derive(Copy, Clone, Debug, Eq, PartialEq)]
3069enum VersionedFullOffset {
3070 Offset(FullOffset),
3071 Invalid,
3072}
3073
3074impl VersionedFullOffset {
3075 fn full_offset(&self) -> FullOffset {
3076 if let Self::Offset(position) = self {
3077 *position
3078 } else {
3079 panic!("invalid version")
3080 }
3081 }
3082}
3083
3084impl Default for VersionedFullOffset {
3085 fn default() -> Self {
3086 Self::Offset(Default::default())
3087 }
3088}
3089
3090impl<'a> sum_tree::Dimension<'a, FragmentSummary> for VersionedFullOffset {
3091 fn zero(_cx: &Option<clock::Global>) -> Self {
3092 Default::default()
3093 }
3094
3095 fn add_summary(&mut self, summary: &'a FragmentSummary, cx: &Option<clock::Global>) {
3096 if let Self::Offset(offset) = self {
3097 let version = cx.as_ref().unwrap();
3098 if version.observed_all(&summary.max_insertion_version) {
3099 *offset += summary.text.visible + summary.text.deleted;
3100 } else if version.observed_any(&summary.min_insertion_version) {
3101 *self = Self::Invalid;
3102 }
3103 }
3104 }
3105}
3106
3107impl sum_tree::SeekTarget<'_, FragmentSummary, Self> for VersionedFullOffset {
3108 fn cmp(&self, cursor_position: &Self, _: &Option<clock::Global>) -> cmp::Ordering {
3109 match (self, cursor_position) {
3110 (Self::Offset(a), Self::Offset(b)) => Ord::cmp(a, b),
3111 (Self::Offset(_), Self::Invalid) => cmp::Ordering::Less,
3112 (Self::Invalid, _) => unreachable!(),
3113 }
3114 }
3115}
3116
3117impl Operation {
3118 fn replica_id(&self) -> ReplicaId {
3119 operation_queue::Operation::lamport_timestamp(self).replica_id
3120 }
3121
3122 pub fn timestamp(&self) -> clock::Lamport {
3123 match self {
3124 Operation::Edit(edit) => edit.timestamp,
3125 Operation::Undo(undo) => undo.timestamp,
3126 }
3127 }
3128
3129 pub fn as_edit(&self) -> Option<&EditOperation> {
3130 match self {
3131 Operation::Edit(edit) => Some(edit),
3132 _ => None,
3133 }
3134 }
3135
3136 pub fn is_edit(&self) -> bool {
3137 matches!(self, Operation::Edit { .. })
3138 }
3139}
3140
3141impl operation_queue::Operation for Operation {
3142 fn lamport_timestamp(&self) -> clock::Lamport {
3143 match self {
3144 Operation::Edit(edit) => edit.timestamp,
3145 Operation::Undo(undo) => undo.timestamp,
3146 }
3147 }
3148}
3149
3150pub trait ToOffset {
3151 fn to_offset(&self, snapshot: &BufferSnapshot) -> usize;
    /// Converts this position into the next offset after it in the buffer, respecting
    /// UTF-8 character boundaries.
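    ///
    /// For example (a sketch; `snapshot` assumed to contain "aé"), offset 1 is the start
    /// of the two-byte character 'é', so the next offset is 3 rather than 2:
    ///
    /// ```ignore
    /// assert_eq!(1usize.to_next_offset(&snapshot), 3);
    /// ```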
3153 fn to_next_offset(&self, snapshot: &BufferSnapshot) -> usize {
3154 snapshot
3155 .visible_text
3156 .ceil_char_boundary(self.to_offset(snapshot) + 1)
3157 }
    /// Converts this position into the previous offset before it in the buffer, respecting
    /// UTF-8 character boundaries.
3159 fn to_previous_offset(&self, snapshot: &BufferSnapshot) -> usize {
3160 snapshot
3161 .visible_text
3162 .floor_char_boundary(self.to_offset(snapshot).saturating_sub(1))
3163 }
3164}
3165
3166impl ToOffset for Point {
3167 fn to_offset(&self, snapshot: &BufferSnapshot) -> usize {
3168 snapshot.point_to_offset(*self)
3169 }
3170}
3171
3172impl ToOffset for usize {
3173 #[track_caller]
3174 fn to_offset(&self, snapshot: &BufferSnapshot) -> usize {
3175 assert!(
3176 *self <= snapshot.len(),
3177 "offset {} is out of range, snapshot length is {}",
3178 self,
3179 snapshot.len()
3180 );
3181 *self
3182 }
3183}
3184
3185impl ToOffset for Anchor {
3186 fn to_offset(&self, snapshot: &BufferSnapshot) -> usize {
3187 snapshot.summary_for_anchor(self)
3188 }
3189}
3190
3191impl<T: ToOffset> ToOffset for &T {
3192 fn to_offset(&self, content: &BufferSnapshot) -> usize {
3193 (*self).to_offset(content)
3194 }
3195}
3196
3197impl ToOffset for PointUtf16 {
3198 fn to_offset(&self, snapshot: &BufferSnapshot) -> usize {
3199 snapshot.point_utf16_to_offset(*self)
3200 }
3201}
3202
3203impl ToOffset for Unclipped<PointUtf16> {
3204 fn to_offset(&self, snapshot: &BufferSnapshot) -> usize {
3205 snapshot.unclipped_point_utf16_to_offset(*self)
3206 }
3207}
3208
3209pub trait ToPoint {
3210 fn to_point(&self, snapshot: &BufferSnapshot) -> Point;
3211}
3212
3213impl ToPoint for Anchor {
3214 fn to_point(&self, snapshot: &BufferSnapshot) -> Point {
3215 snapshot.summary_for_anchor(self)
3216 }
3217}
3218
3219impl ToPoint for usize {
3220 fn to_point(&self, snapshot: &BufferSnapshot) -> Point {
3221 snapshot.offset_to_point(*self)
3222 }
3223}
3224
3225impl ToPoint for Point {
3226 fn to_point(&self, _: &BufferSnapshot) -> Point {
3227 *self
3228 }
3229}
3230
3231impl ToPoint for Unclipped<PointUtf16> {
3232 fn to_point(&self, snapshot: &BufferSnapshot) -> Point {
3233 snapshot.unclipped_point_utf16_to_point(*self)
3234 }
3235}
3236
3237pub trait ToPointUtf16 {
3238 fn to_point_utf16(&self, snapshot: &BufferSnapshot) -> PointUtf16;
3239}
3240
3241impl ToPointUtf16 for Anchor {
3242 fn to_point_utf16(&self, snapshot: &BufferSnapshot) -> PointUtf16 {
3243 snapshot.summary_for_anchor(self)
3244 }
3245}
3246
3247impl ToPointUtf16 for usize {
3248 fn to_point_utf16(&self, snapshot: &BufferSnapshot) -> PointUtf16 {
3249 snapshot.offset_to_point_utf16(*self)
3250 }
3251}
3252
3253impl ToPointUtf16 for PointUtf16 {
3254 fn to_point_utf16(&self, _: &BufferSnapshot) -> PointUtf16 {
3255 *self
3256 }
3257}
3258
3259impl ToPointUtf16 for Point {
3260 fn to_point_utf16(&self, snapshot: &BufferSnapshot) -> PointUtf16 {
3261 snapshot.point_to_point_utf16(*self)
3262 }
3263}
3264
3265pub trait ToOffsetUtf16 {
3266 fn to_offset_utf16(&self, snapshot: &BufferSnapshot) -> OffsetUtf16;
3267}
3268
3269impl ToOffsetUtf16 for Anchor {
3270 fn to_offset_utf16(&self, snapshot: &BufferSnapshot) -> OffsetUtf16 {
3271 snapshot.summary_for_anchor(self)
3272 }
3273}
3274
3275impl ToOffsetUtf16 for usize {
3276 fn to_offset_utf16(&self, snapshot: &BufferSnapshot) -> OffsetUtf16 {
3277 snapshot.offset_to_offset_utf16(*self)
3278 }
3279}
3280
3281impl ToOffsetUtf16 for OffsetUtf16 {
3282 fn to_offset_utf16(&self, _snapshot: &BufferSnapshot) -> OffsetUtf16 {
3283 *self
3284 }
3285}
3286
3287pub trait FromAnchor {
3288 fn from_anchor(anchor: &Anchor, snapshot: &BufferSnapshot) -> Self;
3289}
3290
3291impl FromAnchor for Anchor {
3292 fn from_anchor(anchor: &Anchor, _snapshot: &BufferSnapshot) -> Self {
3293 *anchor
3294 }
3295}
3296
3297impl FromAnchor for Point {
3298 fn from_anchor(anchor: &Anchor, snapshot: &BufferSnapshot) -> Self {
3299 snapshot.summary_for_anchor(anchor)
3300 }
3301}
3302
3303impl FromAnchor for PointUtf16 {
3304 fn from_anchor(anchor: &Anchor, snapshot: &BufferSnapshot) -> Self {
3305 snapshot.summary_for_anchor(anchor)
3306 }
3307}
3308
3309impl FromAnchor for usize {
3310 fn from_anchor(anchor: &Anchor, snapshot: &BufferSnapshot) -> Self {
3311 snapshot.summary_for_anchor(anchor)
3312 }
3313}
3314
3315#[derive(Clone, Copy, Debug, PartialEq)]
3316pub enum LineEnding {
3317 Unix,
3318 Windows,
3319}
3320
3321impl Default for LineEnding {
3322 fn default() -> Self {
3323 #[cfg(unix)]
3324 return Self::Unix;
3325
3326 #[cfg(not(unix))]
3327 return Self::Windows;
3328 }
3329}
3330
3331impl LineEnding {
3332 pub fn as_str(&self) -> &'static str {
3333 match self {
3334 LineEnding::Unix => "\n",
3335 LineEnding::Windows => "\r\n",
3336 }
3337 }
3338
3339 pub fn label(&self) -> &'static str {
3340 match self {
3341 LineEnding::Unix => "LF",
3342 LineEnding::Windows => "CRLF",
3343 }
3344 }
3345
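    /// Detects the line ending used by `text`, based on the first newline found within
    /// roughly the first 1000 bytes; falls back to the platform default when no newline
    /// is present.
    ///
    /// A small sketch of the expected behavior:
    ///
    /// ```ignore
    /// assert_eq!(LineEnding::detect("a\r\nb"), LineEnding::Windows);
    /// assert_eq!(LineEnding::detect("a\nb"), LineEnding::Unix);
    /// ```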
3346 pub fn detect(text: &str) -> Self {
3347 let mut max_ix = cmp::min(text.len(), 1000);
3348 while !text.is_char_boundary(max_ix) {
3349 max_ix -= 1;
3350 }
3351
3352 if let Some(ix) = text[..max_ix].find(['\n']) {
3353 if ix > 0 && text.as_bytes()[ix - 1] == b'\r' {
3354 Self::Windows
3355 } else {
3356 Self::Unix
3357 }
3358 } else {
3359 Self::default()
3360 }
3361 }
3362
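    /// Replaces all `\r\n` and lone `\r` line separators with `\n`, in place.
    ///
    /// A small sketch of the effect:
    ///
    /// ```ignore
    /// let mut text = String::from("a\r\nb\rc");
    /// LineEnding::normalize(&mut text);
    /// assert_eq!(text, "a\nb\nc");
    /// ```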
3363 pub fn normalize(text: &mut String) {
3364 if let Cow::Owned(replaced) = LINE_SEPARATORS_REGEX.replace_all(text, "\n") {
3365 *text = replaced;
3366 }
3367 }
3368
3369 pub fn normalize_arc(text: Arc<str>) -> Arc<str> {
3370 if let Cow::Owned(replaced) = LINE_SEPARATORS_REGEX.replace_all(&text, "\n") {
3371 replaced.into()
3372 } else {
3373 text
3374 }
3375 }
3376
3377 pub fn normalize_cow(text: Cow<str>) -> Cow<str> {
3378 if let Cow::Owned(replaced) = LINE_SEPARATORS_REGEX.replace_all(&text, "\n") {
3379 replaced.into()
3380 } else {
3381 text
3382 }
3383 }
3384}
3385
3386#[cfg(debug_assertions)]
3387pub mod debug {
3388 use super::*;
3389 use parking_lot::Mutex;
3390 use std::any::TypeId;
3391 use std::hash::{Hash, Hasher};
3392
3393 static GLOBAL_DEBUG_RANGES: Mutex<Option<GlobalDebugRanges>> = Mutex::new(None);
3394
3395 pub struct GlobalDebugRanges {
3396 pub ranges: Vec<DebugRange>,
3397 key_to_occurrence_index: HashMap<Key, usize>,
3398 next_occurrence_index: usize,
3399 }
3400
3401 pub struct DebugRange {
3402 key: Key,
3403 pub ranges: Vec<Range<Anchor>>,
3404 pub value: Arc<str>,
3405 pub occurrence_index: usize,
3406 }
3407
3408 #[derive(Debug, Clone, PartialEq, Eq, Hash)]
3409 struct Key {
3410 type_id: TypeId,
3411 hash: u64,
3412 }
3413
3414 impl GlobalDebugRanges {
3415 pub fn with_locked<R>(f: impl FnOnce(&mut Self) -> R) -> R {
3416 let mut state = GLOBAL_DEBUG_RANGES.lock();
3417 if state.is_none() {
3418 *state = Some(GlobalDebugRanges {
3419 ranges: Vec::new(),
3420 key_to_occurrence_index: HashMap::default(),
3421 next_occurrence_index: 0,
3422 });
3423 }
3424 if let Some(global_debug_ranges) = state.as_mut() {
3425 f(global_debug_ranges)
3426 } else {
3427 unreachable!()
3428 }
3429 }
3430
3431 pub fn insert<K: Hash + 'static>(
3432 &mut self,
3433 key: &K,
3434 ranges: Vec<Range<Anchor>>,
3435 value: Arc<str>,
3436 ) {
3437 let occurrence_index = *self
3438 .key_to_occurrence_index
3439 .entry(Key::new(key))
3440 .or_insert_with(|| {
3441 let occurrence_index = self.next_occurrence_index;
3442 self.next_occurrence_index += 1;
3443 occurrence_index
3444 });
3445 let key = Key::new(key);
3446 let existing = self
3447 .ranges
3448 .iter()
3449 .enumerate()
3450 .rfind(|(_, existing)| existing.key == key);
3451 if let Some((existing_ix, _)) = existing {
3452 self.ranges.remove(existing_ix);
3453 }
3454 self.ranges.push(DebugRange {
3455 ranges,
3456 key,
3457 value,
3458 occurrence_index,
3459 });
3460 }
3461
3462 pub fn remove<K: Hash + 'static>(&mut self, key: &K) {
3463 self.remove_impl(&Key::new(key));
3464 }
3465
3466 fn remove_impl(&mut self, key: &Key) {
3467 let existing = self
3468 .ranges
3469 .iter()
3470 .enumerate()
3471 .rfind(|(_, existing)| &existing.key == key);
3472 if let Some((existing_ix, _)) = existing {
3473 self.ranges.remove(existing_ix);
3474 }
3475 }
3476
3477 pub fn remove_all_with_key_type<K: 'static>(&mut self) {
3478 self.ranges
3479 .retain(|item| item.key.type_id != TypeId::of::<K>());
3480 }
3481 }
3482
3483 impl Key {
3484 fn new<K: Hash + 'static>(key: &K) -> Self {
3485 let type_id = TypeId::of::<K>();
3486 let mut hasher = collections::FxHasher::default();
3487 key.hash(&mut hasher);
3488 Key {
3489 type_id,
3490 hash: hasher.finish(),
3491 }
3492 }
3493 }
3494
3495 pub trait ToDebugRanges {
3496 fn to_debug_ranges(&self, snapshot: &BufferSnapshot) -> Vec<Range<usize>>;
3497 }
3498
3499 impl<T: ToOffset> ToDebugRanges for T {
3500 fn to_debug_ranges(&self, snapshot: &BufferSnapshot) -> Vec<Range<usize>> {
3501 [self.to_offset(snapshot)].to_debug_ranges(snapshot)
3502 }
3503 }
3504
3505 impl<T: ToOffset + Clone> ToDebugRanges for Range<T> {
3506 fn to_debug_ranges(&self, snapshot: &BufferSnapshot) -> Vec<Range<usize>> {
3507 [self.clone()].to_debug_ranges(snapshot)
3508 }
3509 }
3510
3511 impl<T: ToOffset> ToDebugRanges for Vec<T> {
3512 fn to_debug_ranges(&self, snapshot: &BufferSnapshot) -> Vec<Range<usize>> {
3513 self.as_slice().to_debug_ranges(snapshot)
3514 }
3515 }
3516
3517 impl<T: ToOffset> ToDebugRanges for Vec<Range<T>> {
3518 fn to_debug_ranges(&self, snapshot: &BufferSnapshot) -> Vec<Range<usize>> {
3519 self.as_slice().to_debug_ranges(snapshot)
3520 }
3521 }
3522
3523 impl<T: ToOffset> ToDebugRanges for [T] {
3524 fn to_debug_ranges(&self, snapshot: &BufferSnapshot) -> Vec<Range<usize>> {
3525 self.iter()
3526 .map(|item| {
3527 let offset = item.to_offset(snapshot);
3528 offset..offset
3529 })
3530 .collect()
3531 }
3532 }
3533
3534 impl<T: ToOffset> ToDebugRanges for [Range<T>] {
3535 fn to_debug_ranges(&self, snapshot: &BufferSnapshot) -> Vec<Range<usize>> {
3536 self.iter()
3537 .map(|range| range.start.to_offset(snapshot)..range.end.to_offset(snapshot))
3538 .collect()
3539 }
3540 }
3541}