1mod anchor;
2pub mod locator;
3#[cfg(any(test, feature = "test-support"))]
4pub mod network;
5pub mod operation_queue;
6mod patch;
7mod selection;
8pub mod subscription;
9#[cfg(test)]
10mod tests;
11mod undo_map;
12
13pub use anchor::*;
14use anyhow::{Context as _, Result};
15use clock::Lamport;
16pub use clock::ReplicaId;
17use collections::{HashMap, HashSet};
18use locator::Locator;
19use operation_queue::OperationQueue;
20pub use patch::Patch;
21use postage::{oneshot, prelude::*};
22
23use regex::Regex;
24pub use rope::*;
25pub use selection::*;
26use std::{
27 borrow::Cow,
28 cmp::{self, Ordering, Reverse},
29 fmt::Display,
30 future::Future,
31 iter::Iterator,
32 num::NonZeroU64,
33 ops::{self, Deref, Range, Sub},
34 str,
35 sync::{Arc, LazyLock},
36 time::{Duration, Instant},
37};
38pub use subscription::*;
39pub use sum_tree::Bias;
40use sum_tree::{Dimensions, FilterCursor, SumTree, TreeMap, TreeSet};
41use undo_map::UndoMap;
42use util::debug_panic;
43
44#[cfg(any(test, feature = "test-support"))]
45use util::RandomCharIter;
46
/// Matches Windows (`\r\n`) and bare-carriage-return (`\r`) line separators,
/// so they can be rewritten as `\n`.
static LINE_SEPARATORS_REGEX: LazyLock<Regex> =
    LazyLock::new(|| Regex::new(r"\r\n|\r").expect("Failed to create LINE_SEPARATORS_REGEX"));
49
50pub type TransactionId = clock::Lamport;
51
/// A mutable, collaboratively-editable text buffer.
///
/// Local edits produce [`Operation`]s that can be replicated to other copies
/// of the buffer; remote operations are applied (or deferred until their
/// causal dependencies arrive) via `apply_ops`.
pub struct Buffer {
    /// The current state of the buffer's text and CRDT metadata.
    snapshot: BufferSnapshot,
    /// Operation log plus undo/redo stacks.
    history: History,
    /// Remote operations waiting for their causal dependencies to arrive.
    deferred_ops: OperationQueue<Operation>,
    /// Replicas that currently have at least one deferred operation.
    deferred_replicas: HashSet<ReplicaId>,
    /// Logical clock used to timestamp locally-generated operations.
    pub lamport_clock: clock::Lamport,
    /// Publishes edit patches (offset edits) to subscribers.
    subscriptions: Topic<usize>,
    /// Channels resolved once the edit with the given timestamp is applied.
    edit_id_resolvers: HashMap<clock::Lamport, Vec<oneshot::Sender<()>>>,
    /// Channels notified once the buffer has observed the paired version.
    wait_for_version_txs: Vec<(clock::Global, oneshot::Sender<()>)>,
}
62
/// A non-zero identifier for a buffer, shared across replicas.
#[repr(transparent)]
#[derive(Clone, Copy, Debug, Hash, PartialEq, PartialOrd, Ord, Eq)]
pub struct BufferId(NonZeroU64);
66
67impl Display for BufferId {
68 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
69 write!(f, "{}", self.0)
70 }
71}
72
73impl From<NonZeroU64> for BufferId {
74 fn from(id: NonZeroU64) -> Self {
75 BufferId(id)
76 }
77}
78
79impl BufferId {
80 /// Returns Err if `id` is outside of BufferId domain.
81 pub fn new(id: u64) -> anyhow::Result<Self> {
82 let id = NonZeroU64::new(id).context("Buffer id cannot be 0.")?;
83 Ok(Self(id))
84 }
85
86 /// Increments this buffer id, returning the old value.
87 /// So that's a post-increment operator in disguise.
88 pub fn next(&mut self) -> Self {
89 let old = *self;
90 self.0 = self.0.saturating_add(1);
91 old
92 }
93
94 pub fn to_proto(self) -> u64 {
95 self.into()
96 }
97}
98
99impl From<BufferId> for u64 {
100 fn from(id: BufferId) -> Self {
101 id.0.get()
102 }
103}
104
/// An immutable snapshot of a buffer's text and CRDT state at a point in time.
#[derive(Clone)]
pub struct BufferSnapshot {
    /// Id of the replica that owns this buffer.
    replica_id: ReplicaId,
    /// Identifier of the buffer, shared across replicas.
    remote_id: BufferId,
    /// The currently-visible (non-deleted) text.
    visible_text: Rope,
    /// The text of all deleted fragments, kept for undo and anchor resolution.
    deleted_text: Rope,
    /// Line-ending style detected when the buffer was created or last set.
    line_ending: LineEnding,
    /// Tracks which edits have been undone, and how many times.
    undo_map: UndoMap,
    /// The sequence of text fragments, in document order.
    fragments: SumTree<Fragment>,
    /// Index of fragments by (insertion timestamp, offset within insertion).
    insertions: SumTree<InsertionFragment>,
    /// Links edit ids to the insertion ranges they produced or deleted.
    insertion_slices: TreeSet<InsertionSlice>,
    /// Vector clock of all operations this snapshot has observed.
    pub version: clock::Global,
}
118
/// A transaction on the undo/redo stack, along with the timing metadata used
/// to decide whether adjacent transactions should be grouped.
#[derive(Clone, Debug)]
pub struct HistoryEntry {
    /// The recorded transaction.
    transaction: Transaction,
    /// When the transaction's first edit occurred.
    first_edit_at: Instant,
    /// When the transaction's most recent edit occurred.
    last_edit_at: Instant,
    /// When true, this entry is never merged with its neighbors during grouping.
    suppress_grouping: bool,
}
126
/// A group of edits that undo/redo as a single unit.
#[derive(Clone, Debug)]
pub struct Transaction {
    /// Unique identifier for the transaction.
    pub id: TransactionId,
    /// Timestamps of the edit operations that belong to this transaction.
    pub edit_ids: Vec<clock::Lamport>,
    /// The buffer version when the transaction began.
    pub start: clock::Global,
}
133
134impl Transaction {
135 pub fn merge_in(&mut self, other: Transaction) {
136 self.edit_ids.extend(other.edit_ids);
137 }
138}
139
140impl HistoryEntry {
141 pub fn transaction_id(&self) -> TransactionId {
142 self.transaction.id
143 }
144}
145
/// Tracks the operation log and undo/redo state for a buffer.
struct History {
    /// The buffer's original contents.
    base_text: Rope,
    /// Every operation ever pushed, keyed by its lamport timestamp.
    operations: TreeMap<clock::Lamport, Operation>,
    /// Transactions that can be undone, most recent last.
    undo_stack: Vec<HistoryEntry>,
    /// Transactions that have been undone and can be redone, most recent last.
    redo_stack: Vec<HistoryEntry>,
    /// Current nesting depth of in-progress transactions (0 = none active).
    transaction_depth: usize,
    /// Edits closer together in time than this are grouped into one transaction.
    group_interval: Duration,
}
154
/// Associates an edit with a byte range of a particular insertion.
#[derive(Clone, Debug, Eq, PartialEq)]
struct InsertionSlice {
    /// The edit that produced or deleted this slice.
    edit_id: clock::Lamport,
    /// The insertion the slice belongs to.
    insertion_id: clock::Lamport,
    /// Byte range within that insertion.
    range: Range<usize>,
}
161
162impl Ord for InsertionSlice {
163 fn cmp(&self, other: &Self) -> Ordering {
164 self.edit_id
165 .cmp(&other.edit_id)
166 .then_with(|| self.insertion_id.cmp(&other.insertion_id))
167 .then_with(|| self.range.start.cmp(&other.range.start))
168 .then_with(|| self.range.end.cmp(&other.range.end))
169 }
170}
171
impl PartialOrd for InsertionSlice {
    /// Delegates to the total order defined by [`Ord`].
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}
177
178impl InsertionSlice {
179 fn from_fragment(edit_id: clock::Lamport, fragment: &Fragment) -> Self {
180 Self {
181 edit_id,
182 insertion_id: fragment.timestamp,
183 range: fragment.insertion_offset..fragment.insertion_offset + fragment.len,
184 }
185 }
186}
187
impl History {
    /// Creates an empty history with `base_text` as the original buffer contents.
    pub fn new(base_text: Rope) -> Self {
        Self {
            base_text,
            operations: Default::default(),
            undo_stack: Vec::new(),
            redo_stack: Vec::new(),
            transaction_depth: 0,
            // Don't group transactions in tests unless we opt in, because it's a footgun.
            #[cfg(any(test, feature = "test-support"))]
            group_interval: Duration::ZERO,
            #[cfg(not(any(test, feature = "test-support")))]
            group_interval: Duration::from_millis(300),
        }
    }

    /// Records `op` in the operation log, keyed by its timestamp.
    fn push(&mut self, op: Operation) {
        self.operations.insert(op.timestamp(), op);
    }

    /// Begins a (possibly nested) transaction. Only the outermost call pushes
    /// a new undo-stack entry and returns its id; nested calls merely increase
    /// the depth and return `None`.
    fn start_transaction(
        &mut self,
        start: clock::Global,
        now: Instant,
        clock: &mut clock::Lamport,
    ) -> Option<TransactionId> {
        self.transaction_depth += 1;
        if self.transaction_depth == 1 {
            let id = clock.tick();
            self.undo_stack.push(HistoryEntry {
                transaction: Transaction {
                    id,
                    start,
                    edit_ids: Default::default(),
                },
                first_edit_at: now,
                last_edit_at: now,
                suppress_grouping: false,
            });
            Some(id)
        } else {
            None
        }
    }

    /// Ends one level of transaction nesting. When the outermost level ends,
    /// an entry that accumulated no edits is discarded; otherwise the redo
    /// stack is cleared and the finished entry is returned.
    fn end_transaction(&mut self, now: Instant) -> Option<&HistoryEntry> {
        assert_ne!(self.transaction_depth, 0);
        self.transaction_depth -= 1;
        if self.transaction_depth == 0 {
            if self
                .undo_stack
                .last()
                .unwrap()
                .transaction
                .edit_ids
                .is_empty()
            {
                self.undo_stack.pop();
                None
            } else {
                self.redo_stack.clear();
                let entry = self.undo_stack.last_mut().unwrap();
                entry.last_edit_at = now;
                Some(entry)
            }
        } else {
            None
        }
    }

    /// Merges trailing undo-stack entries whose edits occurred within
    /// `group_interval` of each other into a single transaction, returning the
    /// id of the resulting last entry.
    fn group(&mut self) -> Option<TransactionId> {
        let mut count = 0;
        let mut entries = self.undo_stack.iter();
        if let Some(mut entry) = entries.next_back() {
            // Walk backwards while adjacent entries are close enough in time
            // (and not explicitly finalized) to be grouped together.
            while let Some(prev_entry) = entries.next_back() {
                if !prev_entry.suppress_grouping
                    && entry.first_edit_at - prev_entry.last_edit_at < self.group_interval
                {
                    entry = prev_entry;
                    count += 1;
                } else {
                    break;
                }
            }
        }
        self.group_trailing(count)
    }

    /// Merges every undo-stack entry newer than `transaction_id` into it,
    /// stopping early at any entry that suppresses grouping. Does nothing if
    /// the transaction isn't on the undo stack.
    fn group_until(&mut self, transaction_id: TransactionId) {
        let mut count = 0;
        for entry in self.undo_stack.iter().rev() {
            if entry.transaction_id() == transaction_id {
                self.group_trailing(count);
                break;
            } else if entry.suppress_grouping {
                break;
            } else {
                count += 1;
            }
        }
    }

    /// Folds the last `n` undo-stack entries into the entry immediately
    /// preceding them, returning the id of the resulting last entry (if any).
    fn group_trailing(&mut self, n: usize) -> Option<TransactionId> {
        let new_len = self.undo_stack.len() - n;
        let (entries_to_keep, entries_to_merge) = self.undo_stack.split_at_mut(new_len);
        if let Some(last_entry) = entries_to_keep.last_mut() {
            // Move all edit ids from the merged entries into the surviving one.
            for entry in &*entries_to_merge {
                for edit_id in &entry.transaction.edit_ids {
                    last_entry.transaction.edit_ids.push(*edit_id);
                }
            }

            // The surviving entry now ends when the newest merged entry ended.
            if let Some(entry) = entries_to_merge.last_mut() {
                last_entry.last_edit_at = entry.last_edit_at;
            }
        }

        self.undo_stack.truncate(new_len);
        self.undo_stack.last().map(|e| e.transaction.id)
    }

    /// Marks the most recent transaction so later edits won't be grouped with
    /// it, returning that transaction if one exists.
    fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
        self.undo_stack.last_mut().map(|entry| {
            entry.suppress_grouping = true;
            &entry.transaction
        })
    }

    /// Pushes an already-built transaction onto the undo stack. Must not be
    /// called while a transaction is in progress.
    fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
        assert_eq!(self.transaction_depth, 0);
        self.undo_stack.push(HistoryEntry {
            transaction,
            first_edit_at: now,
            last_edit_at: now,
            suppress_grouping: false,
        });
    }

    /// Differs from `push_transaction` in that it does not clear the redo
    /// stack. Intended to be used to create a parent transaction to merge
    /// potential child transactions into.
    ///
    /// The caller is responsible for removing it from the undo history using
    /// `forget_transaction` if no edits are merged into it. Otherwise, if edits
    /// are merged into this transaction, the caller is responsible for ensuring
    /// the redo stack is cleared. The easiest way to ensure the redo stack is
    /// cleared is to create transactions with the usual `start_transaction` and
    /// `end_transaction` methods and merging the resulting transactions into
    /// the transaction created by this method.
    fn push_empty_transaction(
        &mut self,
        start: clock::Global,
        now: Instant,
        clock: &mut clock::Lamport,
    ) -> TransactionId {
        assert_eq!(self.transaction_depth, 0);
        let id = clock.tick();
        let transaction = Transaction {
            id,
            start,
            edit_ids: Vec::new(),
        };
        self.undo_stack.push(HistoryEntry {
            transaction,
            first_edit_at: now,
            last_edit_at: now,
            suppress_grouping: false,
        });
        id
    }

    /// Associates `op_id` with the in-progress transaction, provided it refers
    /// to an edit operation already recorded in the log.
    fn push_undo(&mut self, op_id: clock::Lamport) {
        assert_ne!(self.transaction_depth, 0);
        if let Some(Operation::Edit(_)) = self.operations.get(&op_id) {
            let last_transaction = self.undo_stack.last_mut().unwrap();
            last_transaction.transaction.edit_ids.push(op_id);
        }
    }

    /// Moves the most recent undo-stack entry to the redo stack and returns it.
    fn pop_undo(&mut self) -> Option<&HistoryEntry> {
        assert_eq!(self.transaction_depth, 0);
        if let Some(entry) = self.undo_stack.pop() {
            self.redo_stack.push(entry);
            self.redo_stack.last()
        } else {
            None
        }
    }

    /// Moves the entry for `transaction_id` (searched from the most recent)
    /// from the undo stack to the redo stack and returns it.
    fn remove_from_undo(&mut self, transaction_id: TransactionId) -> Option<&HistoryEntry> {
        assert_eq!(self.transaction_depth, 0);

        let entry_ix = self
            .undo_stack
            .iter()
            .rposition(|entry| entry.transaction.id == transaction_id)?;
        let entry = self.undo_stack.remove(entry_ix);
        self.redo_stack.push(entry);
        self.redo_stack.last()
    }

    /// Moves every undo-stack entry from the most recent down to and including
    /// `transaction_id` onto the redo stack, returning the moved entries.
    /// Entries are pushed newest-first, so the target transaction ends up on
    /// top of the redo stack.
    fn remove_from_undo_until(&mut self, transaction_id: TransactionId) -> &[HistoryEntry] {
        assert_eq!(self.transaction_depth, 0);

        let redo_stack_start_len = self.redo_stack.len();
        if let Some(entry_ix) = self
            .undo_stack
            .iter()
            .rposition(|entry| entry.transaction.id == transaction_id)
        {
            self.redo_stack
                .extend(self.undo_stack.drain(entry_ix..).rev());
        }
        &self.redo_stack[redo_stack_start_len..]
    }

    /// Removes the transaction from whichever stack currently holds it (undo
    /// first, then redo) and returns it.
    fn forget(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
        assert_eq!(self.transaction_depth, 0);
        if let Some(entry_ix) = self
            .undo_stack
            .iter()
            .rposition(|entry| entry.transaction.id == transaction_id)
        {
            Some(self.undo_stack.remove(entry_ix).transaction)
        } else if let Some(entry_ix) = self
            .redo_stack
            .iter()
            .rposition(|entry| entry.transaction.id == transaction_id)
        {
            Some(self.redo_stack.remove(entry_ix).transaction)
        } else {
            None
        }
    }

    /// Looks up a transaction by id on either stack (undo first).
    fn transaction(&self, transaction_id: TransactionId) -> Option<&Transaction> {
        let entry = self
            .undo_stack
            .iter()
            .rfind(|entry| entry.transaction.id == transaction_id)
            .or_else(|| {
                self.redo_stack
                    .iter()
                    .rfind(|entry| entry.transaction.id == transaction_id)
            })?;
        Some(&entry.transaction)
    }

    /// Mutable variant of [`History::transaction`].
    fn transaction_mut(&mut self, transaction_id: TransactionId) -> Option<&mut Transaction> {
        let entry = self
            .undo_stack
            .iter_mut()
            .rfind(|entry| entry.transaction.id == transaction_id)
            .or_else(|| {
                self.redo_stack
                    .iter_mut()
                    .rfind(|entry| entry.transaction.id == transaction_id)
            })?;
        Some(&mut entry.transaction)
    }

    /// Removes `transaction` from history and appends its edits to
    /// `destination`. A no-op if either transaction cannot be found.
    fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
        if let Some(transaction) = self.forget(transaction)
            && let Some(destination) = self.transaction_mut(destination)
        {
            destination.edit_ids.extend(transaction.edit_ids);
        }
    }

    /// Moves the most recent redo-stack entry back to the undo stack and
    /// returns it.
    fn pop_redo(&mut self) -> Option<&HistoryEntry> {
        assert_eq!(self.transaction_depth, 0);
        if let Some(entry) = self.redo_stack.pop() {
            self.undo_stack.push(entry);
            self.undo_stack.last()
        } else {
            None
        }
    }

    /// Moves every redo-stack entry from the most recent down to and including
    /// `transaction_id` back onto the undo stack, returning the moved entries.
    fn remove_from_redo(&mut self, transaction_id: TransactionId) -> &[HistoryEntry] {
        assert_eq!(self.transaction_depth, 0);

        let undo_stack_start_len = self.undo_stack.len();
        if let Some(entry_ix) = self
            .redo_stack
            .iter()
            .rposition(|entry| entry.transaction.id == transaction_id)
        {
            self.undo_stack
                .extend(self.redo_stack.drain(entry_ix..).rev());
        }
        &self.undo_stack[undo_stack_start_len..]
    }
}
482
/// Iterator state for computing the edits made to a buffer since a given
/// version (see the fields: cursors over visible/deleted text plus a filtered
/// cursor over fragments selected by `F`).
struct Edits<'a, D: TextDimension, F: FnMut(&FragmentSummary) -> bool> {
    /// Cursor over the current visible text.
    visible_cursor: rope::Cursor<'a>,
    /// Cursor over the deleted text.
    deleted_cursor: rope::Cursor<'a>,
    /// Fragments to visit, filtered by `F`; `None` when iteration is exhausted.
    fragments_cursor: Option<FilterCursor<'a, 'static, F, Fragment, FragmentTextSummary>>,
    /// Undo state used to interpret fragment visibility.
    undos: &'a UndoMap,
    /// The version edits are being computed against.
    since: &'a clock::Global,
    /// Position reached so far in the old (pre-edit) coordinate space.
    old_end: D,
    /// Position reached so far in the new (post-edit) coordinate space.
    new_end: D,
    /// Bounds of the iteration, as (fragment locator, offset) pairs.
    range: Range<(&'a Locator, usize)>,
    /// Id of the buffer the edits belong to.
    buffer_id: BufferId,
}
494
/// A single text change, expressed as the range it replaced in the old
/// coordinate space and the range it occupies in the new one. `D` is the
/// coordinate type (e.g. byte offset).
#[derive(Clone, Debug, Default, Eq, PartialEq)]
pub struct Edit<D> {
    /// The replaced range, in pre-edit coordinates.
    pub old: Range<D>,
    /// The replacement range, in post-edit coordinates.
    pub new: Range<D>,
}
500impl<D> Edit<D>
501where
502 D: PartialEq,
503{
504 pub fn is_empty(&self) -> bool {
505 self.old.start == self.old.end && self.new.start == self.new.end
506 }
507}
508
509impl<D, DDelta> Edit<D>
510where
511 D: Sub<D, Output = DDelta> + Copy,
512{
513 pub fn old_len(&self) -> DDelta {
514 self.old.end - self.old.start
515 }
516
517 pub fn new_len(&self) -> DDelta {
518 self.new.end - self.new.start
519 }
520}
521
522impl<D1, D2> Edit<(D1, D2)> {
523 pub fn flatten(self) -> (Edit<D1>, Edit<D2>) {
524 (
525 Edit {
526 old: self.old.start.0..self.old.end.0,
527 new: self.new.start.0..self.new.end.0,
528 },
529 Edit {
530 old: self.old.start.1..self.old.end.1,
531 new: self.new.start.1..self.new.end.1,
532 },
533 )
534 }
535}
536
/// A contiguous piece of an insertion, positioned in the document by its
/// locator. Fragments are split as later edits land inside them.
#[derive(Eq, PartialEq, Clone, Debug)]
pub struct Fragment {
    /// Dense ordering key determining this fragment's position in the document.
    pub id: Locator,
    /// Timestamp of the insertion this fragment came from.
    pub timestamp: clock::Lamport,
    /// Byte offset of this fragment within its original insertion.
    pub insertion_offset: usize,
    /// Length of the fragment in bytes.
    pub len: usize,
    /// Whether the fragment is currently part of the visible text.
    pub visible: bool,
    /// Timestamps of the edits that deleted this fragment.
    pub deletions: HashSet<clock::Lamport>,
    /// Versions of undo operations applied to this fragment — see `UndoMap`.
    pub max_undos: clock::Global,
}
547
/// Sum-tree summary aggregated over a run of fragments.
#[derive(Eq, PartialEq, Clone, Debug)]
pub struct FragmentSummary {
    /// Total visible and deleted byte counts.
    text: FragmentTextSummary,
    /// The greatest fragment locator in the summarized range.
    max_id: Locator,
    /// Union of the versions observed by the summarized fragments.
    max_version: clock::Global,
    /// Lower bound of the summarized fragments' insertion versions.
    min_insertion_version: clock::Global,
    /// Upper bound of the summarized fragments' insertion versions.
    max_insertion_version: clock::Global,
}
556
/// Byte counts of visible and deleted text over a run of fragments.
#[derive(Copy, Default, Clone, Debug, PartialEq, Eq)]
struct FragmentTextSummary {
    /// Bytes of currently-visible text.
    visible: usize,
    /// Bytes of deleted text.
    deleted: usize,
}
562
563impl<'a> sum_tree::Dimension<'a, FragmentSummary> for FragmentTextSummary {
564 fn zero(_: &Option<clock::Global>) -> Self {
565 Default::default()
566 }
567
568 fn add_summary(&mut self, summary: &'a FragmentSummary, _: &Option<clock::Global>) {
569 self.visible += summary.text.visible;
570 self.deleted += summary.text.deleted;
571 }
572}
573
/// Maps a position within an insertion to the fragment currently holding it,
/// letting anchors in insertion coordinates be resolved to document positions.
#[derive(Eq, PartialEq, Clone, Debug)]
struct InsertionFragment {
    /// Timestamp of the insertion.
    timestamp: clock::Lamport,
    /// Byte offset within the insertion where this fragment begins.
    split_offset: usize,
    /// Locator of the fragment that contains this part of the insertion.
    fragment_id: Locator,
}
580
/// Lookup key for [`InsertionFragment`]: (insertion timestamp, offset within
/// the insertion).
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)]
struct InsertionFragmentKey {
    /// Timestamp of the insertion.
    timestamp: clock::Lamport,
    /// Byte offset within the insertion.
    split_offset: usize,
}
586
/// A replicated buffer operation.
#[derive(Clone, Debug, Eq, PartialEq)]
pub enum Operation {
    /// Inserts and/or deletes text.
    Edit(EditOperation),
    /// Undoes previously-applied edit operations.
    Undo(UndoOperation),
}
592
/// A replicated edit: a set of replaced ranges and their replacement texts.
#[derive(Clone, Debug, Eq, PartialEq)]
pub struct EditOperation {
    /// Timestamp uniquely identifying this edit.
    pub timestamp: clock::Lamport,
    /// The buffer version the edit was made against.
    pub version: clock::Global,
    /// Replaced ranges, in "full" offsets (visible + deleted text).
    pub ranges: Vec<Range<FullOffset>>,
    /// Replacement text for each range, parallel to `ranges`.
    pub new_text: Vec<Arc<str>>,
}
600
/// A replicated undo: adjusts the undo count of one or more prior edits.
#[derive(Clone, Debug, Eq, PartialEq)]
pub struct UndoOperation {
    /// Timestamp uniquely identifying this undo.
    pub timestamp: clock::Lamport,
    /// The buffer version the undo was made against.
    pub version: clock::Global,
    /// New undo count for each affected edit; interpretation lives in `UndoMap`.
    pub counts: HashMap<clock::Lamport, u32>,
}
607
/// Stores information about the indentation of a line (tabs and spaces).
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub struct LineIndent {
    /// Number of tab characters in the line's leading whitespace.
    pub tabs: u32,
    /// Number of space characters in the line's leading whitespace.
    pub spaces: u32,
    /// Whether the line consists entirely of whitespace.
    pub line_blank: bool,
}
615
616impl LineIndent {
617 pub fn from_chunks(chunks: &mut Chunks) -> Self {
618 let mut tabs = 0;
619 let mut spaces = 0;
620 let mut line_blank = true;
621
622 'outer: while let Some(chunk) = chunks.peek() {
623 for ch in chunk.chars() {
624 if ch == '\t' {
625 tabs += 1;
626 } else if ch == ' ' {
627 spaces += 1;
628 } else {
629 if ch != '\n' {
630 line_blank = false;
631 }
632 break 'outer;
633 }
634 }
635
636 chunks.next();
637 }
638
639 Self {
640 tabs,
641 spaces,
642 line_blank,
643 }
644 }
645
646 /// Constructs a new `LineIndent` which only contains spaces.
647 pub fn spaces(spaces: u32) -> Self {
648 Self {
649 tabs: 0,
650 spaces,
651 line_blank: true,
652 }
653 }
654
655 /// Constructs a new `LineIndent` which only contains tabs.
656 pub fn tabs(tabs: u32) -> Self {
657 Self {
658 tabs,
659 spaces: 0,
660 line_blank: true,
661 }
662 }
663
664 /// Indicates whether the line is empty.
665 pub fn is_line_empty(&self) -> bool {
666 self.tabs == 0 && self.spaces == 0 && self.line_blank
667 }
668
669 /// Indicates whether the line is blank (contains only whitespace).
670 pub fn is_line_blank(&self) -> bool {
671 self.line_blank
672 }
673
674 /// Returns the number of indentation characters (tabs or spaces).
675 pub fn raw_len(&self) -> u32 {
676 self.tabs + self.spaces
677 }
678
679 /// Returns the number of indentation characters (tabs or spaces), taking tab size into account.
680 pub fn len(&self, tab_size: u32) -> u32 {
681 self.tabs * tab_size + self.spaces
682 }
683}
684
685impl From<&str> for LineIndent {
686 fn from(value: &str) -> Self {
687 Self::from_iter(value.chars())
688 }
689}
690
691impl FromIterator<char> for LineIndent {
692 fn from_iter<T: IntoIterator<Item = char>>(chars: T) -> Self {
693 let mut tabs = 0;
694 let mut spaces = 0;
695 let mut line_blank = true;
696 for c in chars {
697 if c == '\t' {
698 tabs += 1;
699 } else if c == ' ' {
700 spaces += 1;
701 } else {
702 if c != '\n' {
703 line_blank = false;
704 }
705 break;
706 }
707 }
708 Self {
709 tabs,
710 spaces,
711 line_blank,
712 }
713 }
714}
715
716impl Buffer {
717 pub fn new(replica_id: ReplicaId, remote_id: BufferId, base_text: impl Into<String>) -> Buffer {
718 let mut base_text = base_text.into();
719 let line_ending = LineEnding::detect(&base_text);
720 LineEnding::normalize(&mut base_text);
721 Self::new_normalized(replica_id, remote_id, line_ending, Rope::from(&*base_text))
722 }
723
    /// Creates a buffer from text that has already been normalized to `\n`
    /// line endings; `line_ending` records the style the original text used.
    pub fn new_normalized(
        replica_id: ReplicaId,
        remote_id: BufferId,
        line_ending: LineEnding,
        normalized: Rope,
    ) -> Buffer {
        let history = History::new(normalized);
        let mut fragments = SumTree::new(&None);
        let mut insertions = SumTree::default();

        let mut lamport_clock = clock::Lamport::new(replica_id);
        let mut version = clock::Global::new();

        let visible_text = history.base_text.clone();
        if !visible_text.is_empty() {
            // Represent the base text as a single insertion attributed to the
            // local replica, spanning the entire locator space.
            let insertion_timestamp = clock::Lamport::new(ReplicaId::LOCAL);
            lamport_clock.observe(insertion_timestamp);
            version.observe(insertion_timestamp);
            let fragment_id = Locator::between(&Locator::min(), &Locator::max());
            let fragment = Fragment {
                id: fragment_id,
                timestamp: insertion_timestamp,
                insertion_offset: 0,
                len: visible_text.len(),
                visible: true,
                deletions: Default::default(),
                max_undos: Default::default(),
            };
            insertions.push(InsertionFragment::new(&fragment), ());
            fragments.push(fragment, &None);
        }

        Buffer {
            snapshot: BufferSnapshot {
                replica_id,
                remote_id,
                visible_text,
                deleted_text: Rope::new(),
                line_ending,
                fragments,
                insertions,
                version,
                undo_map: Default::default(),
                insertion_slices: Default::default(),
            },
            history,
            deferred_ops: OperationQueue::new(),
            deferred_replicas: HashSet::default(),
            lamport_clock,
            subscriptions: Default::default(),
            edit_id_resolvers: Default::default(),
            wait_for_version_txs: Default::default(),
        }
    }
778
    /// Returns a clone of the vector clock describing which operations this
    /// buffer has observed.
    pub fn version(&self) -> clock::Global {
        self.version.clone()
    }
782
    /// Returns an immutable snapshot of the buffer's current state.
    pub fn snapshot(&self) -> BufferSnapshot {
        self.snapshot.clone()
    }
786
    /// Creates an independent copy of this buffer sharing its current snapshot
    /// but with fresh history, clock (branch replica id), and subscriptions —
    /// presumably for making edits without affecting the original; confirm
    /// intended usage with callers.
    pub fn branch(&self) -> Self {
        Self {
            snapshot: self.snapshot.clone(),
            history: History::new(self.base_text().clone()),
            deferred_ops: OperationQueue::new(),
            deferred_replicas: HashSet::default(),
            lamport_clock: clock::Lamport::new(ReplicaId::LOCAL_BRANCH),
            subscriptions: Default::default(),
            edit_id_resolvers: Default::default(),
            wait_for_version_txs: Default::default(),
        }
    }
799
    /// The id of the replica that owns this buffer instance.
    pub fn replica_id(&self) -> ReplicaId {
        self.lamport_clock.replica_id
    }
803
    /// The buffer's identifier, shared across replicas.
    pub fn remote_id(&self) -> BufferId {
        self.remote_id
    }
807
    /// Number of remote operations still waiting for their dependencies.
    pub fn deferred_ops_len(&self) -> usize {
        self.deferred_ops.len()
    }
811
    /// The time window within which adjacent transactions are grouped for undo.
    pub fn transaction_group_interval(&self) -> Duration {
        self.history.group_interval
    }
815
    /// Applies `edits` (range → replacement text pairs) as a single local edit,
    /// records it in history, and returns the operation to replicate to other
    /// copies of the buffer.
    pub fn edit<R, I, S, T>(&mut self, edits: R) -> Operation
    where
        R: IntoIterator<IntoIter = I>,
        I: ExactSizeIterator<Item = (Range<S>, T)>,
        S: ToOffset,
        T: Into<Arc<str>>,
    {
        let edits = edits
            .into_iter()
            .map(|(range, new_text)| (range, new_text.into()));

        // Wrap the edit in a transaction so it participates in undo grouping.
        self.start_transaction();
        let timestamp = self.lamport_clock.tick();
        let operation = Operation::Edit(self.apply_local_edit(edits, timestamp));

        self.history.push(operation.clone());
        self.history.push_undo(operation.timestamp());
        self.snapshot.version.observe(operation.timestamp());
        self.end_transaction();
        operation
    }
837
838 fn apply_local_edit<S: ToOffset, T: Into<Arc<str>>>(
839 &mut self,
840 edits: impl ExactSizeIterator<Item = (Range<S>, T)>,
841 timestamp: clock::Lamport,
842 ) -> EditOperation {
843 let mut edits_patch = Patch::default();
844 let mut edit_op = EditOperation {
845 timestamp,
846 version: self.version(),
847 ranges: Vec::with_capacity(edits.len()),
848 new_text: Vec::with_capacity(edits.len()),
849 };
850 let mut new_insertions = Vec::new();
851 let mut insertion_offset = 0;
852 let mut insertion_slices = Vec::new();
853
854 let mut edits = edits
855 .map(|(range, new_text)| (range.to_offset(&*self), new_text))
856 .peekable();
857
858 let mut new_ropes =
859 RopeBuilder::new(self.visible_text.cursor(0), self.deleted_text.cursor(0));
860 let mut old_fragments = self.fragments.cursor::<FragmentTextSummary>(&None);
861 let mut new_fragments = old_fragments.slice(&edits.peek().unwrap().0.start, Bias::Right);
862 new_ropes.append(new_fragments.summary().text);
863
864 let mut fragment_start = old_fragments.start().visible;
865 for (range, new_text) in edits {
866 let new_text = LineEnding::normalize_arc(new_text.into());
867 let fragment_end = old_fragments.end().visible;
868
869 // If the current fragment ends before this range, then jump ahead to the first fragment
870 // that extends past the start of this range, reusing any intervening fragments.
871 if fragment_end < range.start {
872 // If the current fragment has been partially consumed, then consume the rest of it
873 // and advance to the next fragment before slicing.
874 if fragment_start > old_fragments.start().visible {
875 if fragment_end > fragment_start {
876 let mut suffix = old_fragments.item().unwrap().clone();
877 suffix.len = fragment_end - fragment_start;
878 suffix.insertion_offset += fragment_start - old_fragments.start().visible;
879 new_insertions.push(InsertionFragment::insert_new(&suffix));
880 new_ropes.push_fragment(&suffix, suffix.visible);
881 new_fragments.push(suffix, &None);
882 }
883 old_fragments.next();
884 }
885
886 let slice = old_fragments.slice(&range.start, Bias::Right);
887 new_ropes.append(slice.summary().text);
888 new_fragments.append(slice, &None);
889 fragment_start = old_fragments.start().visible;
890 }
891
892 let full_range_start = FullOffset(range.start + old_fragments.start().deleted);
893
894 // Preserve any portion of the current fragment that precedes this range.
895 if fragment_start < range.start {
896 let mut prefix = old_fragments.item().unwrap().clone();
897 prefix.len = range.start - fragment_start;
898 prefix.insertion_offset += fragment_start - old_fragments.start().visible;
899 prefix.id = Locator::between(&new_fragments.summary().max_id, &prefix.id);
900 new_insertions.push(InsertionFragment::insert_new(&prefix));
901 new_ropes.push_fragment(&prefix, prefix.visible);
902 new_fragments.push(prefix, &None);
903 fragment_start = range.start;
904 }
905
906 // Insert the new text before any existing fragments within the range.
907 if !new_text.is_empty() {
908 let new_start = new_fragments.summary().text.visible;
909
910 let fragment = Fragment {
911 id: Locator::between(
912 &new_fragments.summary().max_id,
913 old_fragments
914 .item()
915 .map_or(&Locator::max(), |old_fragment| &old_fragment.id),
916 ),
917 timestamp,
918 insertion_offset,
919 len: new_text.len(),
920 deletions: Default::default(),
921 max_undos: Default::default(),
922 visible: true,
923 };
924 edits_patch.push(Edit {
925 old: fragment_start..fragment_start,
926 new: new_start..new_start + new_text.len(),
927 });
928 insertion_slices.push(InsertionSlice::from_fragment(timestamp, &fragment));
929 new_insertions.push(InsertionFragment::insert_new(&fragment));
930 new_ropes.push_str(new_text.as_ref());
931 new_fragments.push(fragment, &None);
932 insertion_offset += new_text.len();
933 }
934
935 // Advance through every fragment that intersects this range, marking the intersecting
936 // portions as deleted.
937 while fragment_start < range.end {
938 let fragment = old_fragments.item().unwrap();
939 let fragment_end = old_fragments.end().visible;
940 let mut intersection = fragment.clone();
941 let intersection_end = cmp::min(range.end, fragment_end);
942 if fragment.visible {
943 intersection.len = intersection_end - fragment_start;
944 intersection.insertion_offset += fragment_start - old_fragments.start().visible;
945 intersection.id =
946 Locator::between(&new_fragments.summary().max_id, &intersection.id);
947 intersection.deletions.insert(timestamp);
948 intersection.visible = false;
949 }
950 if intersection.len > 0 {
951 if fragment.visible && !intersection.visible {
952 let new_start = new_fragments.summary().text.visible;
953 edits_patch.push(Edit {
954 old: fragment_start..intersection_end,
955 new: new_start..new_start,
956 });
957 insertion_slices
958 .push(InsertionSlice::from_fragment(timestamp, &intersection));
959 }
960 new_insertions.push(InsertionFragment::insert_new(&intersection));
961 new_ropes.push_fragment(&intersection, fragment.visible);
962 new_fragments.push(intersection, &None);
963 fragment_start = intersection_end;
964 }
965 if fragment_end <= range.end {
966 old_fragments.next();
967 }
968 }
969
970 let full_range_end = FullOffset(range.end + old_fragments.start().deleted);
971 edit_op.ranges.push(full_range_start..full_range_end);
972 edit_op.new_text.push(new_text);
973 }
974
975 // If the current fragment has been partially consumed, then consume the rest of it
976 // and advance to the next fragment before slicing.
977 if fragment_start > old_fragments.start().visible {
978 let fragment_end = old_fragments.end().visible;
979 if fragment_end > fragment_start {
980 let mut suffix = old_fragments.item().unwrap().clone();
981 suffix.len = fragment_end - fragment_start;
982 suffix.insertion_offset += fragment_start - old_fragments.start().visible;
983 new_insertions.push(InsertionFragment::insert_new(&suffix));
984 new_ropes.push_fragment(&suffix, suffix.visible);
985 new_fragments.push(suffix, &None);
986 }
987 old_fragments.next();
988 }
989
990 let suffix = old_fragments.suffix();
991 new_ropes.append(suffix.summary().text);
992 new_fragments.append(suffix, &None);
993 let (visible_text, deleted_text) = new_ropes.finish();
994 drop(old_fragments);
995
996 self.snapshot.fragments = new_fragments;
997 self.snapshot.insertions.edit(new_insertions, ());
998 self.snapshot.visible_text = visible_text;
999 self.snapshot.deleted_text = deleted_text;
1000 self.subscriptions.publish_mut(&edits_patch);
1001 self.snapshot.insertion_slices.extend(insertion_slices);
1002 edit_op
1003 }
1004
    /// Overrides the line-ending style reported for this buffer. Only the
    /// metadata changes; the stored text is not modified.
    pub fn set_line_ending(&mut self, line_ending: LineEnding) {
        self.snapshot.line_ending = line_ending;
    }
1008
1009 pub fn apply_ops<I: IntoIterator<Item = Operation>>(&mut self, ops: I) {
1010 let mut deferred_ops = Vec::new();
1011 for op in ops {
1012 self.history.push(op.clone());
1013 if self.can_apply_op(&op) {
1014 self.apply_op(op);
1015 } else {
1016 self.deferred_replicas.insert(op.replica_id());
1017 deferred_ops.push(op);
1018 }
1019 }
1020 self.deferred_ops.insert(deferred_ops);
1021 self.flush_deferred_ops();
1022 }
1023
    /// Applies a single operation whose dependencies are satisfied, skipping
    /// it if its timestamp was already observed, then wakes any tasks waiting
    /// for a version this operation completes.
    fn apply_op(&mut self, op: Operation) {
        match op {
            Operation::Edit(edit) => {
                if !self.version.observed(edit.timestamp) {
                    self.apply_remote_edit(
                        &edit.version,
                        &edit.ranges,
                        &edit.new_text,
                        edit.timestamp,
                    );
                    self.snapshot.version.observe(edit.timestamp);
                    self.lamport_clock.observe(edit.timestamp);
                    // Notify anyone awaiting this specific edit id.
                    self.resolve_edit(edit.timestamp);
                }
            }
            Operation::Undo(undo) => {
                if !self.version.observed(undo.timestamp) {
                    self.apply_undo(&undo);
                    self.snapshot.version.observe(undo.timestamp);
                    self.lamport_clock.observe(undo.timestamp);
                }
            }
        }
        // Wake version waiters that are now satisfied; a failed send just
        // means the waiter was dropped, which is fine.
        self.wait_for_version_txs.retain_mut(|(version, tx)| {
            if self.snapshot.version().observed_all(version) {
                tx.try_send(()).ok();
                false
            } else {
                true
            }
        });
    }
1056
1057 fn apply_remote_edit(
1058 &mut self,
1059 version: &clock::Global,
1060 ranges: &[Range<FullOffset>],
1061 new_text: &[Arc<str>],
1062 timestamp: clock::Lamport,
1063 ) {
1064 if ranges.is_empty() {
1065 return;
1066 }
1067
1068 let edits = ranges.iter().zip(new_text.iter());
1069 let mut edits_patch = Patch::default();
1070 let mut insertion_slices = Vec::new();
1071 let cx = Some(version.clone());
1072 let mut new_insertions = Vec::new();
1073 let mut insertion_offset = 0;
1074 let mut new_ropes =
1075 RopeBuilder::new(self.visible_text.cursor(0), self.deleted_text.cursor(0));
1076 let mut old_fragments = self
1077 .fragments
1078 .cursor::<Dimensions<VersionedFullOffset, usize>>(&cx);
1079 let mut new_fragments =
1080 old_fragments.slice(&VersionedFullOffset::Offset(ranges[0].start), Bias::Left);
1081 new_ropes.append(new_fragments.summary().text);
1082
1083 let mut fragment_start = old_fragments.start().0.full_offset();
1084 for (range, new_text) in edits {
1085 let fragment_end = old_fragments.end().0.full_offset();
1086
1087 // If the current fragment ends before this range, then jump ahead to the first fragment
1088 // that extends past the start of this range, reusing any intervening fragments.
1089 if fragment_end < range.start {
1090 // If the current fragment has been partially consumed, then consume the rest of it
1091 // and advance to the next fragment before slicing.
1092 if fragment_start > old_fragments.start().0.full_offset() {
1093 if fragment_end > fragment_start {
1094 let mut suffix = old_fragments.item().unwrap().clone();
1095 suffix.len = fragment_end.0 - fragment_start.0;
1096 suffix.insertion_offset +=
1097 fragment_start - old_fragments.start().0.full_offset();
1098 new_insertions.push(InsertionFragment::insert_new(&suffix));
1099 new_ropes.push_fragment(&suffix, suffix.visible);
1100 new_fragments.push(suffix, &None);
1101 }
1102 old_fragments.next();
1103 }
1104
1105 let slice =
1106 old_fragments.slice(&VersionedFullOffset::Offset(range.start), Bias::Left);
1107 new_ropes.append(slice.summary().text);
1108 new_fragments.append(slice, &None);
1109 fragment_start = old_fragments.start().0.full_offset();
1110 }
1111
1112 // If we are at the end of a non-concurrent fragment, advance to the next one.
1113 let fragment_end = old_fragments.end().0.full_offset();
1114 if fragment_end == range.start && fragment_end > fragment_start {
1115 let mut fragment = old_fragments.item().unwrap().clone();
1116 fragment.len = fragment_end.0 - fragment_start.0;
1117 fragment.insertion_offset += fragment_start - old_fragments.start().0.full_offset();
1118 new_insertions.push(InsertionFragment::insert_new(&fragment));
1119 new_ropes.push_fragment(&fragment, fragment.visible);
1120 new_fragments.push(fragment, &None);
1121 old_fragments.next();
1122 fragment_start = old_fragments.start().0.full_offset();
1123 }
1124
1125 // Skip over insertions that are concurrent to this edit, but have a lower lamport
1126 // timestamp.
1127 while let Some(fragment) = old_fragments.item() {
1128 if fragment_start == range.start && fragment.timestamp > timestamp {
1129 new_ropes.push_fragment(fragment, fragment.visible);
1130 new_fragments.push(fragment.clone(), &None);
1131 old_fragments.next();
1132 debug_assert_eq!(fragment_start, range.start);
1133 } else {
1134 break;
1135 }
1136 }
1137 debug_assert!(fragment_start <= range.start);
1138
1139 // Preserve any portion of the current fragment that precedes this range.
1140 if fragment_start < range.start {
1141 let mut prefix = old_fragments.item().unwrap().clone();
1142 prefix.len = range.start.0 - fragment_start.0;
1143 prefix.insertion_offset += fragment_start - old_fragments.start().0.full_offset();
1144 prefix.id = Locator::between(&new_fragments.summary().max_id, &prefix.id);
1145 new_insertions.push(InsertionFragment::insert_new(&prefix));
1146 fragment_start = range.start;
1147 new_ropes.push_fragment(&prefix, prefix.visible);
1148 new_fragments.push(prefix, &None);
1149 }
1150
1151 // Insert the new text before any existing fragments within the range.
1152 if !new_text.is_empty() {
1153 let mut old_start = old_fragments.start().1;
1154 if old_fragments.item().is_some_and(|f| f.visible) {
1155 old_start += fragment_start.0 - old_fragments.start().0.full_offset().0;
1156 }
1157 let new_start = new_fragments.summary().text.visible;
1158 let fragment = Fragment {
1159 id: Locator::between(
1160 &new_fragments.summary().max_id,
1161 old_fragments
1162 .item()
1163 .map_or(&Locator::max(), |old_fragment| &old_fragment.id),
1164 ),
1165 timestamp,
1166 insertion_offset,
1167 len: new_text.len(),
1168 deletions: Default::default(),
1169 max_undos: Default::default(),
1170 visible: true,
1171 };
1172 edits_patch.push(Edit {
1173 old: old_start..old_start,
1174 new: new_start..new_start + new_text.len(),
1175 });
1176 insertion_slices.push(InsertionSlice::from_fragment(timestamp, &fragment));
1177 new_insertions.push(InsertionFragment::insert_new(&fragment));
1178 new_ropes.push_str(new_text);
1179 new_fragments.push(fragment, &None);
1180 insertion_offset += new_text.len();
1181 }
1182
1183 // Advance through every fragment that intersects this range, marking the intersecting
1184 // portions as deleted.
1185 while fragment_start < range.end {
1186 let fragment = old_fragments.item().unwrap();
1187 let fragment_end = old_fragments.end().0.full_offset();
1188 let mut intersection = fragment.clone();
1189 let intersection_end = cmp::min(range.end, fragment_end);
1190 if fragment.was_visible(version, &self.undo_map) {
1191 intersection.len = intersection_end.0 - fragment_start.0;
1192 intersection.insertion_offset +=
1193 fragment_start - old_fragments.start().0.full_offset();
1194 intersection.id =
1195 Locator::between(&new_fragments.summary().max_id, &intersection.id);
1196 intersection.deletions.insert(timestamp);
1197 intersection.visible = false;
1198 insertion_slices.push(InsertionSlice::from_fragment(timestamp, &intersection));
1199 }
1200 if intersection.len > 0 {
1201 if fragment.visible && !intersection.visible {
1202 let old_start = old_fragments.start().1
1203 + (fragment_start.0 - old_fragments.start().0.full_offset().0);
1204 let new_start = new_fragments.summary().text.visible;
1205 edits_patch.push(Edit {
1206 old: old_start..old_start + intersection.len,
1207 new: new_start..new_start,
1208 });
1209 }
1210 new_insertions.push(InsertionFragment::insert_new(&intersection));
1211 new_ropes.push_fragment(&intersection, fragment.visible);
1212 new_fragments.push(intersection, &None);
1213 fragment_start = intersection_end;
1214 }
1215 if fragment_end <= range.end {
1216 old_fragments.next();
1217 }
1218 }
1219 }
1220
1221 // If the current fragment has been partially consumed, then consume the rest of it
1222 // and advance to the next fragment before slicing.
1223 if fragment_start > old_fragments.start().0.full_offset() {
1224 let fragment_end = old_fragments.end().0.full_offset();
1225 if fragment_end > fragment_start {
1226 let mut suffix = old_fragments.item().unwrap().clone();
1227 suffix.len = fragment_end.0 - fragment_start.0;
1228 suffix.insertion_offset += fragment_start - old_fragments.start().0.full_offset();
1229 new_insertions.push(InsertionFragment::insert_new(&suffix));
1230 new_ropes.push_fragment(&suffix, suffix.visible);
1231 new_fragments.push(suffix, &None);
1232 }
1233 old_fragments.next();
1234 }
1235
1236 let suffix = old_fragments.suffix();
1237 new_ropes.append(suffix.summary().text);
1238 new_fragments.append(suffix, &None);
1239 let (visible_text, deleted_text) = new_ropes.finish();
1240 drop(old_fragments);
1241
1242 self.snapshot.fragments = new_fragments;
1243 self.snapshot.visible_text = visible_text;
1244 self.snapshot.deleted_text = deleted_text;
1245 self.snapshot.insertions.edit(new_insertions, ());
1246 self.snapshot.insertion_slices.extend(insertion_slices);
1247 self.subscriptions.publish_mut(&edits_patch)
1248 }
1249
    /// Returns the locators of all fragments produced by the given edit
    /// operations, sorted in fragment-id order.
    fn fragment_ids_for_edits<'a>(
        &'a self,
        edit_ids: impl Iterator<Item = &'a clock::Lamport>,
    ) -> Vec<&'a Locator> {
        // Get all of the insertion slices changed by the given edits.
        let mut insertion_slices = Vec::new();
        for edit_id in edit_ids {
            // Seek from the minimal possible slice for this edit id; all
            // slices sharing an edit id are adjacent in the tree.
            let insertion_slice = InsertionSlice {
                edit_id: *edit_id,
                insertion_id: clock::Lamport::MIN,
                range: 0..0,
            };
            let slices = self
                .snapshot
                .insertion_slices
                .iter_from(&insertion_slice)
                .take_while(|slice| slice.edit_id == *edit_id);
            insertion_slices.extend(slices)
        }
        // Order slices so a single forward pass over the insertions tree can
        // visit them all: by insertion id, then start, longer ranges first.
        insertion_slices
            .sort_unstable_by_key(|s| (s.insertion_id, s.range.start, Reverse(s.range.end)));

        // Get all of the fragments corresponding to these insertion slices.
        let mut fragment_ids = Vec::new();
        let mut insertions_cursor = self.insertions.cursor::<InsertionFragmentKey>(());
        for insertion_slice in &insertion_slices {
            // Only seek when the cursor is not already positioned at this
            // slice's start, avoiding redundant seeks for overlapping slices.
            if insertion_slice.insertion_id != insertions_cursor.start().timestamp
                || insertion_slice.range.start > insertions_cursor.start().split_offset
            {
                insertions_cursor.seek_forward(
                    &InsertionFragmentKey {
                        timestamp: insertion_slice.insertion_id,
                        split_offset: insertion_slice.range.start,
                    },
                    Bias::Left,
                );
            }
            // Collect every insertion fragment that falls within this slice.
            while let Some(item) = insertions_cursor.item() {
                if item.timestamp != insertion_slice.insertion_id
                    || item.split_offset >= insertion_slice.range.end
                {
                    break;
                }
                fragment_ids.push(&item.fragment_id);
                insertions_cursor.next();
            }
        }
        fragment_ids.sort_unstable();
        fragment_ids
    }
1300
    /// Applies an undo operation to the buffer, recomputing the visibility of
    /// every fragment touched by the undone edits and rebuilding the visible
    /// and deleted ropes to match.
    fn apply_undo(&mut self, undo: &UndoOperation) {
        self.snapshot.undo_map.insert(undo);

        let mut edits = Patch::default();
        let mut old_fragments = self
            .fragments
            .cursor::<Dimensions<Option<&Locator>, usize>>(&None);
        let mut new_fragments = SumTree::new(&None);
        let mut new_ropes =
            RopeBuilder::new(self.visible_text.cursor(0), self.deleted_text.cursor(0));

        for fragment_id in self.fragment_ids_for_edits(undo.counts.keys()) {
            // Copy all fragments preceding the affected one unchanged.
            let preceding_fragments = old_fragments.slice(&Some(fragment_id), Bias::Left);
            new_ropes.append(preceding_fragments.summary().text);
            new_fragments.append(preceding_fragments, &None);

            if let Some(fragment) = old_fragments.item() {
                let mut fragment = fragment.clone();
                let fragment_was_visible = fragment.visible;

                // Recompute visibility now that the undo map contains this undo.
                fragment.visible = fragment.is_visible(&self.undo_map);
                fragment.max_undos.observe(undo.timestamp);

                let old_start = old_fragments.start().1;
                let new_start = new_fragments.summary().text.visible;
                // Emit an edit whenever the fragment's visibility flipped.
                if fragment_was_visible && !fragment.visible {
                    edits.push(Edit {
                        old: old_start..old_start + fragment.len,
                        new: new_start..new_start,
                    });
                } else if !fragment_was_visible && fragment.visible {
                    edits.push(Edit {
                        old: old_start..old_start,
                        new: new_start..new_start + fragment.len,
                    });
                }
                // Pass the *old* visibility so the builder reads this
                // fragment's text from the rope that currently holds it.
                new_ropes.push_fragment(&fragment, fragment_was_visible);
                new_fragments.push(fragment, &None);

                old_fragments.next();
            }
        }

        let suffix = old_fragments.suffix();
        new_ropes.append(suffix.summary().text);
        new_fragments.append(suffix, &None);

        drop(old_fragments);
        let (visible_text, deleted_text) = new_ropes.finish();
        self.snapshot.fragments = new_fragments;
        self.snapshot.visible_text = visible_text;
        self.snapshot.deleted_text = deleted_text;
        self.subscriptions.publish_mut(&edits);
    }
1355
    /// Attempts to apply all previously deferred operations, re-deferring any
    /// whose dependencies are still missing.
    fn flush_deferred_ops(&mut self) {
        self.deferred_replicas.clear();
        let mut deferred_ops = Vec::new();
        // Ops must be processed in order: once an op is re-deferred, its
        // replica is recorded in `deferred_replicas`, which forces every
        // later op from that replica to be deferred as well (see
        // `can_apply_op`).
        for op in self.deferred_ops.drain().iter().cloned() {
            if self.can_apply_op(&op) {
                self.apply_op(op);
            } else {
                self.deferred_replicas.insert(op.replica_id());
                deferred_ops.push(op);
            }
        }
        self.deferred_ops.insert(deferred_ops);
    }
1369
1370 fn can_apply_op(&self, op: &Operation) -> bool {
1371 if self.deferred_replicas.contains(&op.replica_id()) {
1372 false
1373 } else {
1374 self.version.observed_all(match op {
1375 Operation::Edit(edit) => &edit.version,
1376 Operation::Undo(undo) => &undo.version,
1377 })
1378 }
1379 }
1380
    /// Reports whether any operations are still waiting on missing
    /// dependencies.
    pub fn has_deferred_ops(&self) -> bool {
        !self.deferred_ops.is_empty()
    }

    /// Returns the most recent entry on the undo stack, if any.
    pub fn peek_undo_stack(&self) -> Option<&HistoryEntry> {
        self.history.undo_stack.last()
    }

    /// Returns the most recent entry on the redo stack, if any.
    pub fn peek_redo_stack(&self) -> Option<&HistoryEntry> {
        self.history.redo_stack.last()
    }

    /// Starts a transaction at the current time. See
    /// [`Self::start_transaction_at`].
    pub fn start_transaction(&mut self) -> Option<TransactionId> {
        self.start_transaction_at(Instant::now())
    }

    /// Starts a transaction with the given timestamp, returning its id when a
    /// new transaction was actually begun.
    pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
        self.history
            .start_transaction(self.version.clone(), now, &mut self.lamport_clock)
    }

    /// Ends the current transaction at the current time. See
    /// [`Self::end_transaction_at`].
    pub fn end_transaction(&mut self) -> Option<(TransactionId, clock::Global)> {
        self.end_transaction_at(Instant::now())
    }
1405
1406 pub fn end_transaction_at(&mut self, now: Instant) -> Option<(TransactionId, clock::Global)> {
1407 if let Some(entry) = self.history.end_transaction(now) {
1408 let since = entry.transaction.start.clone();
1409 let id = self.history.group().unwrap();
1410 Some((id, since))
1411 } else {
1412 None
1413 }
1414 }
1415
    /// Marks the last transaction as finalized so subsequent edits are not
    /// grouped into it.
    pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
        self.history.finalize_last_transaction()
    }

    /// Merges all transactions newer than `transaction_id` into it.
    pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
        self.history.group_until(transaction_id);
    }

    /// The text the buffer started from, before any edits were applied.
    pub fn base_text(&self) -> &Rope {
        &self.history.base_text
    }

    /// All operations recorded in this buffer's history, keyed by timestamp.
    pub fn operations(&self) -> &TreeMap<clock::Lamport, Operation> {
        &self.history.operations
    }
1431
1432 pub fn undo(&mut self) -> Option<(TransactionId, Operation)> {
1433 if let Some(entry) = self.history.pop_undo() {
1434 let transaction = entry.transaction.clone();
1435 let transaction_id = transaction.id;
1436 let op = self.undo_or_redo(transaction);
1437 Some((transaction_id, op))
1438 } else {
1439 None
1440 }
1441 }
1442
1443 pub fn undo_transaction(&mut self, transaction_id: TransactionId) -> Option<Operation> {
1444 let transaction = self
1445 .history
1446 .remove_from_undo(transaction_id)?
1447 .transaction
1448 .clone();
1449 Some(self.undo_or_redo(transaction))
1450 }
1451
    /// Undoes every transaction on the undo stack up to and including the one
    /// with the given id, returning the operations to broadcast.
    pub fn undo_to_transaction(&mut self, transaction_id: TransactionId) -> Vec<Operation> {
        // Clone the transactions up front so the borrow of `self.history`
        // ends before `undo_or_redo` takes `&mut self`.
        let transactions = self
            .history
            .remove_from_undo_until(transaction_id)
            .iter()
            .map(|entry| entry.transaction.clone())
            .collect::<Vec<_>>();

        transactions
            .into_iter()
            .map(|transaction| self.undo_or_redo(transaction))
            .collect()
    }
1465
    /// Removes the transaction with the given id from the history entirely.
    pub fn forget_transaction(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
        self.history.forget(transaction_id)
    }

    /// Looks up a transaction in the history by id.
    pub fn get_transaction(&self, transaction_id: TransactionId) -> Option<&Transaction> {
        self.history.transaction(transaction_id)
    }

    /// Merges `transaction` into `destination` within the history.
    pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
        self.history.merge_transactions(transaction, destination);
    }
1477
1478 pub fn redo(&mut self) -> Option<(TransactionId, Operation)> {
1479 if let Some(entry) = self.history.pop_redo() {
1480 let transaction = entry.transaction.clone();
1481 let transaction_id = transaction.id;
1482 let op = self.undo_or_redo(transaction);
1483 Some((transaction_id, op))
1484 } else {
1485 None
1486 }
1487 }
1488
    /// Redoes every transaction on the redo stack up to and including the one
    /// with the given id, returning the operations to broadcast.
    pub fn redo_to_transaction(&mut self, transaction_id: TransactionId) -> Vec<Operation> {
        // Clone the transactions up front so the borrow of `self.history`
        // ends before `undo_or_redo` takes `&mut self`.
        let transactions = self
            .history
            .remove_from_redo(transaction_id)
            .iter()
            .map(|entry| entry.transaction.clone())
            .collect::<Vec<_>>();

        transactions
            .into_iter()
            .map(|transaction| self.undo_or_redo(transaction))
            .collect()
    }
1502
1503 fn undo_or_redo(&mut self, transaction: Transaction) -> Operation {
1504 let mut counts = HashMap::default();
1505 for edit_id in transaction.edit_ids {
1506 counts.insert(edit_id, self.undo_map.undo_count(edit_id).saturating_add(1));
1507 }
1508
1509 let operation = self.undo_operations(counts);
1510 self.history.push(operation.clone());
1511 operation
1512 }
1513
    /// Builds an undo operation carrying the given undo counts, applies it
    /// locally, and returns it for broadcast to other replicas.
    pub fn undo_operations(&mut self, counts: HashMap<clock::Lamport, u32>) -> Operation {
        let timestamp = self.lamport_clock.tick();
        // Capture the version before observing the new timestamp so the
        // operation's version describes only the state it depends on.
        let version = self.version();
        self.snapshot.version.observe(timestamp);
        let undo = UndoOperation {
            timestamp,
            version,
            counts,
        };
        self.apply_undo(&undo);
        Operation::Undo(undo)
    }
1526
    /// Pushes a transaction onto the undo stack.
    pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
        self.history.push_transaction(transaction, now);
    }

    /// Differs from `push_transaction` in that it does not clear the redo
    /// stack. Intended to be used to create a parent transaction to merge
    /// potential child transactions into.
    ///
    /// The caller is responsible for removing it from the undo history using
    /// `forget_transaction` if no edits are merged into it. Otherwise, if edits
    /// are merged into this transaction, the caller is responsible for ensuring
    /// the redo stack is cleared. The easiest way to ensure the redo stack is
    /// cleared is to create transactions with the usual `start_transaction` and
    /// `end_transaction` methods and merging the resulting transactions into
    /// the transaction created by this method.
    pub fn push_empty_transaction(&mut self, now: Instant) -> TransactionId {
        self.history
            .push_empty_transaction(self.version.clone(), now, &mut self.lamport_clock)
    }
1548
    /// Returns the current ranges of text affected by the transaction with
    /// the given id, or an empty iterator if the transaction is unknown.
    pub fn edited_ranges_for_transaction_id<D>(
        &self,
        transaction_id: TransactionId,
    ) -> impl '_ + Iterator<Item = Range<D>>
    where
        D: TextDimension,
    {
        self.history
            .transaction(transaction_id)
            .into_iter()
            .flat_map(|transaction| self.edited_ranges_for_transaction(transaction))
    }
1561
    /// Returns the current ranges occupied by the fragments belonging to the
    /// given edit ids, merged where adjacent and converted to dimension `D`.
    pub fn edited_ranges_for_edit_ids<'a, D>(
        &'a self,
        edit_ids: impl IntoIterator<Item = &'a clock::Lamport>,
    ) -> impl 'a + Iterator<Item = Range<D>>
    where
        D: TextDimension,
    {
        // Map each fragment to its current offset range. The fragment ids are
        // sorted (see `fragment_ids_for_edits`), so `seek_forward` traverses
        // the tree in one pass. Invisible fragments yield empty ranges.
        let mut cursor = self
            .fragments
            .cursor::<Dimensions<Option<&Locator>, usize>>(&None);
        let offset_ranges = self
            .fragment_ids_for_edits(edit_ids.into_iter())
            .into_iter()
            .filter_map(move |fragment_id| {
                cursor.seek_forward(&Some(fragment_id), Bias::Left);
                let fragment = cursor.item()?;
                let start_offset = cursor.start().1;
                let end_offset = start_offset + if fragment.visible { fragment.len } else { 0 };
                Some(start_offset..end_offset)
            });

        // Combine adjacent ranges, holding each pending range in `prev_range`
        // until a non-adjacent one arrives.
        let mut prev_range: Option<Range<usize>> = None;
        let disjoint_ranges = offset_ranges
            .map(Some)
            .chain([None]) // sentinel flushes the final pending range
            .filter_map(move |range| {
                if let Some((range, prev_range)) = range.as_ref().zip(prev_range.as_mut())
                    && prev_range.end == range.start
                {
                    prev_range.end = range.end;
                    return None;
                }
                let result = prev_range.clone();
                prev_range = range;
                result
            });

        // Convert to the desired text dimension, accumulating summaries so
        // the rope is traversed forward only once.
        let mut position = D::zero(());
        let mut rope_cursor = self.visible_text.cursor(0);
        disjoint_ranges.map(move |range| {
            position.add_assign(&rope_cursor.summary(range.start));
            let start = position;
            position.add_assign(&rope_cursor.summary(range.end));
            let end = position;
            start..end
        })
    }
1612
    /// Returns the current ranges of text affected by the given transaction.
    pub fn edited_ranges_for_transaction<'a, D>(
        &'a self,
        transaction: &'a Transaction,
    ) -> impl 'a + Iterator<Item = Range<D>>
    where
        D: TextDimension,
    {
        self.edited_ranges_for_edit_ids(&transaction.edit_ids)
    }

    /// Registers a new subscriber that will be notified of subsequent edits
    /// to this buffer.
    pub fn subscribe(&mut self) -> Subscription<usize> {
        self.subscriptions.subscribe()
    }
1626
    /// Returns a future that resolves once all of the given edits have been
    /// observed by this buffer, or fails if waiting is abandoned (see
    /// [`Self::give_up_waiting`]).
    pub fn wait_for_edits<It: IntoIterator<Item = clock::Lamport>>(
        &mut self,
        edit_ids: It,
    ) -> impl 'static + Future<Output = Result<()>> + use<It> {
        let mut futures = Vec::new();
        for edit_id in edit_ids {
            // Only wait on edits that haven't been observed yet.
            if !self.version.observed(edit_id) {
                let (tx, rx) = oneshot::channel();
                self.edit_id_resolvers.entry(edit_id).or_default().push(tx);
                futures.push(rx);
            }
        }

        async move {
            for mut future in futures {
                // A dropped sender means the resolvers were cleared before
                // the edit arrived.
                if future.recv().await.is_none() {
                    anyhow::bail!("gave up waiting for edits");
                }
            }
            Ok(())
        }
    }
1649
    /// Returns a future that resolves once the edits referenced by all of the
    /// given anchors have been observed by this buffer.
    pub fn wait_for_anchors<It: IntoIterator<Item = Anchor>>(
        &mut self,
        anchors: It,
    ) -> impl 'static + Future<Output = Result<()>> + use<It> {
        let mut futures = Vec::new();
        for anchor in anchors {
            // Min/max anchors never need waiting; otherwise wait only for
            // anchors whose creating edit hasn't been observed yet.
            if !self.version.observed(anchor.timestamp) && !anchor.is_max() && !anchor.is_min() {
                let (tx, rx) = oneshot::channel();
                self.edit_id_resolvers
                    .entry(anchor.timestamp)
                    .or_default()
                    .push(tx);
                futures.push(rx);
            }
        }

        async move {
            for mut future in futures {
                // A dropped sender means the resolvers were cleared before
                // the edit arrived.
                if future.recv().await.is_none() {
                    anyhow::bail!("gave up waiting for anchors");
                }
            }
            Ok(())
        }
    }
1675
    /// Returns a future that resolves once this buffer has observed every
    /// operation in the given version.
    pub fn wait_for_version(
        &mut self,
        version: clock::Global,
    ) -> impl Future<Output = Result<()>> + use<> {
        let mut rx = None;
        // If the version is already fully observed, the future resolves
        // immediately without registering a waiter.
        if !self.snapshot.version.observed_all(&version) {
            let channel = oneshot::channel();
            self.wait_for_version_txs.push((version, channel.0));
            rx = Some(channel.1);
        }
        async move {
            if let Some(mut rx) = rx
                && rx.recv().await.is_none()
            {
                anyhow::bail!("gave up waiting for version");
            }
            Ok(())
        }
    }
1695
    /// Cancels all pending `wait_for_*` futures. Dropping the senders causes
    /// each waiting future to resolve with an error.
    pub fn give_up_waiting(&mut self) {
        self.edit_id_resolvers.clear();
        self.wait_for_version_txs.clear();
    }
1700
1701 fn resolve_edit(&mut self, edit_id: clock::Lamport) {
1702 for mut tx in self
1703 .edit_id_resolvers
1704 .remove(&edit_id)
1705 .into_iter()
1706 .flatten()
1707 {
1708 tx.try_send(()).ok();
1709 }
1710 }
1711}
1712
#[cfg(any(test, feature = "test-support"))]
impl Buffer {
    /// Applies the edit described by `marked_string` to the buffer. See
    /// [`Self::edits_for_marked_text`].
    #[track_caller]
    pub fn edit_via_marked_text(&mut self, marked_string: &str) {
        let edits = self.edits_for_marked_text(marked_string);
        self.edit(edits);
    }

    /// Computes the edits that transform the buffer's current text into
    /// `marked_string`, where the marked ranges identify the inserted text.
    /// Panics if `marked_string` cannot be derived from the current text.
    #[track_caller]
    pub fn edits_for_marked_text(&self, marked_string: &str) -> Vec<(Range<usize>, String)> {
        let old_text = self.text();
        let (new_text, mut ranges) = util::test::marked_text_ranges(marked_string, false);
        // With no explicit markers, treat the entire new text as one insertion.
        if ranges.is_empty() {
            ranges.push(0..new_text.len());
        }

        // The text before the first marked range must be unchanged.
        assert_eq!(
            old_text[..ranges[0].start],
            new_text[..ranges[0].start],
            "invalid edit"
        );

        // `delta` tracks how far new-text offsets have drifted from old-text
        // offsets due to the edits processed so far.
        let mut delta = 0;
        let mut edits = Vec::new();
        let mut ranges = ranges.into_iter().peekable();

        while let Some(inserted_range) = ranges.next() {
            let new_start = inserted_range.start;
            let old_start = (new_start as isize - delta) as usize;

            // The unchanged text between this insertion and the next one (or
            // the end of the string) anchors the deleted span in the old text.
            let following_text = if let Some(next_range) = ranges.peek() {
                &new_text[inserted_range.end..next_range.start]
            } else {
                &new_text[inserted_range.end..]
            };

            let inserted_len = inserted_range.len();
            let deleted_len = old_text[old_start..]
                .find(following_text)
                .expect("invalid edit");

            let old_range = old_start..old_start + deleted_len;
            edits.push((old_range, new_text[inserted_range].to_string()));
            delta += inserted_len as isize - deleted_len as isize;
        }

        // Sanity check: the edits account for the entire length difference.
        assert_eq!(
            old_text.len() as isize + delta,
            new_text.len() as isize,
            "invalid edit"
        );

        edits
    }

    /// Checks internal consistency of the buffer, panicking on violations.
    pub fn check_invariants(&self) {
        // Ensure every fragment is ordered by locator in the fragment tree and corresponds
        // to an insertion fragment in the insertions tree.
        let mut prev_fragment_id = Locator::min();
        for fragment in self.snapshot.fragments.items(&None) {
            assert!(fragment.id > prev_fragment_id);
            prev_fragment_id = fragment.id.clone();

            let insertion_fragment = self
                .snapshot
                .insertions
                .get(
                    &InsertionFragmentKey {
                        timestamp: fragment.timestamp,
                        split_offset: fragment.insertion_offset,
                    },
                    (),
                )
                .unwrap();
            assert_eq!(
                insertion_fragment.fragment_id, fragment.id,
                "fragment: {:?}\ninsertion: {:?}",
                fragment, insertion_fragment
            );
        }

        // Conversely, every insertion fragment must point back at a fragment
        // with a matching insertion offset.
        let mut cursor = self.snapshot.fragments.cursor::<Option<&Locator>>(&None);
        for insertion_fragment in self.snapshot.insertions.cursor::<()>(()) {
            cursor.seek(&Some(&insertion_fragment.fragment_id), Bias::Left);
            let fragment = cursor.item().unwrap();
            assert_eq!(insertion_fragment.fragment_id, fragment.id);
            assert_eq!(insertion_fragment.split_offset, fragment.insertion_offset);
        }

        // The fragment summaries must agree with the actual rope lengths.
        let fragment_summary = self.snapshot.fragments.summary();
        assert_eq!(
            fragment_summary.text.visible,
            self.snapshot.visible_text.len()
        );
        assert_eq!(
            fragment_summary.text.deleted,
            self.snapshot.deleted_text.len()
        );

        assert!(!self.text().contains("\r\n"));
    }

    /// Overrides the interval within which consecutive edits are grouped into
    /// one transaction.
    pub fn set_group_interval(&mut self, group_interval: Duration) {
        self.history.group_interval = group_interval;
    }

    /// Returns a random byte range starting at or after `start_offset`, with
    /// both endpoints clipped via `clip_offset`.
    pub fn random_byte_range(&self, start_offset: usize, rng: &mut impl rand::Rng) -> Range<usize> {
        let end = self.clip_offset(rng.random_range(start_offset..=self.len()), Bias::Right);
        let start = self.clip_offset(rng.random_range(start_offset..=end), Bias::Right);
        start..end
    }

    /// Generates up to `edit_count` random, non-overlapping edits ordered by
    /// position.
    pub fn get_random_edits<T>(
        &self,
        rng: &mut T,
        edit_count: usize,
    ) -> Vec<(Range<usize>, Arc<str>)>
    where
        T: rand::Rng,
    {
        let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
        let mut last_end = None;
        for _ in 0..edit_count {
            if last_end.is_some_and(|last_end| last_end >= self.len()) {
                break;
            }
            // Leave at least one byte between consecutive edits so they can't
            // touch.
            let new_start = last_end.map_or(0, |last_end| last_end + 1);
            let range = self.random_byte_range(new_start, rng);
            last_end = Some(range.end);

            let new_text_len = rng.random_range(0..10);
            let new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();

            edits.push((range, new_text.into()));
        }
        edits
    }

    /// Applies `edit_count` random edits to the buffer, returning the edits
    /// (with their text as recorded in the operation) and the operation
    /// itself.
    pub fn randomly_edit<T>(
        &mut self,
        rng: &mut T,
        edit_count: usize,
    ) -> (Vec<(Range<usize>, Arc<str>)>, Operation)
    where
        T: rand::Rng,
    {
        let mut edits = self.get_random_edits(rng, edit_count);
        log::info!("mutating buffer {:?} with {:?}", self.replica_id, edits);

        let op = self.edit(edits.iter().cloned());
        if let Operation::Edit(edit) = &op {
            assert_eq!(edits.len(), edit.new_text.len());
            // Adopt the text the operation actually recorded — presumably
            // `edit` can normalize the text it is given; verify against `edit`.
            for (edit, new_text) in edits.iter_mut().zip(&edit.new_text) {
                edit.1 = new_text.clone();
            }
        } else {
            unreachable!()
        }

        (edits, op)
    }

    /// Undoes up to five randomly-chosen transactions from the undo stack,
    /// returning the operations to broadcast.
    pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng) -> Vec<Operation> {
        use rand::prelude::*;

        let mut ops = Vec::new();
        for _ in 0..rng.random_range(1..=5) {
            if let Some(entry) = self.history.undo_stack.choose(rng) {
                let transaction = entry.transaction.clone();
                log::info!(
                    "undoing buffer {:?} transaction {:?}",
                    self.replica_id,
                    transaction
                );
                ops.push(self.undo_or_redo(transaction));
            }
        }
        ops
    }
}
1893
/// A `Buffer` dereferences to its current snapshot, so all read-only
/// `BufferSnapshot` methods are callable on a `Buffer` directly.
impl Deref for Buffer {
    type Target = BufferSnapshot;

    fn deref(&self) -> &Self::Target {
        &self.snapshot
    }
}
1901
1902impl BufferSnapshot {
    /// The buffer's visible text, as a rope.
    pub fn as_rope(&self) -> &Rope {
        &self.visible_text
    }
1906
    /// Reconstructs the buffer's text as it was at the given version by
    /// stitching together pieces of the current visible and deleted ropes.
    pub fn rope_for_version(&self, version: &clock::Global) -> Rope {
        let mut rope = Rope::new();

        // Visit only fragments containing changes not yet observed at
        // `version`; runs of unchanged fragments are skipped by the filter
        // and copied wholesale below.
        let mut cursor = self
            .fragments
            .filter::<_, FragmentTextSummary>(&None, move |summary| {
                !version.observed_all(&summary.max_version)
            });
        cursor.next();

        let mut visible_cursor = self.visible_text.cursor(0);
        let mut deleted_cursor = self.deleted_text.cursor(0);

        while let Some(fragment) = cursor.item() {
            // Copy any skipped (unchanged, visible) text preceding this fragment.
            if cursor.start().visible > visible_cursor.offset() {
                let text = visible_cursor.slice(cursor.start().visible);
                rope.append(text);
            }

            if fragment.was_visible(version, &self.undo_map) {
                // Visible at `version`: read the text from whichever rope
                // currently holds it.
                if fragment.visible {
                    let text = visible_cursor.slice(cursor.end().visible);
                    rope.append(text);
                } else {
                    deleted_cursor.seek_forward(cursor.start().deleted);
                    let text = deleted_cursor.slice(cursor.end().deleted);
                    rope.append(text);
                }
            } else if fragment.visible {
                // Not visible at `version` but visible now: skip its text.
                visible_cursor.seek_forward(cursor.end().visible);
            }

            cursor.next();
        }

        // Copy any remaining unchanged text after the last changed fragment.
        if cursor.start().visible > visible_cursor.offset() {
            let text = visible_cursor.slice(cursor.start().visible);
            rope.append(text);
        }

        rope
    }
1949
    /// The id identifying this buffer across replicas.
    pub fn remote_id(&self) -> BufferId {
        self.remote_id
    }

    /// The id of the replica this snapshot belongs to.
    pub fn replica_id(&self) -> ReplicaId {
        self.replica_id
    }

    /// The number of lines in the buffer (rows are zero-based).
    pub fn row_count(&self) -> u32 {
        self.max_point().row + 1
    }

    /// The length of the visible text, in bytes.
    pub fn len(&self) -> usize {
        self.visible_text.len()
    }

    /// Whether the visible text is empty.
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }

    /// Iterates over all characters in the buffer.
    pub fn chars(&self) -> impl Iterator<Item = char> + '_ {
        self.chars_at(0)
    }

    /// Iterates over the characters within the given range.
    pub fn chars_for_range<T: ToOffset>(&self, range: Range<T>) -> impl Iterator<Item = char> + '_ {
        self.text_for_range(range).flat_map(str::chars)
    }

    /// Iterates over the characters within the given range, in reverse order.
    pub fn reversed_chars_for_range<T: ToOffset>(
        &self,
        range: Range<T>,
    ) -> impl Iterator<Item = char> + '_ {
        self.reversed_chunks_in_range(range)
            .flat_map(|chunk| chunk.chars().rev())
    }
1985
1986 pub fn contains_str_at<T>(&self, position: T, needle: &str) -> bool
1987 where
1988 T: ToOffset,
1989 {
1990 let position = position.to_offset(self);
1991 position == self.clip_offset(position, Bias::Left)
1992 && self
1993 .bytes_in_range(position..self.len())
1994 .flatten()
1995 .copied()
1996 .take(needle.len())
1997 .eq(needle.bytes())
1998 }
1999
    /// Returns the range of the longest prefix of `needle` that
    /// case-insensitively matches the text ending at `position`.
    pub fn common_prefix_at<T>(&self, position: T, needle: &str) -> Range<T>
    where
        T: ToOffset + TextDimension,
    {
        let offset = position.to_offset(self);
        // Try every char-boundary prefix length of `needle` (including its
        // full length), keeping the longest one that matches the text
        // immediately preceding `position`, ignoring case.
        let common_prefix_len = needle
            .char_indices()
            .map(|(index, _)| index)
            .chain([needle.len()])
            .take_while(|&len| len <= offset)
            .filter(|&len| {
                let left = self
                    .chars_for_range(offset - len..offset)
                    .flat_map(char::to_lowercase);
                let right = needle[..len].chars().flat_map(char::to_lowercase);
                left.eq(right)
            })
            .last()
            .unwrap_or(0);
        let start_offset = offset - common_prefix_len;
        let start = self.text_summary_for_range(0..start_offset);
        start..position
    }
2023
    /// The buffer's visible text, as a `String`.
    pub fn text(&self) -> String {
        self.visible_text.to_string()
    }

    /// The line ending style of this buffer.
    pub fn line_ending(&self) -> LineEnding {
        self.line_ending
    }

    /// All deleted text retained by the buffer, as a `String`.
    pub fn deleted_text(&self) -> String {
        self.deleted_text.to_string()
    }

    /// Iterates over all fragments in the buffer.
    pub fn fragments(&self) -> impl Iterator<Item = &Fragment> {
        self.fragments.iter()
    }

    /// Summarizes the visible text.
    pub fn text_summary(&self) -> TextSummary {
        self.visible_text.summary()
    }

    /// The maximum valid position in the buffer.
    pub fn max_point(&self) -> Point {
        self.visible_text.max_point()
    }

    /// The maximum valid position, in UTF-16 coordinates.
    pub fn max_point_utf16(&self) -> PointUtf16 {
        self.visible_text.max_point_utf16()
    }
2051
    /// Converts a UTF-8 point to a byte offset.
    pub fn point_to_offset(&self, point: Point) -> usize {
        self.visible_text.point_to_offset(point)
    }

    /// Converts a UTF-8 point to a UTF-16 offset.
    pub fn point_to_offset_utf16(&self, point: Point) -> OffsetUtf16 {
        self.visible_text.point_to_offset_utf16(point)
    }

    /// Converts a UTF-16 point to a UTF-16 offset.
    pub fn point_utf16_to_offset_utf16(&self, point: PointUtf16) -> OffsetUtf16 {
        self.visible_text.point_utf16_to_offset_utf16(point)
    }

    /// Converts a UTF-16 point to a byte offset.
    pub fn point_utf16_to_offset(&self, point: PointUtf16) -> usize {
        self.visible_text.point_utf16_to_offset(point)
    }

    /// Converts a potentially-unclipped UTF-16 point to a byte offset.
    pub fn unclipped_point_utf16_to_offset(&self, point: Unclipped<PointUtf16>) -> usize {
        self.visible_text.unclipped_point_utf16_to_offset(point)
    }

    /// Converts a potentially-unclipped UTF-16 point to a UTF-8 point.
    pub fn unclipped_point_utf16_to_point(&self, point: Unclipped<PointUtf16>) -> Point {
        self.visible_text.unclipped_point_utf16_to_point(point)
    }

    /// Converts a UTF-16 offset to a byte offset.
    pub fn offset_utf16_to_offset(&self, offset: OffsetUtf16) -> usize {
        self.visible_text.offset_utf16_to_offset(offset)
    }

    /// Converts a byte offset to a UTF-16 offset.
    pub fn offset_to_offset_utf16(&self, offset: usize) -> OffsetUtf16 {
        self.visible_text.offset_to_offset_utf16(offset)
    }

    /// Converts a byte offset to a UTF-8 point.
    pub fn offset_to_point(&self, offset: usize) -> Point {
        self.visible_text.offset_to_point(offset)
    }

    /// Converts a byte offset to a UTF-16 point.
    pub fn offset_to_point_utf16(&self, offset: usize) -> PointUtf16 {
        self.visible_text.offset_to_point_utf16(offset)
    }

    /// Converts a UTF-8 point to a UTF-16 point.
    pub fn point_to_point_utf16(&self, point: Point) -> PointUtf16 {
        self.visible_text.point_to_point_utf16(point)
    }

    /// Converts a UTF-16 point to a UTF-8 point.
    pub fn point_utf16_to_point(&self, point: PointUtf16) -> Point {
        self.visible_text.point_utf16_to_point(point)
    }

    /// The version vector describing which operations this snapshot reflects.
    pub fn version(&self) -> &clock::Global {
        &self.version
    }
2103
    /// Iterates over characters starting at the given position.
    pub fn chars_at<T: ToOffset>(&self, position: T) -> impl Iterator<Item = char> + '_ {
        let offset = position.to_offset(self);
        self.visible_text.chars_at(offset)
    }

    /// Iterates over characters before the given position, in reverse order.
    pub fn reversed_chars_at<T: ToOffset>(&self, position: T) -> impl Iterator<Item = char> + '_ {
        let offset = position.to_offset(self);
        self.visible_text.reversed_chars_at(offset)
    }

    /// Iterates over the text chunks within the given range, in reverse order.
    pub fn reversed_chunks_in_range<T: ToOffset>(&self, range: Range<T>) -> rope::Chunks<'_> {
        let range = range.start.to_offset(self)..range.end.to_offset(self);
        self.visible_text.reversed_chunks_in_range(range)
    }

    /// Iterates over the bytes within the given range.
    pub fn bytes_in_range<T: ToOffset>(&self, range: Range<T>) -> rope::Bytes<'_> {
        let start = range.start.to_offset(self);
        let end = range.end.to_offset(self);
        self.visible_text.bytes_in_range(start..end)
    }

    /// Iterates over the bytes within the given range, in reverse order.
    pub fn reversed_bytes_in_range<T: ToOffset>(&self, range: Range<T>) -> rope::Bytes<'_> {
        let start = range.start.to_offset(self);
        let end = range.end.to_offset(self);
        self.visible_text.reversed_bytes_in_range(start..end)
    }

    /// Iterates over the text chunks within the given range.
    pub fn text_for_range<T: ToOffset>(&self, range: Range<T>) -> Chunks<'_> {
        let start = range.start.to_offset(self);
        let end = range.end.to_offset(self);
        self.visible_text.chunks_in_range(start..end)
    }
2136
2137 pub fn line_len(&self, row: u32) -> u32 {
2138 let row_start_offset = Point::new(row, 0).to_offset(self);
2139 let row_end_offset = if row >= self.max_point().row {
2140 self.len()
2141 } else {
2142 Point::new(row + 1, 0).to_previous_offset(self)
2143 };
2144 (row_end_offset - row_start_offset) as u32
2145 }
2146
    /// Returns `(row, LineIndent)` pairs for every row in `row_range`,
    /// inclusive of the end row.
    pub fn line_indents_in_row_range(
        &self,
        row_range: Range<u32>,
    ) -> impl Iterator<Item = (u32, LineIndent)> + '_ {
        let start = Point::new(row_range.start, 0).to_offset(self);
        let end = Point::new(row_range.end, self.line_len(row_range.end)).to_offset(self);

        let mut chunks = self.as_rope().chunks_in_range(start..end);
        let mut row = row_range.start;
        let mut done = false;
        std::iter::from_fn(move || {
            if done {
                None
            } else {
                // `from_chunks` measures the current line's indent (it
                // advances the chunk cursor); `next_line` then moves to the
                // start of the next line, returning false once exhausted.
                let indent = (row, LineIndent::from_chunks(&mut chunks));
                done = !chunks.next_line();
                row += 1;
                Some(indent)
            }
        })
    }
2168
    /// Returns the line indents in the given row range, exclusive of end row, in reversed order.
    pub fn reversed_line_indents_in_row_range(
        &self,
        row_range: Range<u32>,
    ) -> impl Iterator<Item = (u32, LineIndent)> + '_ {
        let start = Point::new(row_range.start, 0).to_offset(self);

        let end_point;
        let end;
        if row_range.end > row_range.start {
            // The last row actually yielded is `row_range.end - 1`.
            end_point = Point::new(row_range.end - 1, self.line_len(row_range.end - 1));
            end = end_point.to_offset(self);
        } else {
            // Empty range: start and end coincide, so the iterator yields
            // a single entry for `row_range.start`.
            end_point = Point::new(row_range.start, 0);
            end = start;
        };

        let mut chunks = self.as_rope().chunks_in_range(start..end);
        // Move the cursor to the start of the last line if it's not empty.
        chunks.seek(end);
        if end_point.column > 0 {
            chunks.prev_line();
        }

        let mut row = end_point.row;
        let mut done = false;
        std::iter::from_fn(move || {
            if done {
                None
            } else {
                let initial_offset = chunks.offset();
                let indent = (row, LineIndent::from_chunks(&mut chunks));
                // `from_chunks` advanced into the current line; rewind to
                // its start before stepping back to the previous line.
                if chunks.offset() > initial_offset {
                    chunks.prev_line();
                }
                done = !chunks.prev_line();
                if !done {
                    row -= 1;
                }

                Some(indent)
            }
        })
    }
2213
    /// Returns the indentation of the given row, computed from its leading
    /// characters.
    pub fn line_indent_for_row(&self, row: u32) -> LineIndent {
        LineIndent::from_iter(self.chars_at(Point::new(row, 0)))
    }
2217
2218 pub fn is_line_blank(&self, row: u32) -> bool {
2219 self.text_for_range(Point::new(row, 0)..Point::new(row, self.line_len(row)))
2220 .all(|chunk| chunk.matches(|c: char| !c.is_whitespace()).next().is_none())
2221 }
2222
    /// Computes a text summary (in the dimension `D`, e.g. `Point` or
    /// `usize`) of the visible text within `range`.
    pub fn text_summary_for_range<D, O: ToOffset>(&self, range: Range<O>) -> D
    where
        D: TextDimension,
    {
        self.visible_text
            .cursor(range.start.to_offset(self))
            .summary(range.end.to_offset(self))
    }

    /// Resolves each anchor to a position expressed in the dimension `D`.
    /// Delegates to `summaries_for_anchors_with_payload` with a unit payload,
    /// so the same ordering requirements apply.
    pub fn summaries_for_anchors<'a, D, A>(&'a self, anchors: A) -> impl 'a + Iterator<Item = D>
    where
        D: 'a + TextDimension,
        A: 'a + IntoIterator<Item = &'a Anchor>,
    {
        let anchors = anchors.into_iter();
        self.summaries_for_anchors_with_payload::<D, _, ()>(anchors.map(|a| (a, ())))
            .map(|d| d.0)
    }
2241
    /// Resolves each `(anchor, payload)` pair to a `(position, payload)`
    /// pair, where the position is expressed in the dimension `D`.
    ///
    /// The cursors only move forward (`seek_forward`), so anchors are
    /// expected to arrive in ascending buffer order.
    ///
    /// Panics if an anchor's insertion cannot be found or does not match the
    /// anchor's timestamp.
    pub fn summaries_for_anchors_with_payload<'a, D, A, T>(
        &'a self,
        anchors: A,
    ) -> impl 'a + Iterator<Item = (D, T)>
    where
        D: 'a + TextDimension,
        A: 'a + IntoIterator<Item = (&'a Anchor, T)>,
    {
        let anchors = anchors.into_iter();
        let mut fragment_cursor = self
            .fragments
            .cursor::<Dimensions<Option<&Locator>, usize>>(&None);
        let mut text_cursor = self.visible_text.cursor(0);
        // Running position, accumulated incrementally as we advance through
        // successive anchors.
        let mut position = D::zero(());

        anchors.map(move |(anchor, payload)| {
            // The sentinel min/max anchors resolve without a fragment lookup.
            if anchor.is_min() {
                return (D::zero(()), payload);
            } else if anchor.is_max() {
                return (D::from_text_summary(&self.visible_text.summary()), payload);
            }

            let Some(insertion) = self.try_find_fragment(anchor) else {
                panic!(
                    "invalid insertion for buffer {}@{:?} with anchor {:?}",
                    self.remote_id(),
                    self.version,
                    anchor
                );
            };
            assert_eq!(
                insertion.timestamp,
                anchor.timestamp,
                "invalid insertion for buffer {}@{:?} and anchor {:?}",
                self.remote_id(),
                self.version,
                anchor
            );

            // Locate the fragment in the fragment tree, then compute the
            // anchor's offset within the visible text.
            fragment_cursor.seek_forward(&Some(&insertion.fragment_id), Bias::Left);
            let fragment = fragment_cursor.item().unwrap();
            let mut fragment_offset = fragment_cursor.start().1;
            if fragment.visible {
                fragment_offset += anchor.offset - insertion.split_offset;
            }

            position.add_assign(&text_cursor.summary(fragment_offset));
            (position, payload)
        })
    }
2292
    /// Resolves a single anchor to a position expressed in the dimension `D`.
    pub fn summary_for_anchor<D>(&self, anchor: &Anchor) -> D
    where
        D: TextDimension,
    {
        self.text_summary_for_range(0..self.offset_for_anchor(anchor))
    }
2299
    /// Resolves an anchor to its offset in the visible text.
    ///
    /// Panics (via `panic_bad_anchor`) if the anchor belongs to a different
    /// buffer or references an insertion this snapshot has not observed.
    pub fn offset_for_anchor(&self, anchor: &Anchor) -> usize {
        if anchor.is_min() {
            0
        } else if anchor.is_max() {
            self.visible_text.len()
        } else {
            debug_assert_eq!(anchor.buffer_id, Some(self.remote_id));
            debug_assert!(
                self.version.observed(anchor.timestamp),
                "Anchor timestamp {:?} not observed by buffer {:?}",
                anchor.timestamp,
                self.version
            );
            let item = self.try_find_fragment(anchor);
            let Some(insertion) = item.filter(|insertion| insertion.timestamp == anchor.timestamp)
            else {
                self.panic_bad_anchor(anchor);
            };

            // Find the fragment's position in the fragment tree; `start.1`
            // is the visible-text offset at which the fragment begins.
            let (start, _, item) = self
                .fragments
                .find::<Dimensions<Option<&Locator>, usize>, _>(
                    &None,
                    &Some(&insertion.fragment_id),
                    Bias::Left,
                );
            let fragment = item.unwrap();
            let mut fragment_offset = start.1;
            // Only visible fragments contribute the anchor's intra-fragment
            // offset; a deleted fragment collapses to its start position.
            if fragment.visible {
                fragment_offset += anchor.offset - insertion.split_offset;
            }
            fragment_offset
        }
    }
2334
    /// Panics with a diagnostic message explaining why `anchor` could not be
    /// resolved against this snapshot. Marked `#[cold]` to keep this error
    /// path out of the hot resolution code.
    #[cold]
    fn panic_bad_anchor(&self, anchor: &Anchor) -> ! {
        // Report the most specific failure first: wrong buffer, then an
        // unobserved timestamp, then a generic mismatch.
        if anchor.buffer_id.is_some_and(|id| id != self.remote_id) {
            panic!(
                "invalid anchor - buffer id does not match: anchor {anchor:?}; buffer id: {}, version: {:?}",
                self.remote_id, self.version
            );
        } else if !self.version.observed(anchor.timestamp) {
            panic!(
                "invalid anchor - snapshot has not observed lamport: {:?}; version: {:?}",
                anchor, self.version
            );
        } else {
            panic!(
                "invalid anchor {:?}. buffer id: {}, version: {:?}",
                anchor, self.remote_id, self.version
            );
        }
    }
2354
    /// Returns the locator of the fragment containing `anchor`, panicking on
    /// an unresolvable anchor.
    fn fragment_id_for_anchor(&self, anchor: &Anchor) -> &Locator {
        self.try_fragment_id_for_anchor(anchor)
            .unwrap_or_else(|| self.panic_bad_anchor(anchor))
    }

    /// Returns the locator of the fragment containing `anchor`, or `None` if
    /// it cannot be found.
    fn try_fragment_id_for_anchor(&self, anchor: &Anchor) -> Option<&Locator> {
        if anchor.is_min() {
            Some(Locator::min_ref())
        } else if anchor.is_max() {
            Some(Locator::max_ref())
        } else {
            let item = self.try_find_fragment(anchor);
            // The timestamp consistency check is only enforced in debug
            // builds; release builds accept the nearest insertion.
            item.filter(|insertion| {
                !cfg!(debug_assertions) || insertion.timestamp == anchor.timestamp
            })
            .map(|insertion| &insertion.fragment_id)
        }
    }
2373
    /// Looks up the insertion fragment an anchor points into, keyed by the
    /// anchor's insertion timestamp and split offset.
    fn try_find_fragment(&self, anchor: &Anchor) -> Option<&InsertionFragment> {
        let anchor_key = InsertionFragmentKey {
            timestamp: anchor.timestamp,
            split_offset: anchor.offset,
        };
        match self.insertions.find_with_prev::<InsertionFragmentKey, _>(
            (),
            &anchor_key,
            anchor.bias,
        ) {
            (_, _, Some((prev, insertion))) => {
                let comparison = sum_tree::KeyedItem::key(insertion).cmp(&anchor_key);
                // Fall back to the previous fragment when the found one
                // starts after the anchor, or when a left-biased anchor sits
                // exactly on a split boundary (it belongs to the fragment
                // ending there, not the one starting there).
                if comparison == Ordering::Greater
                    || (anchor.bias == Bias::Left
                        && comparison == Ordering::Equal
                        && anchor.offset > 0)
                {
                    prev
                } else {
                    Some(insertion)
                }
            }
            // Past the end of the tree: the last insertion is the best match.
            _ => self.insertions.last(),
        }
    }
2399
    /// Creates a left-biased anchor at `position` (stays before text inserted
    /// at that position).
    pub fn anchor_before<T: ToOffset>(&self, position: T) -> Anchor {
        self.anchor_at(position, Bias::Left)
    }

    /// Creates a right-biased anchor at `position` (moves after text inserted
    /// at that position).
    pub fn anchor_after<T: ToOffset>(&self, position: T) -> Anchor {
        self.anchor_at(position, Bias::Right)
    }

    /// Creates an anchor at `position` with the given bias.
    pub fn anchor_at<T: ToOffset>(&self, position: T, bias: Bias) -> Anchor {
        self.anchor_at_offset(position.to_offset(self), bias)
    }
2411
    /// Creates an anchor at the given offset, recording the insertion
    /// timestamp and intra-insertion offset of the containing fragment.
    fn anchor_at_offset(&self, mut offset: usize, bias: Bias) -> Anchor {
        if bias == Bias::Left && offset == 0 {
            Anchor::min_for_buffer(self.remote_id)
        } else if bias == Bias::Right
            // In release builds any offset >= len maps to the max anchor; in
            // debug builds only exactly len does, so out-of-bounds offsets
            // fall through to the fragment lookup and trigger the
            // debug_panic below.
            && ((!cfg!(debug_assertions) && offset >= self.len()) || offset == self.len())
        {
            Anchor::max_for_buffer(self.remote_id)
        } else {
            // Snap offsets that fall inside a multi-byte character to the
            // nearest boundary in the direction of the bias.
            if !self
                .visible_text
                .assert_char_boundary::<{ cfg!(debug_assertions) }>(offset)
            {
                offset = match bias {
                    Bias::Left => self.visible_text.floor_char_boundary(offset),
                    Bias::Right => self.visible_text.ceil_char_boundary(offset),
                };
            }
            let (start, _, item) = self.fragments.find::<usize, _>(&None, &offset, bias);
            let Some(fragment) = item else {
                // We got a bad offset, likely out of bounds
                debug_panic!(
                    "Failed to find fragment at offset {} (len: {})",
                    offset,
                    self.len()
                );
                return Anchor::max_for_buffer(self.remote_id);
            };
            let overshoot = offset - start;
            Anchor {
                timestamp: fragment.timestamp,
                offset: fragment.insertion_offset + overshoot,
                bias,
                buffer_id: Some(self.remote_id),
            }
        }
    }
2448
    /// Reports whether this snapshot can resolve `anchor`: it must be a
    /// sentinel, or belong to this buffer with an observed timestamp.
    pub fn can_resolve(&self, anchor: &Anchor) -> bool {
        anchor.is_min()
            || anchor.is_max()
            || (Some(self.remote_id) == anchor.buffer_id && self.version.observed(anchor.timestamp))
    }

    /// Clamps an offset to a valid position in the visible text.
    pub fn clip_offset(&self, offset: usize, bias: Bias) -> usize {
        self.visible_text.clip_offset(offset, bias)
    }

    /// Clamps a point to a valid position in the visible text.
    pub fn clip_point(&self, point: Point, bias: Bias) -> Point {
        self.visible_text.clip_point(point, bias)
    }

    /// Clamps a UTF-16 offset to a valid position in the visible text.
    pub fn clip_offset_utf16(&self, offset: OffsetUtf16, bias: Bias) -> OffsetUtf16 {
        self.visible_text.clip_offset_utf16(offset, bias)
    }

    /// Clamps a possibly-invalid UTF-16 point to a valid position.
    pub fn clip_point_utf16(&self, point: Unclipped<PointUtf16>, bias: Bias) -> PointUtf16 {
        self.visible_text.clip_point_utf16(point, bias)
    }
2470
    /// Returns the edits made to the whole buffer since the `since` version.
    pub fn edits_since<'a, D>(
        &'a self,
        since: &'a clock::Global,
    ) -> impl 'a + Iterator<Item = Edit<D>>
    where
        D: TextDimension + Ord,
    {
        self.edits_since_in_range(since, Anchor::MIN..Anchor::MAX)
    }

    /// Like `edits_since`, but also yields the anchor range of each edit.
    pub fn anchored_edits_since<'a, D>(
        &'a self,
        since: &'a clock::Global,
    ) -> impl 'a + Iterator<Item = (Edit<D>, Range<Anchor>)>
    where
        D: TextDimension + Ord,
    {
        self.anchored_edits_since_in_range(since, Anchor::MIN..Anchor::MAX)
    }

    /// Returns the edits made within `range` since the `since` version.
    pub fn edits_since_in_range<'a, D>(
        &'a self,
        since: &'a clock::Global,
        range: Range<Anchor>,
    ) -> impl 'a + Iterator<Item = Edit<D>>
    where
        D: TextDimension + Ord,
    {
        self.anchored_edits_since_in_range(since, range)
            .map(|item| item.0)
    }
2502
    /// Returns the edits made within `range` since the `since` version, each
    /// paired with its anchor range. The heavy lifting happens in the
    /// `Iterator` impl for `Edits` below.
    pub fn anchored_edits_since_in_range<'a, D>(
        &'a self,
        since: &'a clock::Global,
        range: Range<Anchor>,
    ) -> impl 'a + Iterator<Item = (Edit<D>, Range<Anchor>)>
    where
        D: TextDimension + Ord,
    {
        // Fast path: no new operations since `since` means no edits. The
        // `Option<..>.into_iter().flatten()` trick lets both branches share
        // one concrete iterator type.
        if *since == self.version {
            return None.into_iter().flatten();
        }
        // Only visit fragments whose max version is not fully covered by
        // `since` — i.e. fragments that may have changed.
        let mut cursor = self.fragments.filter(&None, move |summary| {
            !since.observed_all(&summary.max_version)
        });
        cursor.next();
        let fragments_cursor = Some(cursor);
        let start_fragment_id = self.fragment_id_for_anchor(&range.start);
        let (start, _, item) = self
            .fragments
            .find::<Dimensions<Option<&Locator>, FragmentTextSummary>, _>(
                &None,
                &Some(start_fragment_id),
                Bias::Left,
            );
        // Starting offsets into the visible and deleted ropes, including the
        // anchor's overshoot within its starting fragment.
        let mut visible_start = start.1.visible;
        let mut deleted_start = start.1.deleted;
        if let Some(fragment) = item {
            let overshoot = range.start.offset - fragment.insertion_offset;
            if fragment.visible {
                visible_start += overshoot;
            } else {
                deleted_start += overshoot;
            }
        }
        let end_fragment_id = self.fragment_id_for_anchor(&range.end);

        Some(Edits {
            visible_cursor: self.visible_text.cursor(visible_start),
            deleted_cursor: self.deleted_text.cursor(deleted_start),
            fragments_cursor,
            undos: &self.undo_map,
            since,
            old_end: D::zero(()),
            new_end: D::zero(()),
            range: (start_fragment_id, range.start.offset)..(end_fragment_id, range.end.offset),
            buffer_id: self.remote_id,
        })
        .into_iter()
        .flatten()
    }
2553
2554 pub fn has_edits_since_in_range(&self, since: &clock::Global, range: Range<Anchor>) -> bool {
2555 if *since != self.version {
2556 let start_fragment_id = self.fragment_id_for_anchor(&range.start);
2557 let end_fragment_id = self.fragment_id_for_anchor(&range.end);
2558 let mut cursor = self.fragments.filter::<_, usize>(&None, move |summary| {
2559 !since.observed_all(&summary.max_version)
2560 });
2561 cursor.next();
2562 while let Some(fragment) = cursor.item() {
2563 if fragment.id > *end_fragment_id {
2564 break;
2565 }
2566 if fragment.id > *start_fragment_id {
2567 let was_visible = fragment.was_visible(since, &self.undo_map);
2568 let is_visible = fragment.visible;
2569 if was_visible != is_visible {
2570 return true;
2571 }
2572 }
2573 cursor.next();
2574 }
2575 }
2576 false
2577 }
2578
2579 pub fn has_edits_since(&self, since: &clock::Global) -> bool {
2580 if *since != self.version {
2581 let mut cursor = self.fragments.filter::<_, usize>(&None, move |summary| {
2582 !since.observed_all(&summary.max_version)
2583 });
2584 cursor.next();
2585 while let Some(fragment) = cursor.item() {
2586 let was_visible = fragment.was_visible(since, &self.undo_map);
2587 let is_visible = fragment.visible;
2588 if was_visible != is_visible {
2589 return true;
2590 }
2591 cursor.next();
2592 }
2593 }
2594 false
2595 }
2596
2597 pub fn range_to_version(&self, range: Range<usize>, version: &clock::Global) -> Range<usize> {
2598 let mut offsets = self.offsets_to_version([range.start, range.end], version);
2599 offsets.next().unwrap()..offsets.next().unwrap()
2600 }
2601
    /// Converts the given sequence of offsets into their corresponding offsets
    /// at a prior version of this buffer.
    ///
    /// The input offsets must be in ascending order: the edit stream is
    /// consumed forward and never rewound.
    pub fn offsets_to_version<'a>(
        &'a self,
        offsets: impl 'a + IntoIterator<Item = usize>,
        version: &'a clock::Global,
    ) -> impl 'a + Iterator<Item = usize> {
        let mut edits = self.edits_since(version).peekable();
        // The ends of the last edit entirely before the current offset, used
        // to translate offsets in the unedited gaps between edits.
        let mut last_old_end = 0;
        let mut last_new_end = 0;
        offsets.into_iter().map(move |new_offset| {
            while let Some(edit) = edits.peek() {
                // Edit starts after the offset: the offset lies in the gap.
                if edit.new.start > new_offset {
                    break;
                }

                // Edit ends at or before the offset: consume it and record
                // its ends.
                if edit.new.end <= new_offset {
                    last_new_end = edit.new.end;
                    last_old_end = edit.old.end;
                    edits.next();
                    continue;
                }

                // The offset falls inside this edit: map it into the old
                // range, clamped to the old end.
                let overshoot = new_offset - edit.new.start;
                return (edit.old.start + overshoot).min(edit.old.end);
            }

            last_old_end + new_offset.saturating_sub(last_new_end)
        })
    }
2632
    /// Visually annotates a position or range with the `Debug` representation of a value. The
    /// callsite of this function is used as a key - previous annotations will be removed.
    #[cfg(debug_assertions)]
    #[track_caller]
    pub fn debug<R, V>(&self, ranges: &R, value: V)
    where
        R: debug::ToDebugRanges,
        V: std::fmt::Debug,
    {
        self.debug_with_key(std::panic::Location::caller(), ranges, value);
    }

    /// Visually annotates a position or range with the `Debug` representation of a value. Previous
    /// debug annotations with the same key will be removed. The key is also used to determine the
    /// annotation's color.
    #[cfg(debug_assertions)]
    pub fn debug_with_key<K, R, V>(&self, key: &K, ranges: &R, value: V)
    where
        K: std::hash::Hash + 'static,
        R: debug::ToDebugRanges,
        V: std::fmt::Debug,
    {
        // Convert to anchors so the annotations follow the text as it is
        // edited (after-start/before-end keeps the range snug).
        let ranges = ranges
            .to_debug_ranges(self)
            .into_iter()
            .map(|range| self.anchor_after(range.start)..self.anchor_before(range.end))
            .collect();
        debug::GlobalDebugRanges::with_locked(|debug_ranges| {
            debug_ranges.insert(key, ranges, format!("{value:?}").into());
        });
    }
2664}
2665
/// Incrementally rebuilds the visible and deleted ropes while applying an
/// operation, consuming text from cursors over the old ropes.
struct RopeBuilder<'a> {
    // Cursor over the previous visible rope.
    old_visible_cursor: rope::Cursor<'a>,
    // Cursor over the previous deleted rope.
    old_deleted_cursor: rope::Cursor<'a>,
    // The visible rope being accumulated.
    new_visible: Rope,
    // The deleted rope being accumulated.
    new_deleted: Rope,
}
2672
2673impl<'a> RopeBuilder<'a> {
2674 fn new(old_visible_cursor: rope::Cursor<'a>, old_deleted_cursor: rope::Cursor<'a>) -> Self {
2675 Self {
2676 old_visible_cursor,
2677 old_deleted_cursor,
2678 new_visible: Rope::new(),
2679 new_deleted: Rope::new(),
2680 }
2681 }
2682
2683 fn append(&mut self, len: FragmentTextSummary) {
2684 self.push(len.visible, true, true);
2685 self.push(len.deleted, false, false);
2686 }
2687
2688 fn push_fragment(&mut self, fragment: &Fragment, was_visible: bool) {
2689 debug_assert!(fragment.len > 0);
2690 self.push(fragment.len, was_visible, fragment.visible)
2691 }
2692
2693 fn push(&mut self, len: usize, was_visible: bool, is_visible: bool) {
2694 let text = if was_visible {
2695 self.old_visible_cursor
2696 .slice(self.old_visible_cursor.offset() + len)
2697 } else {
2698 self.old_deleted_cursor
2699 .slice(self.old_deleted_cursor.offset() + len)
2700 };
2701 if is_visible {
2702 self.new_visible.append(text);
2703 } else {
2704 self.new_deleted.append(text);
2705 }
2706 }
2707
2708 fn push_str(&mut self, text: &str) {
2709 self.new_visible.push(text);
2710 }
2711
2712 fn finish(mut self) -> (Rope, Rope) {
2713 self.new_visible.append(self.old_visible_cursor.suffix());
2714 self.new_deleted.append(self.old_deleted_cursor.suffix());
2715 (self.new_visible, self.new_deleted)
2716 }
2717}
2718
impl<D: TextDimension + Ord, F: FnMut(&FragmentSummary) -> bool> Iterator for Edits<'_, D, F> {
    type Item = (Edit<D>, Range<Anchor>);

    /// Produces the next coalesced edit by walking the filtered fragment
    /// cursor, merging adjacent insertions/deletions into a single `Edit`
    /// until a gap of unchanged text is reached.
    fn next(&mut self) -> Option<Self::Item> {
        let mut pending_edit: Option<Self::Item> = None;
        let cursor = self.fragments_cursor.as_mut()?;

        while let Some(fragment) = cursor.item() {
            // Restrict to fragments within the requested anchor range.
            if fragment.id < *self.range.start.0 {
                cursor.next();
                continue;
            } else if fragment.id > *self.range.end.0 {
                break;
            }

            // Account for the unchanged visible text between the previous
            // position and this fragment: it advances both ends equally.
            if cursor.start().visible > self.visible_cursor.offset() {
                let summary = self.visible_cursor.summary(cursor.start().visible);
                self.old_end.add_assign(&summary);
                self.new_end.add_assign(&summary);
            }

            // A gap after a pending edit ends the coalescing run; emit it.
            if pending_edit
                .as_ref()
                .is_some_and(|(change, _)| change.new.end < self.new_end)
            {
                break;
            }

            let start_anchor = Anchor {
                timestamp: fragment.timestamp,
                offset: fragment.insertion_offset,
                bias: Bias::Right,
                buffer_id: Some(self.buffer_id),
            };
            let end_anchor = Anchor {
                timestamp: fragment.timestamp,
                offset: fragment.insertion_offset + fragment.len,
                bias: Bias::Left,
                buffer_id: Some(self.buffer_id),
            };

            if !fragment.was_visible(self.since, self.undos) && fragment.visible {
                // Insertion: the fragment was not visible at `since` but is
                // now. It extends the new side of the edit.
                let mut visible_end = cursor.end().visible;
                // Clamp the final fragment to the requested end offset.
                if fragment.id == *self.range.end.0 {
                    visible_end = cmp::min(
                        visible_end,
                        cursor.start().visible + (self.range.end.1 - fragment.insertion_offset),
                    );
                }

                let fragment_summary = self.visible_cursor.summary(visible_end);
                let mut new_end = self.new_end;
                new_end.add_assign(&fragment_summary);
                if let Some((edit, range)) = pending_edit.as_mut() {
                    edit.new.end = new_end;
                    range.end = end_anchor;
                } else {
                    pending_edit = Some((
                        Edit {
                            old: self.old_end..self.old_end,
                            new: self.new_end..new_end,
                        },
                        start_anchor..end_anchor,
                    ));
                }

                self.new_end = new_end;
            } else if fragment.was_visible(self.since, self.undos) && !fragment.visible {
                // Deletion: the fragment was visible at `since` but is now
                // deleted. It extends the old side of the edit.
                let mut deleted_end = cursor.end().deleted;
                if fragment.id == *self.range.end.0 {
                    deleted_end = cmp::min(
                        deleted_end,
                        cursor.start().deleted + (self.range.end.1 - fragment.insertion_offset),
                    );
                }

                if cursor.start().deleted > self.deleted_cursor.offset() {
                    self.deleted_cursor.seek_forward(cursor.start().deleted);
                }
                let fragment_summary = self.deleted_cursor.summary(deleted_end);
                let mut old_end = self.old_end;
                old_end.add_assign(&fragment_summary);
                if let Some((edit, range)) = pending_edit.as_mut() {
                    edit.old.end = old_end;
                    range.end = end_anchor;
                } else {
                    pending_edit = Some((
                        Edit {
                            old: self.old_end..old_end,
                            new: self.new_end..self.new_end,
                        },
                        start_anchor..end_anchor,
                    ));
                }

                self.old_end = old_end;
            }

            cursor.next();
        }

        pending_edit
    }
}
2823
impl Fragment {
    /// A fragment is visible when its insertion has not been undone and
    /// every deletion that touched it has been undone.
    fn is_visible(&self, undos: &UndoMap) -> bool {
        !undos.is_undone(self.timestamp) && self.deletions.iter().all(|d| undos.is_undone(*d))
    }

    /// Same visibility rule as `is_visible`, but evaluated as of `version`:
    /// operations not observed by `version` are ignored.
    fn was_visible(&self, version: &clock::Global, undos: &UndoMap) -> bool {
        (version.observed(self.timestamp) && !undos.was_undone(self.timestamp, version))
            && self
                .deletions
                .iter()
                .all(|d| !version.observed(*d) || undos.was_undone(*d, version))
    }
}
2837
2838impl sum_tree::Item for Fragment {
2839 type Summary = FragmentSummary;
2840
2841 fn summary(&self, _cx: &Option<clock::Global>) -> Self::Summary {
2842 let mut max_version = clock::Global::new();
2843 max_version.observe(self.timestamp);
2844 for deletion in &self.deletions {
2845 max_version.observe(*deletion);
2846 }
2847 max_version.join(&self.max_undos);
2848
2849 let mut min_insertion_version = clock::Global::new();
2850 min_insertion_version.observe(self.timestamp);
2851 let max_insertion_version = min_insertion_version.clone();
2852 if self.visible {
2853 FragmentSummary {
2854 max_id: self.id.clone(),
2855 text: FragmentTextSummary {
2856 visible: self.len,
2857 deleted: 0,
2858 },
2859 max_version,
2860 min_insertion_version,
2861 max_insertion_version,
2862 }
2863 } else {
2864 FragmentSummary {
2865 max_id: self.id.clone(),
2866 text: FragmentTextSummary {
2867 visible: 0,
2868 deleted: self.len,
2869 },
2870 max_version,
2871 min_insertion_version,
2872 max_insertion_version,
2873 }
2874 }
2875 }
2876}
2877
impl sum_tree::Summary for FragmentSummary {
    type Context<'a> = &'a Option<clock::Global>;

    fn zero(_cx: Self::Context<'_>) -> Self {
        Default::default()
    }

    /// Combines a right-hand summary into this one: lengths add, versions
    /// join (meet for the minimum insertion version), and the max id is the
    /// rightmost fragment's id.
    fn add_summary(&mut self, other: &Self, _: Self::Context<'_>) {
        self.max_id.assign(&other.max_id);
        self.text.visible += &other.text.visible;
        self.text.deleted += &other.text.deleted;
        self.max_version.join(&other.max_version);
        self.min_insertion_version
            .meet(&other.min_insertion_version);
        self.max_insertion_version
            .join(&other.max_insertion_version);
    }
}

impl Default for FragmentSummary {
    /// The identity summary: empty text, minimal locator, empty versions.
    fn default() -> Self {
        FragmentSummary {
            max_id: Locator::min(),
            text: FragmentTextSummary::default(),
            max_version: clock::Global::new(),
            min_insertion_version: clock::Global::new(),
            max_insertion_version: clock::Global::new(),
        }
    }
}
2908
impl sum_tree::Item for InsertionFragment {
    type Summary = InsertionFragmentKey;

    /// An insertion fragment summarizes to its own key: the insertion
    /// timestamp plus the offset at which this split begins.
    fn summary(&self, _cx: ()) -> Self::Summary {
        InsertionFragmentKey {
            timestamp: self.timestamp,
            split_offset: self.split_offset,
        }
    }
}

impl sum_tree::KeyedItem for InsertionFragment {
    type Key = InsertionFragmentKey;

    fn key(&self) -> Self::Key {
        sum_tree::Item::summary(self, ())
    }
}

impl InsertionFragment {
    /// Builds the insertion-tree record that indexes `fragment` by its
    /// insertion timestamp and offset.
    fn new(fragment: &Fragment) -> Self {
        Self {
            timestamp: fragment.timestamp,
            split_offset: fragment.insertion_offset,
            fragment_id: fragment.id.clone(),
        }
    }

    /// Convenience constructor for an insertion-tree `Insert` edit.
    fn insert_new(fragment: &Fragment) -> sum_tree::Edit<Self> {
        sum_tree::Edit::Insert(Self::new(fragment))
    }
}
2941
impl sum_tree::ContextLessSummary for InsertionFragmentKey {
    fn zero() -> Self {
        InsertionFragmentKey {
            timestamp: Lamport::MIN,
            split_offset: 0,
        }
    }

    /// Keys don't accumulate: the summary of a subtree is simply its
    /// rightmost key.
    fn add_summary(&mut self, summary: &Self) {
        *self = *summary;
    }
}
2954
/// An offset into the buffer's full text — visible and deleted text
/// combined.
#[derive(Copy, Clone, Debug, Default, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct FullOffset(pub usize);

impl ops::AddAssign<usize> for FullOffset {
    fn add_assign(&mut self, rhs: usize) {
        *self = Self(self.0 + rhs);
    }
}

impl ops::Add<usize> for FullOffset {
    type Output = Self;

    fn add(self, rhs: usize) -> Self::Output {
        Self(self.0 + rhs)
    }
}

impl ops::Sub for FullOffset {
    type Output = usize;

    /// The distance between two full offsets, as a plain `usize`.
    fn sub(self, rhs: Self) -> Self::Output {
        self.0 - rhs.0
    }
}
2980
impl sum_tree::Dimension<'_, FragmentSummary> for usize {
    fn zero(_: &Option<clock::Global>) -> Self {
        Default::default()
    }

    /// `usize` measures only the visible text length.
    fn add_summary(&mut self, summary: &FragmentSummary, _: &Option<clock::Global>) {
        *self += summary.text.visible;
    }
}

impl sum_tree::Dimension<'_, FragmentSummary> for FullOffset {
    fn zero(_: &Option<clock::Global>) -> Self {
        Default::default()
    }

    /// `FullOffset` measures visible and deleted text combined.
    fn add_summary(&mut self, summary: &FragmentSummary, _: &Option<clock::Global>) {
        self.0 += summary.text.visible + summary.text.deleted;
    }
}

impl<'a> sum_tree::Dimension<'a, FragmentSummary> for Option<&'a Locator> {
    fn zero(_: &Option<clock::Global>) -> Self {
        Default::default()
    }

    /// Tracks the rightmost fragment locator seen so far.
    fn add_summary(&mut self, summary: &'a FragmentSummary, _: &Option<clock::Global>) {
        *self = Some(&summary.max_id);
    }
}
3010
impl sum_tree::SeekTarget<'_, FragmentSummary, FragmentTextSummary> for usize {
    /// A plain `usize` seek target addresses the visible text only.
    fn cmp(
        &self,
        cursor_location: &FragmentTextSummary,
        _: &Option<clock::Global>,
    ) -> cmp::Ordering {
        Ord::cmp(self, &cursor_location.visible)
    }
}
3020
3021#[derive(Copy, Clone, Debug, Eq, PartialEq)]
3022enum VersionedFullOffset {
3023 Offset(FullOffset),
3024 Invalid,
3025}
3026
3027impl VersionedFullOffset {
3028 fn full_offset(&self) -> FullOffset {
3029 if let Self::Offset(position) = self {
3030 *position
3031 } else {
3032 panic!("invalid version")
3033 }
3034 }
3035}
3036
3037impl Default for VersionedFullOffset {
3038 fn default() -> Self {
3039 Self::Offset(Default::default())
3040 }
3041}
3042
impl<'a> sum_tree::Dimension<'a, FragmentSummary> for VersionedFullOffset {
    fn zero(_cx: &Option<clock::Global>) -> Self {
        Default::default()
    }

    fn add_summary(&mut self, summary: &'a FragmentSummary, cx: &Option<clock::Global>) {
        // Once invalid, stay invalid.
        if let Self::Offset(offset) = self {
            let version = cx.as_ref().unwrap();
            if version.observed_all(&summary.max_insertion_version) {
                // Every insertion here existed at `version`: count it all.
                *offset += summary.text.visible + summary.text.deleted;
            } else if version.observed_any(&summary.min_insertion_version) {
                // The version saw only some insertions in this subtree, so
                // an unambiguous offset can't be computed.
                *self = Self::Invalid;
            }
        }
    }
}
3059
impl sum_tree::SeekTarget<'_, FragmentSummary, Self> for VersionedFullOffset {
    fn cmp(&self, cursor_position: &Self, _: &Option<clock::Global>) -> cmp::Ordering {
        match (self, cursor_position) {
            (Self::Offset(a), Self::Offset(b)) => Ord::cmp(a, b),
            // A valid target always sorts before an invalid cursor position.
            (Self::Offset(_), Self::Invalid) => cmp::Ordering::Less,
            // Seek targets are constructed from valid offsets, so the target
            // itself is never `Invalid`.
            (Self::Invalid, _) => unreachable!(),
        }
    }
}
3069
3070impl Operation {
3071 fn replica_id(&self) -> ReplicaId {
3072 operation_queue::Operation::lamport_timestamp(self).replica_id
3073 }
3074
3075 pub fn timestamp(&self) -> clock::Lamport {
3076 match self {
3077 Operation::Edit(edit) => edit.timestamp,
3078 Operation::Undo(undo) => undo.timestamp,
3079 }
3080 }
3081
3082 pub fn as_edit(&self) -> Option<&EditOperation> {
3083 match self {
3084 Operation::Edit(edit) => Some(edit),
3085 _ => None,
3086 }
3087 }
3088
3089 pub fn is_edit(&self) -> bool {
3090 matches!(self, Operation::Edit { .. })
3091 }
3092}
3093
impl operation_queue::Operation for Operation {
    /// Both operation kinds carry a lamport timestamp; return it.
    fn lamport_timestamp(&self) -> clock::Lamport {
        match self {
            Operation::Edit(edit) => edit.timestamp,
            Operation::Undo(undo) => undo.timestamp,
        }
    }
}
3102
/// A position type that can be resolved to an offset in a buffer snapshot.
pub trait ToOffset {
    /// Resolves this position to an offset into the snapshot's visible text.
    fn to_offset(&self, snapshot: &BufferSnapshot) -> usize;
    /// Turns this point into the next offset in the buffer that comes after this, respecting utf8 boundaries.
    fn to_next_offset(&self, snapshot: &BufferSnapshot) -> usize {
        snapshot
            .visible_text
            .ceil_char_boundary(self.to_offset(snapshot) + 1)
    }
    /// Turns this point into the previous offset in the buffer that comes before this, respecting utf8 boundaries.
    fn to_previous_offset(&self, snapshot: &BufferSnapshot) -> usize {
        snapshot
            .visible_text
            .floor_char_boundary(self.to_offset(snapshot).saturating_sub(1))
    }
}
3118
impl ToOffset for Point {
    #[inline]
    fn to_offset(&self, snapshot: &BufferSnapshot) -> usize {
        snapshot.point_to_offset(*self)
    }
}

impl ToOffset for usize {
    // `track_caller` so the char-boundary debug assertion points at the
    // caller that supplied the bad offset.
    #[track_caller]
    fn to_offset(&self, snapshot: &BufferSnapshot) -> usize {
        // If the offset is not on a char boundary (only checked fatally in
        // debug builds), floor it to the previous boundary.
        if !snapshot
            .as_rope()
            .assert_char_boundary::<{ cfg!(debug_assertions) }>(*self)
        {
            snapshot.as_rope().floor_char_boundary(*self)
        } else {
            *self
        }
    }
}

impl ToOffset for Anchor {
    #[inline]
    fn to_offset(&self, snapshot: &BufferSnapshot) -> usize {
        snapshot.summary_for_anchor(self)
    }
}

impl<T: ToOffset> ToOffset for &T {
    #[inline]
    fn to_offset(&self, content: &BufferSnapshot) -> usize {
        (*self).to_offset(content)
    }
}

impl ToOffset for PointUtf16 {
    #[inline]
    fn to_offset(&self, snapshot: &BufferSnapshot) -> usize {
        snapshot.point_utf16_to_offset(*self)
    }
}

impl ToOffset for Unclipped<PointUtf16> {
    #[inline]
    fn to_offset(&self, snapshot: &BufferSnapshot) -> usize {
        snapshot.unclipped_point_utf16_to_offset(*self)
    }
}
3167
/// A position type that can be resolved to a `Point` in a buffer snapshot.
pub trait ToPoint {
    fn to_point(&self, snapshot: &BufferSnapshot) -> Point;
}

impl ToPoint for Anchor {
    #[inline]
    fn to_point(&self, snapshot: &BufferSnapshot) -> Point {
        snapshot.summary_for_anchor(self)
    }
}

impl ToPoint for usize {
    #[inline]
    fn to_point(&self, snapshot: &BufferSnapshot) -> Point {
        snapshot.offset_to_point(*self)
    }
}

impl ToPoint for Point {
    // Already a point: identity conversion.
    #[inline]
    fn to_point(&self, _: &BufferSnapshot) -> Point {
        *self
    }
}

impl ToPoint for Unclipped<PointUtf16> {
    #[inline]
    fn to_point(&self, snapshot: &BufferSnapshot) -> Point {
        snapshot.unclipped_point_utf16_to_point(*self)
    }
}
3199
/// A buffer position that can be resolved to a [`PointUtf16`] (row and column
/// measured in UTF-16 code units) against a snapshot.
pub trait ToPointUtf16 {
    fn to_point_utf16(&self, snapshot: &BufferSnapshot) -> PointUtf16;
}
3203
3204impl ToPointUtf16 for Anchor {
3205 #[inline]
3206 fn to_point_utf16(&self, snapshot: &BufferSnapshot) -> PointUtf16 {
3207 snapshot.summary_for_anchor(self)
3208 }
3209}
3210
3211impl ToPointUtf16 for usize {
3212 #[inline]
3213 fn to_point_utf16(&self, snapshot: &BufferSnapshot) -> PointUtf16 {
3214 snapshot.offset_to_point_utf16(*self)
3215 }
3216}
3217
3218impl ToPointUtf16 for PointUtf16 {
3219 #[inline]
3220 fn to_point_utf16(&self, _: &BufferSnapshot) -> PointUtf16 {
3221 *self
3222 }
3223}
3224
3225impl ToPointUtf16 for Point {
3226 #[inline]
3227 fn to_point_utf16(&self, snapshot: &BufferSnapshot) -> PointUtf16 {
3228 snapshot.point_to_point_utf16(*self)
3229 }
3230}
3231
/// A buffer position that can be resolved to an [`OffsetUtf16`] (an offset
/// measured in UTF-16 code units) against a snapshot.
pub trait ToOffsetUtf16 {
    fn to_offset_utf16(&self, snapshot: &BufferSnapshot) -> OffsetUtf16;
}
3235
3236impl ToOffsetUtf16 for Anchor {
3237 #[inline]
3238 fn to_offset_utf16(&self, snapshot: &BufferSnapshot) -> OffsetUtf16 {
3239 snapshot.summary_for_anchor(self)
3240 }
3241}
3242
3243impl ToOffsetUtf16 for usize {
3244 #[inline]
3245 fn to_offset_utf16(&self, snapshot: &BufferSnapshot) -> OffsetUtf16 {
3246 snapshot.offset_to_offset_utf16(*self)
3247 }
3248}
3249
3250impl ToOffsetUtf16 for OffsetUtf16 {
3251 #[inline]
3252 fn to_offset_utf16(&self, _snapshot: &BufferSnapshot) -> OffsetUtf16 {
3253 *self
3254 }
3255}
3256
/// A position type that can be computed by resolving an [`Anchor`] against a
/// snapshot.
pub trait FromAnchor {
    fn from_anchor(anchor: &Anchor, snapshot: &BufferSnapshot) -> Self;
}
3260
3261impl FromAnchor for Anchor {
3262 #[inline]
3263 fn from_anchor(anchor: &Anchor, _snapshot: &BufferSnapshot) -> Self {
3264 *anchor
3265 }
3266}
3267
3268impl FromAnchor for Point {
3269 #[inline]
3270 fn from_anchor(anchor: &Anchor, snapshot: &BufferSnapshot) -> Self {
3271 snapshot.summary_for_anchor(anchor)
3272 }
3273}
3274
3275impl FromAnchor for PointUtf16 {
3276 #[inline]
3277 fn from_anchor(anchor: &Anchor, snapshot: &BufferSnapshot) -> Self {
3278 snapshot.summary_for_anchor(anchor)
3279 }
3280}
3281
3282impl FromAnchor for usize {
3283 #[inline]
3284 fn from_anchor(anchor: &Anchor, snapshot: &BufferSnapshot) -> Self {
3285 snapshot.summary_for_anchor(anchor)
3286 }
3287}
3288
/// The sequence of characters that terminates a line of text.
// `Eq` added alongside `PartialEq`: the equality here is a total relation
// (no floats), and deriving only `PartialEq` trips clippy's
// `derive_partial_eq_without_eq` and blocks use in `Eq`-requiring contexts.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum LineEnding {
    /// `"\n"` — the Unix-style line ending.
    Unix,
    /// `"\r\n"` — the Windows-style line ending.
    Windows,
}
3294
3295impl Default for LineEnding {
3296 fn default() -> Self {
3297 #[cfg(unix)]
3298 return Self::Unix;
3299
3300 #[cfg(not(unix))]
3301 return Self::Windows;
3302 }
3303}
3304
3305impl LineEnding {
3306 pub fn as_str(&self) -> &'static str {
3307 match self {
3308 LineEnding::Unix => "\n",
3309 LineEnding::Windows => "\r\n",
3310 }
3311 }
3312
3313 pub fn label(&self) -> &'static str {
3314 match self {
3315 LineEnding::Unix => "LF",
3316 LineEnding::Windows => "CRLF",
3317 }
3318 }
3319
3320 pub fn detect(text: &str) -> Self {
3321 let mut max_ix = cmp::min(text.len(), 1000);
3322 while !text.is_char_boundary(max_ix) {
3323 max_ix -= 1;
3324 }
3325
3326 if let Some(ix) = text[..max_ix].find(['\n']) {
3327 if ix > 0 && text.as_bytes()[ix - 1] == b'\r' {
3328 Self::Windows
3329 } else {
3330 Self::Unix
3331 }
3332 } else {
3333 Self::default()
3334 }
3335 }
3336
3337 pub fn normalize(text: &mut String) {
3338 if let Cow::Owned(replaced) = LINE_SEPARATORS_REGEX.replace_all(text, "\n") {
3339 *text = replaced;
3340 }
3341 }
3342
3343 pub fn normalize_arc(text: Arc<str>) -> Arc<str> {
3344 if let Cow::Owned(replaced) = LINE_SEPARATORS_REGEX.replace_all(&text, "\n") {
3345 replaced.into()
3346 } else {
3347 text
3348 }
3349 }
3350
3351 pub fn normalize_cow(text: Cow<str>) -> Cow<str> {
3352 if let Cow::Owned(replaced) = LINE_SEPARATORS_REGEX.replace_all(&text, "\n") {
3353 replaced.into()
3354 } else {
3355 text
3356 }
3357 }
3358}
3359
/// Yields the text of `rope` as `&str` chunks, with every line break replaced
/// by `line_ending`'s character sequence.
///
/// Within each rope chunk, `str::lines()` splits on `\n` (discarding an
/// immediately preceding `\r`), and the requested line ending is re-inserted
/// *between* lines; a chunk ending in `\n` additionally gets a trailing line
/// ending, since `lines()` drops the final empty segment.
///
/// NOTE(review): this assumes a `\r\n` pair is never split across two rope
/// chunks — a chunk ending in a bare `\r` would leak that `\r` into the
/// output verbatim. Presumably the rope's chunking guarantees this; confirm
/// against the `rope` crate.
pub fn chunks_with_line_ending(rope: &Rope, line_ending: LineEnding) -> impl Iterator<Item = &str> {
    rope.chunks().flat_map(move |chunk| {
        // Becomes true after the first line of this chunk is emitted, so the
        // separator is only inserted between lines, never before the first.
        let mut newline = false;
        // Separator to append when the chunk itself ends in '\n' (see above).
        let end_with_newline = chunk.ends_with('\n').then_some(line_ending.as_str());
        chunk
            .lines()
            .flat_map(move |line| {
                let ending = if newline {
                    Some(line_ending.as_str())
                } else {
                    None
                };
                newline = true;
                ending.into_iter().chain([line])
            })
            .chain(end_with_newline)
    })
}
3378
3379#[cfg(debug_assertions)]
3380pub mod debug {
3381 use super::*;
3382 use parking_lot::Mutex;
3383 use std::any::TypeId;
3384 use std::hash::{Hash, Hasher};
3385
3386 static GLOBAL_DEBUG_RANGES: Mutex<Option<GlobalDebugRanges>> = Mutex::new(None);
3387
3388 pub struct GlobalDebugRanges {
3389 pub ranges: Vec<DebugRange>,
3390 key_to_occurrence_index: HashMap<Key, usize>,
3391 next_occurrence_index: usize,
3392 }
3393
3394 pub struct DebugRange {
3395 key: Key,
3396 pub ranges: Vec<Range<Anchor>>,
3397 pub value: Arc<str>,
3398 pub occurrence_index: usize,
3399 }
3400
3401 #[derive(Debug, Clone, PartialEq, Eq, Hash)]
3402 struct Key {
3403 type_id: TypeId,
3404 hash: u64,
3405 }
3406
3407 impl GlobalDebugRanges {
3408 pub fn with_locked<R>(f: impl FnOnce(&mut Self) -> R) -> R {
3409 let mut state = GLOBAL_DEBUG_RANGES.lock();
3410 if state.is_none() {
3411 *state = Some(GlobalDebugRanges {
3412 ranges: Vec::new(),
3413 key_to_occurrence_index: HashMap::default(),
3414 next_occurrence_index: 0,
3415 });
3416 }
3417 if let Some(global_debug_ranges) = state.as_mut() {
3418 f(global_debug_ranges)
3419 } else {
3420 unreachable!()
3421 }
3422 }
3423
3424 pub fn insert<K: Hash + 'static>(
3425 &mut self,
3426 key: &K,
3427 ranges: Vec<Range<Anchor>>,
3428 value: Arc<str>,
3429 ) {
3430 let occurrence_index = *self
3431 .key_to_occurrence_index
3432 .entry(Key::new(key))
3433 .or_insert_with(|| {
3434 let occurrence_index = self.next_occurrence_index;
3435 self.next_occurrence_index += 1;
3436 occurrence_index
3437 });
3438 let key = Key::new(key);
3439 let existing = self
3440 .ranges
3441 .iter()
3442 .enumerate()
3443 .rfind(|(_, existing)| existing.key == key);
3444 if let Some((existing_ix, _)) = existing {
3445 self.ranges.remove(existing_ix);
3446 }
3447 self.ranges.push(DebugRange {
3448 ranges,
3449 key,
3450 value,
3451 occurrence_index,
3452 });
3453 }
3454
3455 pub fn remove<K: Hash + 'static>(&mut self, key: &K) {
3456 self.remove_impl(&Key::new(key));
3457 }
3458
3459 fn remove_impl(&mut self, key: &Key) {
3460 let existing = self
3461 .ranges
3462 .iter()
3463 .enumerate()
3464 .rfind(|(_, existing)| &existing.key == key);
3465 if let Some((existing_ix, _)) = existing {
3466 self.ranges.remove(existing_ix);
3467 }
3468 }
3469
3470 pub fn remove_all_with_key_type<K: 'static>(&mut self) {
3471 self.ranges
3472 .retain(|item| item.key.type_id != TypeId::of::<K>());
3473 }
3474 }
3475
3476 impl Key {
3477 fn new<K: Hash + 'static>(key: &K) -> Self {
3478 let type_id = TypeId::of::<K>();
3479 let mut hasher = collections::FxHasher::default();
3480 key.hash(&mut hasher);
3481 Key {
3482 type_id,
3483 hash: hasher.finish(),
3484 }
3485 }
3486 }
3487
3488 pub trait ToDebugRanges {
3489 fn to_debug_ranges(&self, snapshot: &BufferSnapshot) -> Vec<Range<usize>>;
3490 }
3491
3492 impl<T: ToOffset> ToDebugRanges for T {
3493 fn to_debug_ranges(&self, snapshot: &BufferSnapshot) -> Vec<Range<usize>> {
3494 [self.to_offset(snapshot)].to_debug_ranges(snapshot)
3495 }
3496 }
3497
3498 impl<T: ToOffset + Clone> ToDebugRanges for Range<T> {
3499 fn to_debug_ranges(&self, snapshot: &BufferSnapshot) -> Vec<Range<usize>> {
3500 [self.clone()].to_debug_ranges(snapshot)
3501 }
3502 }
3503
3504 impl<T: ToOffset> ToDebugRanges for Vec<T> {
3505 fn to_debug_ranges(&self, snapshot: &BufferSnapshot) -> Vec<Range<usize>> {
3506 self.as_slice().to_debug_ranges(snapshot)
3507 }
3508 }
3509
3510 impl<T: ToOffset> ToDebugRanges for Vec<Range<T>> {
3511 fn to_debug_ranges(&self, snapshot: &BufferSnapshot) -> Vec<Range<usize>> {
3512 self.as_slice().to_debug_ranges(snapshot)
3513 }
3514 }
3515
3516 impl<T: ToOffset> ToDebugRanges for [T] {
3517 fn to_debug_ranges(&self, snapshot: &BufferSnapshot) -> Vec<Range<usize>> {
3518 self.iter()
3519 .map(|item| {
3520 let offset = item.to_offset(snapshot);
3521 offset..offset
3522 })
3523 .collect()
3524 }
3525 }
3526
3527 impl<T: ToOffset> ToDebugRanges for [Range<T>] {
3528 fn to_debug_ranges(&self, snapshot: &BufferSnapshot) -> Vec<Range<usize>> {
3529 self.iter()
3530 .map(|range| range.start.to_offset(snapshot)..range.end.to_offset(snapshot))
3531 .collect()
3532 }
3533 }
3534}