1mod anchor;
2pub mod locator;
3#[cfg(any(test, feature = "test-support"))]
4pub mod network;
5pub mod operation_queue;
6mod patch;
7mod selection;
8pub mod subscription;
9#[cfg(test)]
10mod tests;
11mod undo_map;
12
13pub use anchor::*;
14use anyhow::{Context as _, Result};
15use clock::Lamport;
16pub use clock::ReplicaId;
17use collections::{HashMap, HashSet};
18use locator::Locator;
19use operation_queue::OperationQueue;
20pub use patch::Patch;
21use postage::{oneshot, prelude::*};
22
23use regex::Regex;
24pub use rope::*;
25pub use selection::*;
26use std::{
27 borrow::Cow,
28 cmp::{self, Ordering, Reverse},
29 fmt::Display,
30 future::Future,
31 iter::Iterator,
32 num::NonZeroU64,
33 ops::{self, Deref, Range, Sub},
34 str,
35 sync::{Arc, LazyLock},
36 time::{Duration, Instant},
37};
38pub use subscription::*;
39pub use sum_tree::Bias;
40use sum_tree::{Dimensions, FilterCursor, SumTree, TreeMap, TreeSet};
41use undo_map::UndoMap;
42
43#[cfg(any(test, feature = "test-support"))]
44use util::RandomCharIter;
45
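// Matches Windows ("\r\n") and bare carriage-return ("\r") line separators so
// that they can be replaced with "\n" when line endings are normalized.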
46static LINE_SEPARATORS_REGEX: LazyLock<Regex> =
47 LazyLock::new(|| Regex::new(r"\r\n|\r").expect("Failed to create LINE_SEPARATORS_REGEX"));
48
49pub type TransactionId = clock::Lamport;
50
51pub struct Buffer {
52 snapshot: BufferSnapshot,
53 history: History,
54 deferred_ops: OperationQueue<Operation>,
55 deferred_replicas: HashSet<ReplicaId>,
56 pub lamport_clock: clock::Lamport,
57 subscriptions: Topic<usize>,
58 edit_id_resolvers: HashMap<clock::Lamport, Vec<oneshot::Sender<()>>>,
59 wait_for_version_txs: Vec<(clock::Global, oneshot::Sender<()>)>,
60}
61
62#[repr(transparent)]
63#[derive(Clone, Copy, Debug, Hash, PartialEq, PartialOrd, Ord, Eq)]
64pub struct BufferId(NonZeroU64);
65
66impl Display for BufferId {
67 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
68 write!(f, "{}", self.0)
69 }
70}
71
72impl From<NonZeroU64> for BufferId {
73 fn from(id: NonZeroU64) -> Self {
74 BufferId(id)
75 }
76}
77
78impl BufferId {
    /// Returns `Err` if `id` is outside of the `BufferId` domain (i.e. zero).
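    ///
    /// A sketch of the accepted domain (illustrative):
    ///
    /// ```ignore
    /// assert!(BufferId::new(0).is_err()); // zero is rejected (NonZeroU64)
    /// assert_eq!(BufferId::new(5).unwrap().to_proto(), 5);
    /// ```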
80 pub fn new(id: u64) -> anyhow::Result<Self> {
81 let id = NonZeroU64::new(id).context("Buffer id cannot be 0.")?;
82 Ok(Self(id))
83 }
84
    /// Increments this buffer id, returning the old value.
    /// In other words, this behaves like a post-increment operator.
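    ///
    /// A sketch (illustrative):
    ///
    /// ```ignore
    /// let mut id = BufferId::new(1).unwrap();
    /// assert_eq!(id.next().to_proto(), 1); // returns the previous value
    /// assert_eq!(id.to_proto(), 2); // the id itself has advanced
    /// ```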
87 pub fn next(&mut self) -> Self {
88 let old = *self;
89 self.0 = self.0.saturating_add(1);
90 old
91 }
92
93 pub fn to_proto(self) -> u64 {
94 self.into()
95 }
96}
97
98impl From<BufferId> for u64 {
99 fn from(id: BufferId) -> Self {
100 id.0.get()
101 }
102}
103
104#[derive(Clone)]
105pub struct BufferSnapshot {
106 replica_id: ReplicaId,
107 remote_id: BufferId,
108 visible_text: Rope,
109 deleted_text: Rope,
110 line_ending: LineEnding,
111 undo_map: UndoMap,
112 fragments: SumTree<Fragment>,
113 insertions: SumTree<InsertionFragment>,
114 insertion_slices: TreeSet<InsertionSlice>,
115 pub version: clock::Global,
116}
117
118#[derive(Clone, Debug)]
119pub struct HistoryEntry {
120 transaction: Transaction,
121 first_edit_at: Instant,
122 last_edit_at: Instant,
123 suppress_grouping: bool,
124}
125
126#[derive(Clone, Debug)]
127pub struct Transaction {
128 pub id: TransactionId,
129 pub edit_ids: Vec<clock::Lamport>,
130 pub start: clock::Global,
131}
132
133impl Transaction {
134 pub fn merge_in(&mut self, other: Transaction) {
135 self.edit_ids.extend(other.edit_ids);
136 }
137}
138
139impl HistoryEntry {
140 pub fn transaction_id(&self) -> TransactionId {
141 self.transaction.id
142 }
143}
144
145struct History {
146 base_text: Rope,
147 operations: TreeMap<clock::Lamport, Operation>,
148 undo_stack: Vec<HistoryEntry>,
149 redo_stack: Vec<HistoryEntry>,
150 transaction_depth: usize,
151 group_interval: Duration,
152}
153
154#[derive(Clone, Debug, Eq, PartialEq)]
155struct InsertionSlice {
156 edit_id: clock::Lamport,
157 insertion_id: clock::Lamport,
158 range: Range<usize>,
159}
160
161impl Ord for InsertionSlice {
162 fn cmp(&self, other: &Self) -> Ordering {
163 self.edit_id
164 .cmp(&other.edit_id)
165 .then_with(|| self.insertion_id.cmp(&other.insertion_id))
166 .then_with(|| self.range.start.cmp(&other.range.start))
167 .then_with(|| self.range.end.cmp(&other.range.end))
168 }
169}
170
171impl PartialOrd for InsertionSlice {
172 fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
173 Some(self.cmp(other))
174 }
175}
176
177impl InsertionSlice {
178 fn from_fragment(edit_id: clock::Lamport, fragment: &Fragment) -> Self {
179 Self {
180 edit_id,
181 insertion_id: fragment.timestamp,
182 range: fragment.insertion_offset..fragment.insertion_offset + fragment.len,
183 }
184 }
185}
186
187impl History {
188 pub fn new(base_text: Rope) -> Self {
189 Self {
190 base_text,
191 operations: Default::default(),
192 undo_stack: Vec::new(),
193 redo_stack: Vec::new(),
194 transaction_depth: 0,
195 // Don't group transactions in tests unless we opt in, because it's a footgun.
196 #[cfg(any(test, feature = "test-support"))]
197 group_interval: Duration::ZERO,
198 #[cfg(not(any(test, feature = "test-support")))]
199 group_interval: Duration::from_millis(300),
200 }
201 }
202
203 fn push(&mut self, op: Operation) {
204 self.operations.insert(op.timestamp(), op);
205 }
206
207 fn start_transaction(
208 &mut self,
209 start: clock::Global,
210 now: Instant,
211 clock: &mut clock::Lamport,
212 ) -> Option<TransactionId> {
213 self.transaction_depth += 1;
214 if self.transaction_depth == 1 {
215 let id = clock.tick();
216 self.undo_stack.push(HistoryEntry {
217 transaction: Transaction {
218 id,
219 start,
220 edit_ids: Default::default(),
221 },
222 first_edit_at: now,
223 last_edit_at: now,
224 suppress_grouping: false,
225 });
226 Some(id)
227 } else {
228 None
229 }
230 }
231
232 fn end_transaction(&mut self, now: Instant) -> Option<&HistoryEntry> {
233 assert_ne!(self.transaction_depth, 0);
234 self.transaction_depth -= 1;
235 if self.transaction_depth == 0 {
236 if self
237 .undo_stack
238 .last()
239 .unwrap()
240 .transaction
241 .edit_ids
242 .is_empty()
243 {
244 self.undo_stack.pop();
245 None
246 } else {
247 self.redo_stack.clear();
248 let entry = self.undo_stack.last_mut().unwrap();
249 entry.last_edit_at = now;
250 Some(entry)
251 }
252 } else {
253 None
254 }
255 }
256
257 fn group(&mut self) -> Option<TransactionId> {
258 let mut count = 0;
259 let mut entries = self.undo_stack.iter();
260 if let Some(mut entry) = entries.next_back() {
261 while let Some(prev_entry) = entries.next_back() {
262 if !prev_entry.suppress_grouping
263 && entry.first_edit_at - prev_entry.last_edit_at < self.group_interval
264 {
265 entry = prev_entry;
266 count += 1;
267 } else {
268 break;
269 }
270 }
271 }
272 self.group_trailing(count)
273 }
274
275 fn group_until(&mut self, transaction_id: TransactionId) {
276 let mut count = 0;
277 for entry in self.undo_stack.iter().rev() {
278 if entry.transaction_id() == transaction_id {
279 self.group_trailing(count);
280 break;
281 } else if entry.suppress_grouping {
282 break;
283 } else {
284 count += 1;
285 }
286 }
287 }
288
289 fn group_trailing(&mut self, n: usize) -> Option<TransactionId> {
290 let new_len = self.undo_stack.len() - n;
291 let (entries_to_keep, entries_to_merge) = self.undo_stack.split_at_mut(new_len);
292 if let Some(last_entry) = entries_to_keep.last_mut() {
293 for entry in &*entries_to_merge {
294 for edit_id in &entry.transaction.edit_ids {
295 last_entry.transaction.edit_ids.push(*edit_id);
296 }
297 }
298
299 if let Some(entry) = entries_to_merge.last_mut() {
300 last_entry.last_edit_at = entry.last_edit_at;
301 }
302 }
303
304 self.undo_stack.truncate(new_len);
305 self.undo_stack.last().map(|e| e.transaction.id)
306 }
307
308 fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
309 self.undo_stack.last_mut().map(|entry| {
310 entry.suppress_grouping = true;
311 &entry.transaction
312 })
313 }
314
315 fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
316 assert_eq!(self.transaction_depth, 0);
317 self.undo_stack.push(HistoryEntry {
318 transaction,
319 first_edit_at: now,
320 last_edit_at: now,
321 suppress_grouping: false,
322 });
323 }
324
    /// Differs from `push_transaction` in that it does not clear the redo
    /// stack. Intended to be used to create a parent transaction into which
    /// potential child transactions can be merged.
    ///
    /// The caller is responsible for removing it from the undo history using
    /// `forget_transaction` if no edits are merged into it. Otherwise, if edits
    /// are merged into this transaction, the caller is responsible for ensuring
    /// the redo stack is cleared. The easiest way to do that is to create the
    /// child transactions with the usual `start_transaction` and
    /// `end_transaction` methods and then merge the resulting transactions into
    /// the transaction created by this method.
336 fn push_empty_transaction(
337 &mut self,
338 start: clock::Global,
339 now: Instant,
340 clock: &mut clock::Lamport,
341 ) -> TransactionId {
342 assert_eq!(self.transaction_depth, 0);
343 let id = clock.tick();
344 let transaction = Transaction {
345 id,
346 start,
347 edit_ids: Vec::new(),
348 };
349 self.undo_stack.push(HistoryEntry {
350 transaction,
351 first_edit_at: now,
352 last_edit_at: now,
353 suppress_grouping: false,
354 });
355 id
356 }
357
358 fn push_undo(&mut self, op_id: clock::Lamport) {
359 assert_ne!(self.transaction_depth, 0);
360 if let Some(Operation::Edit(_)) = self.operations.get(&op_id) {
361 let last_transaction = self.undo_stack.last_mut().unwrap();
362 last_transaction.transaction.edit_ids.push(op_id);
363 }
364 }
365
366 fn pop_undo(&mut self) -> Option<&HistoryEntry> {
367 assert_eq!(self.transaction_depth, 0);
368 if let Some(entry) = self.undo_stack.pop() {
369 self.redo_stack.push(entry);
370 self.redo_stack.last()
371 } else {
372 None
373 }
374 }
375
376 fn remove_from_undo(&mut self, transaction_id: TransactionId) -> Option<&HistoryEntry> {
377 assert_eq!(self.transaction_depth, 0);
378
379 let entry_ix = self
380 .undo_stack
381 .iter()
382 .rposition(|entry| entry.transaction.id == transaction_id)?;
383 let entry = self.undo_stack.remove(entry_ix);
384 self.redo_stack.push(entry);
385 self.redo_stack.last()
386 }
387
388 fn remove_from_undo_until(&mut self, transaction_id: TransactionId) -> &[HistoryEntry] {
389 assert_eq!(self.transaction_depth, 0);
390
391 let redo_stack_start_len = self.redo_stack.len();
392 if let Some(entry_ix) = self
393 .undo_stack
394 .iter()
395 .rposition(|entry| entry.transaction.id == transaction_id)
396 {
397 self.redo_stack
398 .extend(self.undo_stack.drain(entry_ix..).rev());
399 }
400 &self.redo_stack[redo_stack_start_len..]
401 }
402
403 fn forget(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
404 assert_eq!(self.transaction_depth, 0);
405 if let Some(entry_ix) = self
406 .undo_stack
407 .iter()
408 .rposition(|entry| entry.transaction.id == transaction_id)
409 {
410 Some(self.undo_stack.remove(entry_ix).transaction)
411 } else if let Some(entry_ix) = self
412 .redo_stack
413 .iter()
414 .rposition(|entry| entry.transaction.id == transaction_id)
415 {
416 Some(self.redo_stack.remove(entry_ix).transaction)
417 } else {
418 None
419 }
420 }
421
422 fn transaction(&self, transaction_id: TransactionId) -> Option<&Transaction> {
423 let entry = self
424 .undo_stack
425 .iter()
426 .rfind(|entry| entry.transaction.id == transaction_id)
427 .or_else(|| {
428 self.redo_stack
429 .iter()
430 .rfind(|entry| entry.transaction.id == transaction_id)
431 })?;
432 Some(&entry.transaction)
433 }
434
435 fn transaction_mut(&mut self, transaction_id: TransactionId) -> Option<&mut Transaction> {
436 let entry = self
437 .undo_stack
438 .iter_mut()
439 .rfind(|entry| entry.transaction.id == transaction_id)
440 .or_else(|| {
441 self.redo_stack
442 .iter_mut()
443 .rfind(|entry| entry.transaction.id == transaction_id)
444 })?;
445 Some(&mut entry.transaction)
446 }
447
448 fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
449 if let Some(transaction) = self.forget(transaction)
450 && let Some(destination) = self.transaction_mut(destination)
451 {
452 destination.edit_ids.extend(transaction.edit_ids);
453 }
454 }
455
456 fn pop_redo(&mut self) -> Option<&HistoryEntry> {
457 assert_eq!(self.transaction_depth, 0);
458 if let Some(entry) = self.redo_stack.pop() {
459 self.undo_stack.push(entry);
460 self.undo_stack.last()
461 } else {
462 None
463 }
464 }
465
466 fn remove_from_redo(&mut self, transaction_id: TransactionId) -> &[HistoryEntry] {
467 assert_eq!(self.transaction_depth, 0);
468
469 let undo_stack_start_len = self.undo_stack.len();
470 if let Some(entry_ix) = self
471 .redo_stack
472 .iter()
473 .rposition(|entry| entry.transaction.id == transaction_id)
474 {
475 self.undo_stack
476 .extend(self.redo_stack.drain(entry_ix..).rev());
477 }
478 &self.undo_stack[undo_stack_start_len..]
479 }
480}
481
482struct Edits<'a, D: TextDimension, F: FnMut(&FragmentSummary) -> bool> {
483 visible_cursor: rope::Cursor<'a>,
484 deleted_cursor: rope::Cursor<'a>,
485 fragments_cursor: Option<FilterCursor<'a, 'static, F, Fragment, FragmentTextSummary>>,
486 undos: &'a UndoMap,
487 since: &'a clock::Global,
488 old_end: D,
489 new_end: D,
490 range: Range<(&'a Locator, usize)>,
491 buffer_id: BufferId,
492}
493
494#[derive(Clone, Debug, Default, Eq, PartialEq)]
495pub struct Edit<D> {
496 pub old: Range<D>,
497 pub new: Range<D>,
498}

impl<D> Edit<D>
500where
501 D: PartialEq,
502{
503 pub fn is_empty(&self) -> bool {
504 self.old.start == self.old.end && self.new.start == self.new.end
505 }
506}
507
508impl<D, DDelta> Edit<D>
509where
510 D: Sub<D, Output = DDelta> + Copy,
511{
512 pub fn old_len(&self) -> DDelta {
513 self.old.end - self.old.start
514 }
515
516 pub fn new_len(&self) -> DDelta {
517 self.new.end - self.new.start
518 }
519}
520
521impl<D1, D2> Edit<(D1, D2)> {
522 pub fn flatten(self) -> (Edit<D1>, Edit<D2>) {
523 (
524 Edit {
525 old: self.old.start.0..self.old.end.0,
526 new: self.new.start.0..self.new.end.0,
527 },
528 Edit {
529 old: self.old.start.1..self.old.end.1,
530 new: self.new.start.1..self.new.end.1,
531 },
532 )
533 }
534}
535
536#[derive(Eq, PartialEq, Clone, Debug)]
537pub struct Fragment {
538 pub id: Locator,
539 pub timestamp: clock::Lamport,
540 pub insertion_offset: usize,
541 pub len: usize,
542 pub visible: bool,
543 pub deletions: HashSet<clock::Lamport>,
544 pub max_undos: clock::Global,
545}
546
547#[derive(Eq, PartialEq, Clone, Debug)]
548pub struct FragmentSummary {
549 text: FragmentTextSummary,
550 max_id: Locator,
551 max_version: clock::Global,
552 min_insertion_version: clock::Global,
553 max_insertion_version: clock::Global,
554}
555
556#[derive(Copy, Default, Clone, Debug, PartialEq, Eq)]
557struct FragmentTextSummary {
558 visible: usize,
559 deleted: usize,
560}
561
562impl<'a> sum_tree::Dimension<'a, FragmentSummary> for FragmentTextSummary {
563 fn zero(_: &Option<clock::Global>) -> Self {
564 Default::default()
565 }
566
567 fn add_summary(&mut self, summary: &'a FragmentSummary, _: &Option<clock::Global>) {
568 self.visible += summary.text.visible;
569 self.deleted += summary.text.deleted;
570 }
571}
572
573#[derive(Eq, PartialEq, Clone, Debug)]
574struct InsertionFragment {
575 timestamp: clock::Lamport,
576 split_offset: usize,
577 fragment_id: Locator,
578}
579
580#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)]
581struct InsertionFragmentKey {
582 timestamp: clock::Lamport,
583 split_offset: usize,
584}
585
586#[derive(Clone, Debug, Eq, PartialEq)]
587pub enum Operation {
588 Edit(EditOperation),
589 Undo(UndoOperation),
590}
591
592#[derive(Clone, Debug, Eq, PartialEq)]
593pub struct EditOperation {
594 pub timestamp: clock::Lamport,
595 pub version: clock::Global,
596 pub ranges: Vec<Range<FullOffset>>,
597 pub new_text: Vec<Arc<str>>,
598}
599
600#[derive(Clone, Debug, Eq, PartialEq)]
601pub struct UndoOperation {
602 pub timestamp: clock::Lamport,
603 pub version: clock::Global,
604 pub counts: HashMap<clock::Lamport, u32>,
605}
606
607/// Stores information about the indentation of a line (tabs and spaces).
608#[derive(Clone, Copy, Debug, Eq, PartialEq)]
609pub struct LineIndent {
610 pub tabs: u32,
611 pub spaces: u32,
612 pub line_blank: bool,
613}
614
615impl LineIndent {
616 pub fn from_chunks(chunks: &mut Chunks) -> Self {
617 let mut tabs = 0;
618 let mut spaces = 0;
619 let mut line_blank = true;
620
621 'outer: while let Some(chunk) = chunks.peek() {
622 for ch in chunk.chars() {
623 if ch == '\t' {
624 tabs += 1;
625 } else if ch == ' ' {
626 spaces += 1;
627 } else {
628 if ch != '\n' {
629 line_blank = false;
630 }
631 break 'outer;
632 }
633 }
634
635 chunks.next();
636 }
637
638 Self {
639 tabs,
640 spaces,
641 line_blank,
642 }
643 }
644
645 /// Constructs a new `LineIndent` which only contains spaces.
646 pub fn spaces(spaces: u32) -> Self {
647 Self {
648 tabs: 0,
649 spaces,
650 line_blank: true,
651 }
652 }
653
654 /// Constructs a new `LineIndent` which only contains tabs.
655 pub fn tabs(tabs: u32) -> Self {
656 Self {
657 tabs,
658 spaces: 0,
659 line_blank: true,
660 }
661 }
662
663 /// Indicates whether the line is empty.
664 pub fn is_line_empty(&self) -> bool {
665 self.tabs == 0 && self.spaces == 0 && self.line_blank
666 }
667
668 /// Indicates whether the line is blank (contains only whitespace).
669 pub fn is_line_blank(&self) -> bool {
670 self.line_blank
671 }
672
673 /// Returns the number of indentation characters (tabs or spaces).
674 pub fn raw_len(&self) -> u32 {
675 self.tabs + self.spaces
676 }
677
678 /// Returns the number of indentation characters (tabs or spaces), taking tab size into account.
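    ///
    /// A sketch (illustrative):
    ///
    /// ```ignore
    /// let indent = LineIndent::from("\t  code");
    /// assert_eq!(indent.raw_len(), 3); // one tab plus two spaces
    /// assert_eq!(indent.len(4), 6); // the tab expands to the given tab size
    /// ```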
679 pub fn len(&self, tab_size: u32) -> u32 {
680 self.tabs * tab_size + self.spaces
681 }
682}
683
684impl From<&str> for LineIndent {
685 fn from(value: &str) -> Self {
686 Self::from_iter(value.chars())
687 }
688}
689
690impl FromIterator<char> for LineIndent {
691 fn from_iter<T: IntoIterator<Item = char>>(chars: T) -> Self {
692 let mut tabs = 0;
693 let mut spaces = 0;
694 let mut line_blank = true;
695 for c in chars {
696 if c == '\t' {
697 tabs += 1;
698 } else if c == ' ' {
699 spaces += 1;
700 } else {
701 if c != '\n' {
702 line_blank = false;
703 }
704 break;
705 }
706 }
707 Self {
708 tabs,
709 spaces,
710 line_blank,
711 }
712 }
713}
714
715impl Buffer {
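    /// A construction sketch (illustrative; the replica and buffer ids shown
    /// here are arbitrary):
    ///
    /// ```ignore
    /// let id = BufferId::new(1).unwrap();
    /// let buffer = Buffer::new(ReplicaId::LOCAL, id, "a\r\nb");
    /// // Line endings are detected and normalized, so the stored text uses "\n".
    /// assert_eq!(buffer.text(), "a\nb");
    /// ```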
716 pub fn new(replica_id: ReplicaId, remote_id: BufferId, base_text: impl Into<String>) -> Buffer {
717 let mut base_text = base_text.into();
718 let line_ending = LineEnding::detect(&base_text);
719 LineEnding::normalize(&mut base_text);
720 Self::new_normalized(replica_id, remote_id, line_ending, Rope::from(&*base_text))
721 }
722
723 pub fn new_normalized(
724 replica_id: ReplicaId,
725 remote_id: BufferId,
726 line_ending: LineEnding,
727 normalized: Rope,
728 ) -> Buffer {
729 let history = History::new(normalized);
730 let mut fragments = SumTree::new(&None);
731 let mut insertions = SumTree::default();
732
733 let mut lamport_clock = clock::Lamport::new(replica_id);
734 let mut version = clock::Global::new();
735
736 let visible_text = history.base_text.clone();
737 if !visible_text.is_empty() {
738 let insertion_timestamp = clock::Lamport::new(ReplicaId::LOCAL);
739 lamport_clock.observe(insertion_timestamp);
740 version.observe(insertion_timestamp);
741 let fragment_id = Locator::between(&Locator::min(), &Locator::max());
742 let fragment = Fragment {
743 id: fragment_id,
744 timestamp: insertion_timestamp,
745 insertion_offset: 0,
746 len: visible_text.len(),
747 visible: true,
748 deletions: Default::default(),
749 max_undos: Default::default(),
750 };
751 insertions.push(InsertionFragment::new(&fragment), ());
752 fragments.push(fragment, &None);
753 }
754
755 Buffer {
756 snapshot: BufferSnapshot {
757 replica_id,
758 remote_id,
759 visible_text,
760 deleted_text: Rope::new(),
761 line_ending,
762 fragments,
763 insertions,
764 version,
765 undo_map: Default::default(),
766 insertion_slices: Default::default(),
767 },
768 history,
769 deferred_ops: OperationQueue::new(),
770 deferred_replicas: HashSet::default(),
771 lamport_clock,
772 subscriptions: Default::default(),
773 edit_id_resolvers: Default::default(),
774 wait_for_version_txs: Default::default(),
775 }
776 }
777
778 pub fn version(&self) -> clock::Global {
779 self.version.clone()
780 }
781
782 pub fn snapshot(&self) -> BufferSnapshot {
783 self.snapshot.clone()
784 }
785
786 pub fn branch(&self) -> Self {
787 Self {
788 snapshot: self.snapshot.clone(),
789 history: History::new(self.base_text().clone()),
790 deferred_ops: OperationQueue::new(),
791 deferred_replicas: HashSet::default(),
792 lamport_clock: clock::Lamport::new(ReplicaId::LOCAL_BRANCH),
793 subscriptions: Default::default(),
794 edit_id_resolvers: Default::default(),
795 wait_for_version_txs: Default::default(),
796 }
797 }
798
799 pub fn replica_id(&self) -> ReplicaId {
800 self.lamport_clock.replica_id
801 }
802
803 pub fn remote_id(&self) -> BufferId {
804 self.remote_id
805 }
806
807 pub fn deferred_ops_len(&self) -> usize {
808 self.deferred_ops.len()
809 }
810
811 pub fn transaction_group_interval(&self) -> Duration {
812 self.history.group_interval
813 }
814
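    /// Applies a batch of edits, each replacing a range of the current text
    /// with new text, and returns the resulting `Operation` for broadcast to
    /// other replicas. A minimal sketch (illustrative), assuming a buffer whose
    /// current text is "abc" and ranges expressed against the pre-edit text:
    ///
    /// ```ignore
    /// let _op = buffer.edit([(0..0, "x"), (2..3, "yz")]);
    /// assert_eq!(buffer.text(), "xabyz");
    /// ```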
815 pub fn edit<R, I, S, T>(&mut self, edits: R) -> Operation
816 where
817 R: IntoIterator<IntoIter = I>,
818 I: ExactSizeIterator<Item = (Range<S>, T)>,
819 S: ToOffset,
820 T: Into<Arc<str>>,
821 {
822 let edits = edits
823 .into_iter()
824 .map(|(range, new_text)| (range, new_text.into()));
825
826 self.start_transaction();
827 let timestamp = self.lamport_clock.tick();
828 let operation = Operation::Edit(self.apply_local_edit(edits, timestamp));
829
830 self.history.push(operation.clone());
831 self.history.push_undo(operation.timestamp());
832 self.snapshot.version.observe(operation.timestamp());
833 self.end_transaction();
834 operation
835 }
836
837 fn apply_local_edit<S: ToOffset, T: Into<Arc<str>>>(
838 &mut self,
839 edits: impl ExactSizeIterator<Item = (Range<S>, T)>,
840 timestamp: clock::Lamport,
841 ) -> EditOperation {
842 let mut edits_patch = Patch::default();
843 let mut edit_op = EditOperation {
844 timestamp,
845 version: self.version(),
846 ranges: Vec::with_capacity(edits.len()),
847 new_text: Vec::with_capacity(edits.len()),
848 };
849 let mut new_insertions = Vec::new();
850 let mut insertion_offset = 0;
851 let mut insertion_slices = Vec::new();
852
853 let mut edits = edits
854 .map(|(range, new_text)| (range.to_offset(&*self), new_text))
855 .peekable();
856
857 let mut new_ropes =
858 RopeBuilder::new(self.visible_text.cursor(0), self.deleted_text.cursor(0));
859 let mut old_fragments = self.fragments.cursor::<FragmentTextSummary>(&None);
860 let mut new_fragments = old_fragments.slice(&edits.peek().unwrap().0.start, Bias::Right);
861 new_ropes.append(new_fragments.summary().text);
862
863 let mut fragment_start = old_fragments.start().visible;
864 for (range, new_text) in edits {
865 let new_text = LineEnding::normalize_arc(new_text.into());
866 let fragment_end = old_fragments.end().visible;
867
868 // If the current fragment ends before this range, then jump ahead to the first fragment
869 // that extends past the start of this range, reusing any intervening fragments.
870 if fragment_end < range.start {
871 // If the current fragment has been partially consumed, then consume the rest of it
872 // and advance to the next fragment before slicing.
873 if fragment_start > old_fragments.start().visible {
874 if fragment_end > fragment_start {
875 let mut suffix = old_fragments.item().unwrap().clone();
876 suffix.len = fragment_end - fragment_start;
877 suffix.insertion_offset += fragment_start - old_fragments.start().visible;
878 new_insertions.push(InsertionFragment::insert_new(&suffix));
879 new_ropes.push_fragment(&suffix, suffix.visible);
880 new_fragments.push(suffix, &None);
881 }
882 old_fragments.next();
883 }
884
885 let slice = old_fragments.slice(&range.start, Bias::Right);
886 new_ropes.append(slice.summary().text);
887 new_fragments.append(slice, &None);
888 fragment_start = old_fragments.start().visible;
889 }
890
891 let full_range_start = FullOffset(range.start + old_fragments.start().deleted);
892
893 // Preserve any portion of the current fragment that precedes this range.
894 if fragment_start < range.start {
895 let mut prefix = old_fragments.item().unwrap().clone();
896 prefix.len = range.start - fragment_start;
897 prefix.insertion_offset += fragment_start - old_fragments.start().visible;
898 prefix.id = Locator::between(&new_fragments.summary().max_id, &prefix.id);
899 new_insertions.push(InsertionFragment::insert_new(&prefix));
900 new_ropes.push_fragment(&prefix, prefix.visible);
901 new_fragments.push(prefix, &None);
902 fragment_start = range.start;
903 }
904
905 // Insert the new text before any existing fragments within the range.
906 if !new_text.is_empty() {
907 let new_start = new_fragments.summary().text.visible;
908
909 let fragment = Fragment {
910 id: Locator::between(
911 &new_fragments.summary().max_id,
912 old_fragments
913 .item()
914 .map_or(&Locator::max(), |old_fragment| &old_fragment.id),
915 ),
916 timestamp,
917 insertion_offset,
918 len: new_text.len(),
919 deletions: Default::default(),
920 max_undos: Default::default(),
921 visible: true,
922 };
923 edits_patch.push(Edit {
924 old: fragment_start..fragment_start,
925 new: new_start..new_start + new_text.len(),
926 });
927 insertion_slices.push(InsertionSlice::from_fragment(timestamp, &fragment));
928 new_insertions.push(InsertionFragment::insert_new(&fragment));
929 new_ropes.push_str(new_text.as_ref());
930 new_fragments.push(fragment, &None);
931 insertion_offset += new_text.len();
932 }
933
934 // Advance through every fragment that intersects this range, marking the intersecting
935 // portions as deleted.
936 while fragment_start < range.end {
937 let fragment = old_fragments.item().unwrap();
938 let fragment_end = old_fragments.end().visible;
939 let mut intersection = fragment.clone();
940 let intersection_end = cmp::min(range.end, fragment_end);
941 if fragment.visible {
942 intersection.len = intersection_end - fragment_start;
943 intersection.insertion_offset += fragment_start - old_fragments.start().visible;
944 intersection.id =
945 Locator::between(&new_fragments.summary().max_id, &intersection.id);
946 intersection.deletions.insert(timestamp);
947 intersection.visible = false;
948 }
949 if intersection.len > 0 {
950 if fragment.visible && !intersection.visible {
951 let new_start = new_fragments.summary().text.visible;
952 edits_patch.push(Edit {
953 old: fragment_start..intersection_end,
954 new: new_start..new_start,
955 });
956 insertion_slices
957 .push(InsertionSlice::from_fragment(timestamp, &intersection));
958 }
959 new_insertions.push(InsertionFragment::insert_new(&intersection));
960 new_ropes.push_fragment(&intersection, fragment.visible);
961 new_fragments.push(intersection, &None);
962 fragment_start = intersection_end;
963 }
964 if fragment_end <= range.end {
965 old_fragments.next();
966 }
967 }
968
969 let full_range_end = FullOffset(range.end + old_fragments.start().deleted);
970 edit_op.ranges.push(full_range_start..full_range_end);
971 edit_op.new_text.push(new_text);
972 }
973
974 // If the current fragment has been partially consumed, then consume the rest of it
975 // and advance to the next fragment before slicing.
976 if fragment_start > old_fragments.start().visible {
977 let fragment_end = old_fragments.end().visible;
978 if fragment_end > fragment_start {
979 let mut suffix = old_fragments.item().unwrap().clone();
980 suffix.len = fragment_end - fragment_start;
981 suffix.insertion_offset += fragment_start - old_fragments.start().visible;
982 new_insertions.push(InsertionFragment::insert_new(&suffix));
983 new_ropes.push_fragment(&suffix, suffix.visible);
984 new_fragments.push(suffix, &None);
985 }
986 old_fragments.next();
987 }
988
989 let suffix = old_fragments.suffix();
990 new_ropes.append(suffix.summary().text);
991 new_fragments.append(suffix, &None);
992 let (visible_text, deleted_text) = new_ropes.finish();
993 drop(old_fragments);
994
995 self.snapshot.fragments = new_fragments;
996 self.snapshot.insertions.edit(new_insertions, ());
997 self.snapshot.visible_text = visible_text;
998 self.snapshot.deleted_text = deleted_text;
999 self.subscriptions.publish_mut(&edits_patch);
1000 self.snapshot.insertion_slices.extend(insertion_slices);
1001 edit_op
1002 }
1003
1004 pub fn set_line_ending(&mut self, line_ending: LineEnding) {
1005 self.snapshot.line_ending = line_ending;
1006 }
1007
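    /// Applies operations received from other replicas. Operations whose
    /// causal dependencies have not yet been observed are deferred and retried
    /// once the missing operations arrive. A replication sketch (illustrative;
    /// `buffer_a` and `buffer_b` are assumed replicas of the same buffer):
    ///
    /// ```ignore
    /// let op = buffer_a.edit([(0..0, "hi")]);
    /// buffer_b.apply_ops([op]);
    /// assert_eq!(buffer_a.text(), buffer_b.text());
    /// ```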
1008 pub fn apply_ops<I: IntoIterator<Item = Operation>>(&mut self, ops: I) {
1009 let mut deferred_ops = Vec::new();
1010 for op in ops {
1011 self.history.push(op.clone());
1012 if self.can_apply_op(&op) {
1013 self.apply_op(op);
1014 } else {
1015 self.deferred_replicas.insert(op.replica_id());
1016 deferred_ops.push(op);
1017 }
1018 }
1019 self.deferred_ops.insert(deferred_ops);
1020 self.flush_deferred_ops();
1021 }
1022
1023 fn apply_op(&mut self, op: Operation) {
1024 match op {
1025 Operation::Edit(edit) => {
1026 if !self.version.observed(edit.timestamp) {
1027 self.apply_remote_edit(
1028 &edit.version,
1029 &edit.ranges,
1030 &edit.new_text,
1031 edit.timestamp,
1032 );
1033 self.snapshot.version.observe(edit.timestamp);
1034 self.lamport_clock.observe(edit.timestamp);
1035 self.resolve_edit(edit.timestamp);
1036 }
1037 }
1038 Operation::Undo(undo) => {
1039 if !self.version.observed(undo.timestamp) {
1040 self.apply_undo(&undo);
1041 self.snapshot.version.observe(undo.timestamp);
1042 self.lamport_clock.observe(undo.timestamp);
1043 }
1044 }
1045 }
1046 self.wait_for_version_txs.retain_mut(|(version, tx)| {
1047 if self.snapshot.version().observed_all(version) {
1048 tx.try_send(()).ok();
1049 false
1050 } else {
1051 true
1052 }
1053 });
1054 }
1055
1056 fn apply_remote_edit(
1057 &mut self,
1058 version: &clock::Global,
1059 ranges: &[Range<FullOffset>],
1060 new_text: &[Arc<str>],
1061 timestamp: clock::Lamport,
1062 ) {
1063 if ranges.is_empty() {
1064 return;
1065 }
1066
1067 let edits = ranges.iter().zip(new_text.iter());
1068 let mut edits_patch = Patch::default();
1069 let mut insertion_slices = Vec::new();
1070 let cx = Some(version.clone());
1071 let mut new_insertions = Vec::new();
1072 let mut insertion_offset = 0;
1073 let mut new_ropes =
1074 RopeBuilder::new(self.visible_text.cursor(0), self.deleted_text.cursor(0));
1075 let mut old_fragments = self
1076 .fragments
1077 .cursor::<Dimensions<VersionedFullOffset, usize>>(&cx);
1078 let mut new_fragments =
1079 old_fragments.slice(&VersionedFullOffset::Offset(ranges[0].start), Bias::Left);
1080 new_ropes.append(new_fragments.summary().text);
1081
1082 let mut fragment_start = old_fragments.start().0.full_offset();
1083 for (range, new_text) in edits {
1084 let fragment_end = old_fragments.end().0.full_offset();
1085
1086 // If the current fragment ends before this range, then jump ahead to the first fragment
1087 // that extends past the start of this range, reusing any intervening fragments.
1088 if fragment_end < range.start {
1089 // If the current fragment has been partially consumed, then consume the rest of it
1090 // and advance to the next fragment before slicing.
1091 if fragment_start > old_fragments.start().0.full_offset() {
1092 if fragment_end > fragment_start {
1093 let mut suffix = old_fragments.item().unwrap().clone();
1094 suffix.len = fragment_end.0 - fragment_start.0;
1095 suffix.insertion_offset +=
1096 fragment_start - old_fragments.start().0.full_offset();
1097 new_insertions.push(InsertionFragment::insert_new(&suffix));
1098 new_ropes.push_fragment(&suffix, suffix.visible);
1099 new_fragments.push(suffix, &None);
1100 }
1101 old_fragments.next();
1102 }
1103
1104 let slice =
1105 old_fragments.slice(&VersionedFullOffset::Offset(range.start), Bias::Left);
1106 new_ropes.append(slice.summary().text);
1107 new_fragments.append(slice, &None);
1108 fragment_start = old_fragments.start().0.full_offset();
1109 }
1110
1111 // If we are at the end of a non-concurrent fragment, advance to the next one.
1112 let fragment_end = old_fragments.end().0.full_offset();
1113 if fragment_end == range.start && fragment_end > fragment_start {
1114 let mut fragment = old_fragments.item().unwrap().clone();
1115 fragment.len = fragment_end.0 - fragment_start.0;
1116 fragment.insertion_offset += fragment_start - old_fragments.start().0.full_offset();
1117 new_insertions.push(InsertionFragment::insert_new(&fragment));
1118 new_ropes.push_fragment(&fragment, fragment.visible);
1119 new_fragments.push(fragment, &None);
1120 old_fragments.next();
1121 fragment_start = old_fragments.start().0.full_offset();
1122 }
1123
            // Skip over insertions that are concurrent to this edit but have a higher
            // Lamport timestamp, so that they remain ordered before this edit's insertion.
1126 while let Some(fragment) = old_fragments.item() {
1127 if fragment_start == range.start && fragment.timestamp > timestamp {
1128 new_ropes.push_fragment(fragment, fragment.visible);
1129 new_fragments.push(fragment.clone(), &None);
1130 old_fragments.next();
1131 debug_assert_eq!(fragment_start, range.start);
1132 } else {
1133 break;
1134 }
1135 }
1136 debug_assert!(fragment_start <= range.start);
1137
1138 // Preserve any portion of the current fragment that precedes this range.
1139 if fragment_start < range.start {
1140 let mut prefix = old_fragments.item().unwrap().clone();
1141 prefix.len = range.start.0 - fragment_start.0;
1142 prefix.insertion_offset += fragment_start - old_fragments.start().0.full_offset();
1143 prefix.id = Locator::between(&new_fragments.summary().max_id, &prefix.id);
1144 new_insertions.push(InsertionFragment::insert_new(&prefix));
1145 fragment_start = range.start;
1146 new_ropes.push_fragment(&prefix, prefix.visible);
1147 new_fragments.push(prefix, &None);
1148 }
1149
1150 // Insert the new text before any existing fragments within the range.
1151 if !new_text.is_empty() {
1152 let mut old_start = old_fragments.start().1;
1153 if old_fragments.item().is_some_and(|f| f.visible) {
1154 old_start += fragment_start.0 - old_fragments.start().0.full_offset().0;
1155 }
1156 let new_start = new_fragments.summary().text.visible;
1157 let fragment = Fragment {
1158 id: Locator::between(
1159 &new_fragments.summary().max_id,
1160 old_fragments
1161 .item()
1162 .map_or(&Locator::max(), |old_fragment| &old_fragment.id),
1163 ),
1164 timestamp,
1165 insertion_offset,
1166 len: new_text.len(),
1167 deletions: Default::default(),
1168 max_undos: Default::default(),
1169 visible: true,
1170 };
1171 edits_patch.push(Edit {
1172 old: old_start..old_start,
1173 new: new_start..new_start + new_text.len(),
1174 });
1175 insertion_slices.push(InsertionSlice::from_fragment(timestamp, &fragment));
1176 new_insertions.push(InsertionFragment::insert_new(&fragment));
1177 new_ropes.push_str(new_text);
1178 new_fragments.push(fragment, &None);
1179 insertion_offset += new_text.len();
1180 }
1181
1182 // Advance through every fragment that intersects this range, marking the intersecting
1183 // portions as deleted.
1184 while fragment_start < range.end {
1185 let fragment = old_fragments.item().unwrap();
1186 let fragment_end = old_fragments.end().0.full_offset();
1187 let mut intersection = fragment.clone();
1188 let intersection_end = cmp::min(range.end, fragment_end);
1189 if fragment.was_visible(version, &self.undo_map) {
1190 intersection.len = intersection_end.0 - fragment_start.0;
1191 intersection.insertion_offset +=
1192 fragment_start - old_fragments.start().0.full_offset();
1193 intersection.id =
1194 Locator::between(&new_fragments.summary().max_id, &intersection.id);
1195 intersection.deletions.insert(timestamp);
1196 intersection.visible = false;
1197 insertion_slices.push(InsertionSlice::from_fragment(timestamp, &intersection));
1198 }
1199 if intersection.len > 0 {
1200 if fragment.visible && !intersection.visible {
1201 let old_start = old_fragments.start().1
1202 + (fragment_start.0 - old_fragments.start().0.full_offset().0);
1203 let new_start = new_fragments.summary().text.visible;
1204 edits_patch.push(Edit {
1205 old: old_start..old_start + intersection.len,
1206 new: new_start..new_start,
1207 });
1208 }
1209 new_insertions.push(InsertionFragment::insert_new(&intersection));
1210 new_ropes.push_fragment(&intersection, fragment.visible);
1211 new_fragments.push(intersection, &None);
1212 fragment_start = intersection_end;
1213 }
1214 if fragment_end <= range.end {
1215 old_fragments.next();
1216 }
1217 }
1218 }
1219
1220 // If the current fragment has been partially consumed, then consume the rest of it
1221 // and advance to the next fragment before slicing.
1222 if fragment_start > old_fragments.start().0.full_offset() {
1223 let fragment_end = old_fragments.end().0.full_offset();
1224 if fragment_end > fragment_start {
1225 let mut suffix = old_fragments.item().unwrap().clone();
1226 suffix.len = fragment_end.0 - fragment_start.0;
1227 suffix.insertion_offset += fragment_start - old_fragments.start().0.full_offset();
1228 new_insertions.push(InsertionFragment::insert_new(&suffix));
1229 new_ropes.push_fragment(&suffix, suffix.visible);
1230 new_fragments.push(suffix, &None);
1231 }
1232 old_fragments.next();
1233 }
1234
1235 let suffix = old_fragments.suffix();
1236 new_ropes.append(suffix.summary().text);
1237 new_fragments.append(suffix, &None);
1238 let (visible_text, deleted_text) = new_ropes.finish();
1239 drop(old_fragments);
1240
1241 self.snapshot.fragments = new_fragments;
1242 self.snapshot.visible_text = visible_text;
1243 self.snapshot.deleted_text = deleted_text;
1244 self.snapshot.insertions.edit(new_insertions, ());
1245 self.snapshot.insertion_slices.extend(insertion_slices);
1246 self.subscriptions.publish_mut(&edits_patch)
1247 }
1248
1249 fn fragment_ids_for_edits<'a>(
1250 &'a self,
1251 edit_ids: impl Iterator<Item = &'a clock::Lamport>,
1252 ) -> Vec<&'a Locator> {
1253 // Get all of the insertion slices changed by the given edits.
1254 let mut insertion_slices = Vec::new();
1255 for edit_id in edit_ids {
1256 let insertion_slice = InsertionSlice {
1257 edit_id: *edit_id,
1258 insertion_id: clock::Lamport::MIN,
1259 range: 0..0,
1260 };
1261 let slices = self
1262 .snapshot
1263 .insertion_slices
1264 .iter_from(&insertion_slice)
1265 .take_while(|slice| slice.edit_id == *edit_id);
1266 insertion_slices.extend(slices)
1267 }
1268 insertion_slices
1269 .sort_unstable_by_key(|s| (s.insertion_id, s.range.start, Reverse(s.range.end)));
1270
1271 // Get all of the fragments corresponding to these insertion slices.
1272 let mut fragment_ids = Vec::new();
1273 let mut insertions_cursor = self.insertions.cursor::<InsertionFragmentKey>(());
1274 for insertion_slice in &insertion_slices {
1275 if insertion_slice.insertion_id != insertions_cursor.start().timestamp
1276 || insertion_slice.range.start > insertions_cursor.start().split_offset
1277 {
1278 insertions_cursor.seek_forward(
1279 &InsertionFragmentKey {
1280 timestamp: insertion_slice.insertion_id,
1281 split_offset: insertion_slice.range.start,
1282 },
1283 Bias::Left,
1284 );
1285 }
1286 while let Some(item) = insertions_cursor.item() {
1287 if item.timestamp != insertion_slice.insertion_id
1288 || item.split_offset >= insertion_slice.range.end
1289 {
1290 break;
1291 }
1292 fragment_ids.push(&item.fragment_id);
1293 insertions_cursor.next();
1294 }
1295 }
1296 fragment_ids.sort_unstable();
1297 fragment_ids
1298 }
1299
1300 fn apply_undo(&mut self, undo: &UndoOperation) {
1301 self.snapshot.undo_map.insert(undo);
1302
1303 let mut edits = Patch::default();
1304 let mut old_fragments = self
1305 .fragments
1306 .cursor::<Dimensions<Option<&Locator>, usize>>(&None);
1307 let mut new_fragments = SumTree::new(&None);
1308 let mut new_ropes =
1309 RopeBuilder::new(self.visible_text.cursor(0), self.deleted_text.cursor(0));
1310
1311 for fragment_id in self.fragment_ids_for_edits(undo.counts.keys()) {
1312 let preceding_fragments = old_fragments.slice(&Some(fragment_id), Bias::Left);
1313 new_ropes.append(preceding_fragments.summary().text);
1314 new_fragments.append(preceding_fragments, &None);
1315
1316 if let Some(fragment) = old_fragments.item() {
1317 let mut fragment = fragment.clone();
1318 let fragment_was_visible = fragment.visible;
1319
1320 fragment.visible = fragment.is_visible(&self.undo_map);
1321 fragment.max_undos.observe(undo.timestamp);
1322
1323 let old_start = old_fragments.start().1;
1324 let new_start = new_fragments.summary().text.visible;
1325 if fragment_was_visible && !fragment.visible {
1326 edits.push(Edit {
1327 old: old_start..old_start + fragment.len,
1328 new: new_start..new_start,
1329 });
1330 } else if !fragment_was_visible && fragment.visible {
1331 edits.push(Edit {
1332 old: old_start..old_start,
1333 new: new_start..new_start + fragment.len,
1334 });
1335 }
1336 new_ropes.push_fragment(&fragment, fragment_was_visible);
1337 new_fragments.push(fragment, &None);
1338
1339 old_fragments.next();
1340 }
1341 }
1342
1343 let suffix = old_fragments.suffix();
1344 new_ropes.append(suffix.summary().text);
1345 new_fragments.append(suffix, &None);
1346
1347 drop(old_fragments);
1348 let (visible_text, deleted_text) = new_ropes.finish();
1349 self.snapshot.fragments = new_fragments;
1350 self.snapshot.visible_text = visible_text;
1351 self.snapshot.deleted_text = deleted_text;
1352 self.subscriptions.publish_mut(&edits);
1353 }
1354
1355 fn flush_deferred_ops(&mut self) {
1356 self.deferred_replicas.clear();
1357 let mut deferred_ops = Vec::new();
1358 for op in self.deferred_ops.drain().iter().cloned() {
1359 if self.can_apply_op(&op) {
1360 self.apply_op(op);
1361 } else {
1362 self.deferred_replicas.insert(op.replica_id());
1363 deferred_ops.push(op);
1364 }
1365 }
1366 self.deferred_ops.insert(deferred_ops);
1367 }
1368
1369 fn can_apply_op(&self, op: &Operation) -> bool {
1370 if self.deferred_replicas.contains(&op.replica_id()) {
1371 false
1372 } else {
1373 self.version.observed_all(match op {
1374 Operation::Edit(edit) => &edit.version,
1375 Operation::Undo(undo) => &undo.version,
1376 })
1377 }
1378 }
1379
1380 pub fn has_deferred_ops(&self) -> bool {
1381 !self.deferred_ops.is_empty()
1382 }
1383
1384 pub fn peek_undo_stack(&self) -> Option<&HistoryEntry> {
1385 self.history.undo_stack.last()
1386 }
1387
1388 pub fn peek_redo_stack(&self) -> Option<&HistoryEntry> {
1389 self.history.redo_stack.last()
1390 }
1391
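    /// Begins a transaction at the current time. Edits made before the matching
    /// `end_transaction` call are grouped into a single undoable entry, and
    /// consecutive transactions that finish within `group_interval` of each
    /// other may be merged. A sketch (illustrative):
    ///
    /// ```ignore
    /// buffer.start_transaction();
    /// buffer.edit([(0..0, "a")]);
    /// buffer.edit([(1..1, "b")]);
    /// buffer.end_transaction();
    /// buffer.undo(); // reverts both edits at once
    /// ```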
1392 pub fn start_transaction(&mut self) -> Option<TransactionId> {
1393 self.start_transaction_at(Instant::now())
1394 }
1395
1396 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
1397 self.history
1398 .start_transaction(self.version.clone(), now, &mut self.lamport_clock)
1399 }
1400
1401 pub fn end_transaction(&mut self) -> Option<(TransactionId, clock::Global)> {
1402 self.end_transaction_at(Instant::now())
1403 }
1404
1405 pub fn end_transaction_at(&mut self, now: Instant) -> Option<(TransactionId, clock::Global)> {
1406 if let Some(entry) = self.history.end_transaction(now) {
1407 let since = entry.transaction.start.clone();
1408 let id = self.history.group().unwrap();
1409 Some((id, since))
1410 } else {
1411 None
1412 }
1413 }
1414
1415 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
1416 self.history.finalize_last_transaction()
1417 }
1418
1419 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
1420 self.history.group_until(transaction_id);
1421 }
1422
1423 pub fn base_text(&self) -> &Rope {
1424 &self.history.base_text
1425 }
1426
1427 pub fn operations(&self) -> &TreeMap<clock::Lamport, Operation> {
1428 &self.history.operations
1429 }
1430
1431 pub fn undo(&mut self) -> Option<(TransactionId, Operation)> {
1432 if let Some(entry) = self.history.pop_undo() {
1433 let transaction = entry.transaction.clone();
1434 let transaction_id = transaction.id;
1435 let op = self.undo_or_redo(transaction);
1436 Some((transaction_id, op))
1437 } else {
1438 None
1439 }
1440 }
1441
1442 pub fn undo_transaction(&mut self, transaction_id: TransactionId) -> Option<Operation> {
1443 let transaction = self
1444 .history
1445 .remove_from_undo(transaction_id)?
1446 .transaction
1447 .clone();
1448 Some(self.undo_or_redo(transaction))
1449 }
1450
1451 pub fn undo_to_transaction(&mut self, transaction_id: TransactionId) -> Vec<Operation> {
1452 let transactions = self
1453 .history
1454 .remove_from_undo_until(transaction_id)
1455 .iter()
1456 .map(|entry| entry.transaction.clone())
1457 .collect::<Vec<_>>();
1458
1459 transactions
1460 .into_iter()
1461 .map(|transaction| self.undo_or_redo(transaction))
1462 .collect()
1463 }
1464
1465 pub fn forget_transaction(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
1466 self.history.forget(transaction_id)
1467 }
1468
1469 pub fn get_transaction(&self, transaction_id: TransactionId) -> Option<&Transaction> {
1470 self.history.transaction(transaction_id)
1471 }
1472
1473 pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
1474 self.history.merge_transactions(transaction, destination);
1475 }
1476
1477 pub fn redo(&mut self) -> Option<(TransactionId, Operation)> {
1478 if let Some(entry) = self.history.pop_redo() {
1479 let transaction = entry.transaction.clone();
1480 let transaction_id = transaction.id;
1481 let op = self.undo_or_redo(transaction);
1482 Some((transaction_id, op))
1483 } else {
1484 None
1485 }
1486 }
1487
1488 pub fn redo_to_transaction(&mut self, transaction_id: TransactionId) -> Vec<Operation> {
1489 let transactions = self
1490 .history
1491 .remove_from_redo(transaction_id)
1492 .iter()
1493 .map(|entry| entry.transaction.clone())
1494 .collect::<Vec<_>>();
1495
1496 transactions
1497 .into_iter()
1498 .map(|transaction| self.undo_or_redo(transaction))
1499 .collect()
1500 }
1501
1502 fn undo_or_redo(&mut self, transaction: Transaction) -> Operation {
1503 let mut counts = HashMap::default();
1504 for edit_id in transaction.edit_ids {
1505 counts.insert(edit_id, self.undo_map.undo_count(edit_id).saturating_add(1));
1506 }
1507
1508 let operation = self.undo_operations(counts);
1509 self.history.push(operation.clone());
1510 operation
1511 }
1512
1513 pub fn undo_operations(&mut self, counts: HashMap<clock::Lamport, u32>) -> Operation {
1514 let timestamp = self.lamport_clock.tick();
1515 let version = self.version();
1516 self.snapshot.version.observe(timestamp);
1517 let undo = UndoOperation {
1518 timestamp,
1519 version,
1520 counts,
1521 };
1522 self.apply_undo(&undo);
1523 Operation::Undo(undo)
1524 }
1525
1526 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
1527 self.history.push_transaction(transaction, now);
1528 }
1529
    /// Differs from `push_transaction` in that it does not clear the redo
    /// stack. Intended to be used to create a parent transaction into which
    /// potential child transactions can be merged.
    ///
    /// The caller is responsible for removing it from the undo history using
    /// `forget_transaction` if no edits are merged into it. Otherwise, if edits
    /// are merged into this transaction, the caller is responsible for ensuring
    /// the redo stack is cleared. The easiest way to do that is to create the
    /// child transactions with the usual `start_transaction` and
    /// `end_transaction` methods and then merge the resulting transactions into
    /// the transaction created by this method.
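    ///
    /// One possible usage sketch (illustrative):
    ///
    /// ```ignore
    /// let parent = buffer.push_empty_transaction(Instant::now());
    /// let child = buffer.start_transaction().unwrap();
    /// buffer.edit([(0..0, "x")]);
    /// buffer.end_transaction();
    /// buffer.merge_transactions(child, parent);
    /// ```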
1543 pub fn push_empty_transaction(&mut self, now: Instant) -> TransactionId {
1544 self.history
1545 .push_empty_transaction(self.version.clone(), now, &mut self.lamport_clock)
1546 }
1547
1548 pub fn edited_ranges_for_transaction_id<D>(
1549 &self,
1550 transaction_id: TransactionId,
1551 ) -> impl '_ + Iterator<Item = Range<D>>
1552 where
1553 D: TextDimension,
1554 {
1555 self.history
1556 .transaction(transaction_id)
1557 .into_iter()
1558 .flat_map(|transaction| self.edited_ranges_for_transaction(transaction))
1559 }
1560
1561 pub fn edited_ranges_for_edit_ids<'a, D>(
1562 &'a self,
1563 edit_ids: impl IntoIterator<Item = &'a clock::Lamport>,
1564 ) -> impl 'a + Iterator<Item = Range<D>>
1565 where
1566 D: TextDimension,
1567 {
1568 // get fragment ranges
1569 let mut cursor = self
1570 .fragments
1571 .cursor::<Dimensions<Option<&Locator>, usize>>(&None);
1572 let offset_ranges = self
1573 .fragment_ids_for_edits(edit_ids.into_iter())
1574 .into_iter()
1575 .filter_map(move |fragment_id| {
1576 cursor.seek_forward(&Some(fragment_id), Bias::Left);
1577 let fragment = cursor.item()?;
1578 let start_offset = cursor.start().1;
1579 let end_offset = start_offset + if fragment.visible { fragment.len } else { 0 };
1580 Some(start_offset..end_offset)
1581 });
1582
1583 // combine adjacent ranges
1584 let mut prev_range: Option<Range<usize>> = None;
1585 let disjoint_ranges = offset_ranges
1586 .map(Some)
1587 .chain([None])
1588 .filter_map(move |range| {
1589 if let Some((range, prev_range)) = range.as_ref().zip(prev_range.as_mut())
1590 && prev_range.end == range.start
1591 {
1592 prev_range.end = range.end;
1593 return None;
1594 }
1595 let result = prev_range.clone();
1596 prev_range = range;
1597 result
1598 });
1599
1600 // convert to the desired text dimension.
1601 let mut position = D::zero(());
1602 let mut rope_cursor = self.visible_text.cursor(0);
1603 disjoint_ranges.map(move |range| {
1604 position.add_assign(&rope_cursor.summary(range.start));
1605 let start = position;
1606 position.add_assign(&rope_cursor.summary(range.end));
1607 let end = position;
1608 start..end
1609 })
1610 }
1611
1612 pub fn edited_ranges_for_transaction<'a, D>(
1613 &'a self,
1614 transaction: &'a Transaction,
1615 ) -> impl 'a + Iterator<Item = Range<D>>
1616 where
1617 D: TextDimension,
1618 {
1619 self.edited_ranges_for_edit_ids(&transaction.edit_ids)
1620 }
1621
1622 pub fn subscribe(&mut self) -> Subscription<usize> {
1623 self.subscriptions.subscribe()
1624 }
1625
1626 pub fn wait_for_edits<It: IntoIterator<Item = clock::Lamport>>(
1627 &mut self,
1628 edit_ids: It,
1629 ) -> impl 'static + Future<Output = Result<()>> + use<It> {
1630 let mut futures = Vec::new();
1631 for edit_id in edit_ids {
1632 if !self.version.observed(edit_id) {
1633 let (tx, rx) = oneshot::channel();
1634 self.edit_id_resolvers.entry(edit_id).or_default().push(tx);
1635 futures.push(rx);
1636 }
1637 }
1638
1639 async move {
1640 for mut future in futures {
1641 if future.recv().await.is_none() {
1642 anyhow::bail!("gave up waiting for edits");
1643 }
1644 }
1645 Ok(())
1646 }
1647 }
1648
1649 pub fn wait_for_anchors<It: IntoIterator<Item = Anchor>>(
1650 &mut self,
1651 anchors: It,
1652 ) -> impl 'static + Future<Output = Result<()>> + use<It> {
1653 let mut futures = Vec::new();
1654 for anchor in anchors {
1655 if !self.version.observed(anchor.timestamp)
1656 && anchor != Anchor::MAX
1657 && anchor != Anchor::MIN
1658 {
1659 let (tx, rx) = oneshot::channel();
1660 self.edit_id_resolvers
1661 .entry(anchor.timestamp)
1662 .or_default()
1663 .push(tx);
1664 futures.push(rx);
1665 }
1666 }
1667
1668 async move {
1669 for mut future in futures {
1670 if future.recv().await.is_none() {
1671 anyhow::bail!("gave up waiting for anchors");
1672 }
1673 }
1674 Ok(())
1675 }
1676 }
1677
1678 pub fn wait_for_version(
1679 &mut self,
1680 version: clock::Global,
1681 ) -> impl Future<Output = Result<()>> + use<> {
1682 let mut rx = None;
1683 if !self.snapshot.version.observed_all(&version) {
1684 let channel = oneshot::channel();
1685 self.wait_for_version_txs.push((version, channel.0));
1686 rx = Some(channel.1);
1687 }
1688 async move {
1689 if let Some(mut rx) = rx
1690 && rx.recv().await.is_none()
1691 {
1692 anyhow::bail!("gave up waiting for version");
1693 }
1694 Ok(())
1695 }
1696 }
1697
1698 pub fn give_up_waiting(&mut self) {
1699 self.edit_id_resolvers.clear();
1700 self.wait_for_version_txs.clear();
1701 }
1702
1703 fn resolve_edit(&mut self, edit_id: clock::Lamport) {
1704 for mut tx in self
1705 .edit_id_resolvers
1706 .remove(&edit_id)
1707 .into_iter()
1708 .flatten()
1709 {
1710 tx.try_send(()).ok();
1711 }
1712 }
1713}
1714
1715#[cfg(any(test, feature = "test-support"))]
1716impl Buffer {
1717 #[track_caller]
1718 pub fn edit_via_marked_text(&mut self, marked_string: &str) {
1719 let edits = self.edits_for_marked_text(marked_string);
1720 self.edit(edits);
1721 }
1722
1723 #[track_caller]
1724 pub fn edits_for_marked_text(&self, marked_string: &str) -> Vec<(Range<usize>, String)> {
1725 let old_text = self.text();
1726 let (new_text, mut ranges) = util::test::marked_text_ranges(marked_string, false);
1727 if ranges.is_empty() {
1728 ranges.push(0..new_text.len());
1729 }
1730
1731 assert_eq!(
1732 old_text[..ranges[0].start],
1733 new_text[..ranges[0].start],
1734 "invalid edit"
1735 );
1736
1737 let mut delta = 0;
1738 let mut edits = Vec::new();
1739 let mut ranges = ranges.into_iter().peekable();
1740
1741 while let Some(inserted_range) = ranges.next() {
1742 let new_start = inserted_range.start;
1743 let old_start = (new_start as isize - delta) as usize;
1744
1745 let following_text = if let Some(next_range) = ranges.peek() {
1746 &new_text[inserted_range.end..next_range.start]
1747 } else {
1748 &new_text[inserted_range.end..]
1749 };
1750
1751 let inserted_len = inserted_range.len();
1752 let deleted_len = old_text[old_start..]
1753 .find(following_text)
1754 .expect("invalid edit");
1755
1756 let old_range = old_start..old_start + deleted_len;
1757 edits.push((old_range, new_text[inserted_range].to_string()));
1758 delta += inserted_len as isize - deleted_len as isize;
1759 }
1760
1761 assert_eq!(
1762 old_text.len() as isize + delta,
1763 new_text.len() as isize,
1764 "invalid edit"
1765 );
1766
1767 edits
1768 }
1769
1770 pub fn check_invariants(&self) {
1771 // Ensure every fragment is ordered by locator in the fragment tree and corresponds
1772 // to an insertion fragment in the insertions tree.
1773 let mut prev_fragment_id = Locator::min();
1774 for fragment in self.snapshot.fragments.items(&None) {
1775 assert!(fragment.id > prev_fragment_id);
1776 prev_fragment_id = fragment.id.clone();
1777
1778 let insertion_fragment = self
1779 .snapshot
1780 .insertions
1781 .get(
1782 &InsertionFragmentKey {
1783 timestamp: fragment.timestamp,
1784 split_offset: fragment.insertion_offset,
1785 },
1786 (),
1787 )
1788 .unwrap();
1789 assert_eq!(
1790 insertion_fragment.fragment_id, fragment.id,
1791 "fragment: {:?}\ninsertion: {:?}",
1792 fragment, insertion_fragment
1793 );
1794 }
1795
1796 let mut cursor = self.snapshot.fragments.cursor::<Option<&Locator>>(&None);
1797 for insertion_fragment in self.snapshot.insertions.cursor::<()>(()) {
1798 cursor.seek(&Some(&insertion_fragment.fragment_id), Bias::Left);
1799 let fragment = cursor.item().unwrap();
1800 assert_eq!(insertion_fragment.fragment_id, fragment.id);
1801 assert_eq!(insertion_fragment.split_offset, fragment.insertion_offset);
1802 }
1803
1804 let fragment_summary = self.snapshot.fragments.summary();
1805 assert_eq!(
1806 fragment_summary.text.visible,
1807 self.snapshot.visible_text.len()
1808 );
1809 assert_eq!(
1810 fragment_summary.text.deleted,
1811 self.snapshot.deleted_text.len()
1812 );
1813
1814 assert!(!self.text().contains("\r\n"));
1815 }
1816
1817 pub fn set_group_interval(&mut self, group_interval: Duration) {
1818 self.history.group_interval = group_interval;
1819 }
1820
1821 pub fn random_byte_range(&self, start_offset: usize, rng: &mut impl rand::Rng) -> Range<usize> {
1822 let end = self.clip_offset(rng.random_range(start_offset..=self.len()), Bias::Right);
1823 let start = self.clip_offset(rng.random_range(start_offset..=end), Bias::Right);
1824 start..end
1825 }
1826
1827 pub fn get_random_edits<T>(
1828 &self,
1829 rng: &mut T,
1830 edit_count: usize,
1831 ) -> Vec<(Range<usize>, Arc<str>)>
1832 where
1833 T: rand::Rng,
1834 {
1835 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
1836 let mut last_end = None;
1837 for _ in 0..edit_count {
1838 if last_end.is_some_and(|last_end| last_end >= self.len()) {
1839 break;
1840 }
1841 let new_start = last_end.map_or(0, |last_end| last_end + 1);
1842 let range = self.random_byte_range(new_start, rng);
1843 last_end = Some(range.end);
1844
1845 let new_text_len = rng.random_range(0..10);
1846 let new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
1847
1848 edits.push((range, new_text.into()));
1849 }
1850 edits
1851 }
1852
1853 pub fn randomly_edit<T>(
1854 &mut self,
1855 rng: &mut T,
1856 edit_count: usize,
1857 ) -> (Vec<(Range<usize>, Arc<str>)>, Operation)
1858 where
1859 T: rand::Rng,
1860 {
1861 let mut edits = self.get_random_edits(rng, edit_count);
1862 log::info!("mutating buffer {:?} with {:?}", self.replica_id, edits);
1863
1864 let op = self.edit(edits.iter().cloned());
1865 if let Operation::Edit(edit) = &op {
1866 assert_eq!(edits.len(), edit.new_text.len());
1867 for (edit, new_text) in edits.iter_mut().zip(&edit.new_text) {
1868 edit.1 = new_text.clone();
1869 }
1870 } else {
1871 unreachable!()
1872 }
1873
1874 (edits, op)
1875 }
1876
1877 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng) -> Vec<Operation> {
1878 use rand::prelude::*;
1879
1880 let mut ops = Vec::new();
1881 for _ in 0..rng.random_range(1..=5) {
1882 if let Some(entry) = self.history.undo_stack.choose(rng) {
1883 let transaction = entry.transaction.clone();
1884 log::info!(
1885 "undoing buffer {:?} transaction {:?}",
1886 self.replica_id,
1887 transaction
1888 );
1889 ops.push(self.undo_or_redo(transaction));
1890 }
1891 }
1892 ops
1893 }
1894}
1895
1896impl Deref for Buffer {
1897 type Target = BufferSnapshot;
1898
1899 fn deref(&self) -> &Self::Target {
1900 &self.snapshot
1901 }
1902}
1903
1904impl BufferSnapshot {
1905 pub fn as_rope(&self) -> &Rope {
1906 &self.visible_text
1907 }
1908
1909 pub fn rope_for_version(&self, version: &clock::Global) -> Rope {
1910 let mut rope = Rope::new();
1911
1912 let mut cursor = self
1913 .fragments
1914 .filter::<_, FragmentTextSummary>(&None, move |summary| {
1915 !version.observed_all(&summary.max_version)
1916 });
1917 cursor.next();
1918
1919 let mut visible_cursor = self.visible_text.cursor(0);
1920 let mut deleted_cursor = self.deleted_text.cursor(0);
1921
1922 while let Some(fragment) = cursor.item() {
1923 if cursor.start().visible > visible_cursor.offset() {
1924 let text = visible_cursor.slice(cursor.start().visible);
1925 rope.append(text);
1926 }
1927
1928 if fragment.was_visible(version, &self.undo_map) {
1929 if fragment.visible {
1930 let text = visible_cursor.slice(cursor.end().visible);
1931 rope.append(text);
1932 } else {
1933 deleted_cursor.seek_forward(cursor.start().deleted);
1934 let text = deleted_cursor.slice(cursor.end().deleted);
1935 rope.append(text);
1936 }
1937 } else if fragment.visible {
1938 visible_cursor.seek_forward(cursor.end().visible);
1939 }
1940
1941 cursor.next();
1942 }
1943
1944 if cursor.start().visible > visible_cursor.offset() {
1945 let text = visible_cursor.slice(cursor.start().visible);
1946 rope.append(text);
1947 }
1948
1949 rope
1950 }
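    // Illustrative sketch (not part of the API surface): reconstruct the text as it
    // was at a previously captured version. `old_version` is assumed to have been
    // cloned from `snapshot.version()` before later edits were applied.
    //
    //     fn _text_at_old_version(snapshot: &BufferSnapshot, old_version: &clock::Global) -> String {
    //         snapshot.rope_for_version(old_version).to_string()
    //     }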
1951
1952 pub fn remote_id(&self) -> BufferId {
1953 self.remote_id
1954 }
1955
1956 pub fn replica_id(&self) -> ReplicaId {
1957 self.replica_id
1958 }
1959
1960 pub fn row_count(&self) -> u32 {
1961 self.max_point().row + 1
1962 }
1963
1964 pub fn len(&self) -> usize {
1965 self.visible_text.len()
1966 }
1967
1968 pub fn is_empty(&self) -> bool {
1969 self.len() == 0
1970 }
1971
1972 pub fn chars(&self) -> impl Iterator<Item = char> + '_ {
1973 self.chars_at(0)
1974 }
1975
1976 pub fn chars_for_range<T: ToOffset>(&self, range: Range<T>) -> impl Iterator<Item = char> + '_ {
1977 self.text_for_range(range).flat_map(str::chars)
1978 }
1979
1980 pub fn reversed_chars_for_range<T: ToOffset>(
1981 &self,
1982 range: Range<T>,
1983 ) -> impl Iterator<Item = char> + '_ {
1984 self.reversed_chunks_in_range(range)
1985 .flat_map(|chunk| chunk.chars().rev())
1986 }
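    // Illustrative sketch: the forward and reversed range accessors visit the same
    // characters in opposite orders. The end offset is clipped so the range stays
    // on a UTF-8 character boundary.
    //
    //     fn _range_access(snapshot: &BufferSnapshot) {
    //         let end = snapshot.clip_offset(snapshot.len().min(16), Bias::Left);
    //         let forward: String = snapshot.chars_for_range(0..end).collect();
    //         let backward: String = snapshot.reversed_chars_for_range(0..end).collect();
    //         assert_eq!(forward, backward.chars().rev().collect::<String>());
    //     }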
1987
1988 pub fn contains_str_at<T>(&self, position: T, needle: &str) -> bool
1989 where
1990 T: ToOffset,
1991 {
1992 let position = position.to_offset(self);
1993 position == self.clip_offset(position, Bias::Left)
1994 && self
1995 .bytes_in_range(position..self.len())
1996 .flatten()
1997 .copied()
1998 .take(needle.len())
1999 .eq(needle.bytes())
2000 }
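    // Illustrative sketch: `contains_str_at` is useful for cursor-relative checks,
    // e.g. deciding whether the text at `offset` already starts with a closing
    // bracket (the helper name here is hypothetical).
    //
    //     fn _next_char_is_close_paren(snapshot: &BufferSnapshot, offset: usize) -> bool {
    //         snapshot.contains_str_at(offset, ")")
    //     }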
2001
2002 pub fn common_prefix_at<T>(&self, position: T, needle: &str) -> Range<T>
2003 where
2004 T: ToOffset + TextDimension,
2005 {
2006 let offset = position.to_offset(self);
2007 let common_prefix_len = needle
2008 .char_indices()
2009 .map(|(index, _)| index)
2010 .chain([needle.len()])
2011 .take_while(|&len| len <= offset)
2012 .filter(|&len| {
2013 let left = self
2014 .chars_for_range(offset - len..offset)
2015 .flat_map(char::to_lowercase);
2016 let right = needle[..len].chars().flat_map(char::to_lowercase);
2017 left.eq(right)
2018 })
2019 .last()
2020 .unwrap_or(0);
2021 let start_offset = offset - common_prefix_len;
2022 let start = self.text_summary_for_range(0..start_offset);
2023 start..position
2024 }
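    // Illustrative sketch: with the text "Hello wo" and the cursor at its end, the
    // longest case-insensitive prefix of the needle "world" ending at the cursor is
    // "wo", so the returned range covers those two bytes.
    //
    //     fn _completion_prefix(snapshot: &BufferSnapshot, cursor: usize) -> Range<usize> {
    //         snapshot.common_prefix_at(cursor, "world")
    //     }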
2025
2026 pub fn text(&self) -> String {
2027 self.visible_text.to_string()
2028 }
2029
2030 pub fn line_ending(&self) -> LineEnding {
2031 self.line_ending
2032 }
2033
2034 pub fn deleted_text(&self) -> String {
2035 self.deleted_text.to_string()
2036 }
2037
2038 pub fn fragments(&self) -> impl Iterator<Item = &Fragment> {
2039 self.fragments.iter()
2040 }
2041
2042 pub fn text_summary(&self) -> TextSummary {
2043 self.visible_text.summary()
2044 }
2045
2046 pub fn max_point(&self) -> Point {
2047 self.visible_text.max_point()
2048 }
2049
2050 pub fn max_point_utf16(&self) -> PointUtf16 {
2051 self.visible_text.max_point_utf16()
2052 }
2053
2054 pub fn point_to_offset(&self, point: Point) -> usize {
2055 self.visible_text.point_to_offset(point)
2056 }
2057
2058 pub fn point_to_offset_utf16(&self, point: Point) -> OffsetUtf16 {
2059 self.visible_text.point_to_offset_utf16(point)
2060 }
2061
2062 pub fn point_utf16_to_offset_utf16(&self, point: PointUtf16) -> OffsetUtf16 {
2063 self.visible_text.point_utf16_to_offset_utf16(point)
2064 }
2065
2066 pub fn point_utf16_to_offset(&self, point: PointUtf16) -> usize {
2067 self.visible_text.point_utf16_to_offset(point)
2068 }
2069
2070 pub fn unclipped_point_utf16_to_offset(&self, point: Unclipped<PointUtf16>) -> usize {
2071 self.visible_text.unclipped_point_utf16_to_offset(point)
2072 }
2073
2074 pub fn unclipped_point_utf16_to_point(&self, point: Unclipped<PointUtf16>) -> Point {
2075 self.visible_text.unclipped_point_utf16_to_point(point)
2076 }
2077
2078 pub fn offset_utf16_to_offset(&self, offset: OffsetUtf16) -> usize {
2079 self.visible_text.offset_utf16_to_offset(offset)
2080 }
2081
2082 pub fn offset_to_offset_utf16(&self, offset: usize) -> OffsetUtf16 {
2083 self.visible_text.offset_to_offset_utf16(offset)
2084 }
2085
2086 pub fn offset_to_point(&self, offset: usize) -> Point {
2087 self.visible_text.offset_to_point(offset)
2088 }
2089
2090 pub fn offset_to_point_utf16(&self, offset: usize) -> PointUtf16 {
2091 self.visible_text.offset_to_point_utf16(offset)
2092 }
2093
2094 pub fn point_to_point_utf16(&self, point: Point) -> PointUtf16 {
2095 self.visible_text.point_to_point_utf16(point)
2096 }
2097
2098 pub fn point_utf16_to_point(&self, point: PointUtf16) -> Point {
2099 self.visible_text.point_utf16_to_point(point)
2100 }
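    // Illustrative round trip through the conversions above (a sketch; the offset is
    // first clipped so it lands on a valid UTF-8 boundary). The `*_utf16` variants
    // measure columns in UTF-16 code units rather than bytes.
    //
    //     fn _coordinate_round_trip(snapshot: &BufferSnapshot, offset: usize) {
    //         let offset = snapshot.clip_offset(offset, Bias::Left);
    //         let point = snapshot.offset_to_point(offset);
    //         let point_utf16 = snapshot.point_to_point_utf16(point);
    //         assert_eq!(snapshot.point_utf16_to_offset(point_utf16), offset);
    //     }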
2101
2102 pub fn version(&self) -> &clock::Global {
2103 &self.version
2104 }
2105
2106 pub fn chars_at<T: ToOffset>(&self, position: T) -> impl Iterator<Item = char> + '_ {
2107 let offset = position.to_offset(self);
2108 self.visible_text.chars_at(offset)
2109 }
2110
2111 pub fn reversed_chars_at<T: ToOffset>(&self, position: T) -> impl Iterator<Item = char> + '_ {
2112 let offset = position.to_offset(self);
2113 self.visible_text.reversed_chars_at(offset)
2114 }
2115
2116 pub fn reversed_chunks_in_range<T: ToOffset>(&self, range: Range<T>) -> rope::Chunks<'_> {
2117 let range = range.start.to_offset(self)..range.end.to_offset(self);
2118 self.visible_text.reversed_chunks_in_range(range)
2119 }
2120
2121 pub fn bytes_in_range<T: ToOffset>(&self, range: Range<T>) -> rope::Bytes<'_> {
2122 let start = range.start.to_offset(self);
2123 let end = range.end.to_offset(self);
2124 self.visible_text.bytes_in_range(start..end)
2125 }
2126
2127 pub fn reversed_bytes_in_range<T: ToOffset>(&self, range: Range<T>) -> rope::Bytes<'_> {
2128 let start = range.start.to_offset(self);
2129 let end = range.end.to_offset(self);
2130 self.visible_text.reversed_bytes_in_range(start..end)
2131 }
2132
2133 pub fn text_for_range<T: ToOffset>(&self, range: Range<T>) -> Chunks<'_> {
2134 let start = range.start.to_offset(self);
2135 let end = range.end.to_offset(self);
2136 self.visible_text.chunks_in_range(start..end)
2137 }
2138
2139 pub fn line_len(&self, row: u32) -> u32 {
2140 let row_start_offset = Point::new(row, 0).to_offset(self);
2141 let row_end_offset = if row >= self.max_point().row {
2142 self.len()
2143 } else {
2144 Point::new(row + 1, 0).to_previous_offset(self)
2145 };
2146 (row_end_offset - row_start_offset) as u32
2147 }
2148
2149 pub fn line_indents_in_row_range(
2150 &self,
2151 row_range: Range<u32>,
2152 ) -> impl Iterator<Item = (u32, LineIndent)> + '_ {
2153 let start = Point::new(row_range.start, 0).to_offset(self);
2154 let end = Point::new(row_range.end, self.line_len(row_range.end)).to_offset(self);
2155
2156 let mut chunks = self.as_rope().chunks_in_range(start..end);
2157 let mut row = row_range.start;
2158 let mut done = false;
2159 std::iter::from_fn(move || {
2160 if done {
2161 None
2162 } else {
2163 let indent = (row, LineIndent::from_chunks(&mut chunks));
2164 done = !chunks.next_line();
2165 row += 1;
2166 Some(indent)
2167 }
2168 })
2169 }
2170
2171 /// Returns the line indents in the given row range, exclusive of the end row, in reverse order.
2172 pub fn reversed_line_indents_in_row_range(
2173 &self,
2174 row_range: Range<u32>,
2175 ) -> impl Iterator<Item = (u32, LineIndent)> + '_ {
2176 let start = Point::new(row_range.start, 0).to_offset(self);
2177
2178 let end_point;
2179 let end;
2180 if row_range.end > row_range.start {
2181 end_point = Point::new(row_range.end - 1, self.line_len(row_range.end - 1));
2182 end = end_point.to_offset(self);
2183 } else {
2184 end_point = Point::new(row_range.start, 0);
2185 end = start;
2186 };
2187
2188 let mut chunks = self.as_rope().chunks_in_range(start..end);
2189 // Move the cursor to the start of the last line if it's not empty.
2190 chunks.seek(end);
2191 if end_point.column > 0 {
2192 chunks.prev_line();
2193 }
2194
2195 let mut row = end_point.row;
2196 let mut done = false;
2197 std::iter::from_fn(move || {
2198 if done {
2199 None
2200 } else {
2201 let initial_offset = chunks.offset();
2202 let indent = (row, LineIndent::from_chunks(&mut chunks));
2203 if chunks.offset() > initial_offset {
2204 chunks.prev_line();
2205 }
2206 done = !chunks.prev_line();
2207 if !done {
2208 row -= 1;
2209 }
2210
2211 Some(indent)
2212 }
2213 })
2214 }
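    // Note on bounds, as implemented above, for a non-empty row range `a..b`:
    // `line_indents_in_row_range(a..b)` also yields the end row `b`, because its
    // chunk range extends through the end of that row, whereas the reversed variant
    // yields rows `a..b` in descending order, excluding the end row.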
2215
2216 pub fn line_indent_for_row(&self, row: u32) -> LineIndent {
2217 LineIndent::from_iter(self.chars_at(Point::new(row, 0)))
2218 }
2219
2220 pub fn is_line_blank(&self, row: u32) -> bool {
2221 self.text_for_range(Point::new(row, 0)..Point::new(row, self.line_len(row)))
2222 .all(|chunk| chunk.matches(|c: char| !c.is_whitespace()).next().is_none())
2223 }
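    // Illustrative sketch: scanning for the first row in a range that contains any
    // non-whitespace character. Rows are assumed to be valid buffer rows.
    //
    //     fn _first_non_blank_row(snapshot: &BufferSnapshot, mut rows: Range<u32>) -> Option<u32> {
    //         rows.find(|&row| !snapshot.is_line_blank(row))
    //     }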
2224
2225 pub fn text_summary_for_range<D, O: ToOffset>(&self, range: Range<O>) -> D
2226 where
2227 D: TextDimension,
2228 {
2229 self.visible_text
2230 .cursor(range.start.to_offset(self))
2231 .summary(range.end.to_offset(self))
2232 }
2233
2234 pub fn summaries_for_anchors<'a, D, A>(&'a self, anchors: A) -> impl 'a + Iterator<Item = D>
2235 where
2236 D: 'a + TextDimension,
2237 A: 'a + IntoIterator<Item = &'a Anchor>,
2238 {
2239 let anchors = anchors.into_iter();
2240 self.summaries_for_anchors_with_payload::<D, _, ()>(anchors.map(|a| (a, ())))
2241 .map(|d| d.0)
2242 }
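    // Illustrative sketch: resolve a batch of anchors to points in one pass. The
    // anchors are assumed to already be sorted by buffer position, since the
    // internal cursors only move forward.
    //
    //     fn _anchors_to_points(snapshot: &BufferSnapshot, anchors: &[Anchor]) -> Vec<Point> {
    //         snapshot.summaries_for_anchors::<Point, _>(anchors).collect()
    //     }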
2243
2244 pub fn summaries_for_anchors_with_payload<'a, D, A, T>(
2245 &'a self,
2246 anchors: A,
2247 ) -> impl 'a + Iterator<Item = (D, T)>
2248 where
2249 D: 'a + TextDimension,
2250 A: 'a + IntoIterator<Item = (&'a Anchor, T)>,
2251 {
2252 let anchors = anchors.into_iter();
2253 let mut insertion_cursor = self.insertions.cursor::<InsertionFragmentKey>(());
2254 let mut fragment_cursor = self
2255 .fragments
2256 .cursor::<Dimensions<Option<&Locator>, usize>>(&None);
2257 let mut text_cursor = self.visible_text.cursor(0);
2258 let mut position = D::zero(());
2259
2260 anchors.map(move |(anchor, payload)| {
2261 if *anchor == Anchor::MIN {
2262 return (D::zero(()), payload);
2263 } else if *anchor == Anchor::MAX {
2264 return (D::from_text_summary(&self.visible_text.summary()), payload);
2265 }
2266
2267 let anchor_key = InsertionFragmentKey {
2268 timestamp: anchor.timestamp,
2269 split_offset: anchor.offset,
2270 };
2271 insertion_cursor.seek(&anchor_key, anchor.bias);
2272 if let Some(insertion) = insertion_cursor.item() {
2273 let comparison = sum_tree::KeyedItem::key(insertion).cmp(&anchor_key);
2274 if comparison == Ordering::Greater
2275 || (anchor.bias == Bias::Left
2276 && comparison == Ordering::Equal
2277 && anchor.offset > 0)
2278 {
2279 insertion_cursor.prev();
2280 }
2281 } else {
2282 insertion_cursor.prev();
2283 }
2284 let Some(insertion) = insertion_cursor.item() else {
2285 panic!(
2286 "invalid insertion for buffer {}@{:?} with anchor {:?}",
2287 self.remote_id(),
2288 self.version,
2289 anchor
2290 );
2291 };
2292 assert_eq!(
2293 insertion.timestamp,
2294 anchor.timestamp,
2295 "invalid insertion for buffer {}@{:?} and anchor {:?}",
2296 self.remote_id(),
2297 self.version,
2298 anchor
2299 );
2300
2301 fragment_cursor.seek_forward(&Some(&insertion.fragment_id), Bias::Left);
2302 let fragment = fragment_cursor.item().unwrap();
2303 let mut fragment_offset = fragment_cursor.start().1;
2304 if fragment.visible {
2305 fragment_offset += anchor.offset - insertion.split_offset;
2306 }
2307
2308 position.add_assign(&text_cursor.summary(fragment_offset));
2309 (position, payload)
2310 })
2311 }
2312
2313 pub fn summary_for_anchor<D>(&self, anchor: &Anchor) -> D
2314 where
2315 D: TextDimension,
2316 {
2317 self.text_summary_for_range(0..self.offset_for_anchor(anchor))
2318 }
2319
2320 pub fn offset_for_anchor(&self, anchor: &Anchor) -> usize {
2321 if *anchor == Anchor::MIN {
2322 0
2323 } else if *anchor == Anchor::MAX {
2324 self.visible_text.len()
2325 } else {
2326 debug_assert!(anchor.buffer_id == Some(self.remote_id));
2327 debug_assert!(self.version.observed(anchor.timestamp));
2328 let anchor_key = InsertionFragmentKey {
2329 timestamp: anchor.timestamp,
2330 split_offset: anchor.offset,
2331 };
2332 let mut insertion_cursor = self.insertions.cursor::<InsertionFragmentKey>(());
2333 insertion_cursor.seek(&anchor_key, anchor.bias);
2334 if let Some(insertion) = insertion_cursor.item() {
2335 let comparison = sum_tree::KeyedItem::key(insertion).cmp(&anchor_key);
2336 if comparison == Ordering::Greater
2337 || (anchor.bias == Bias::Left
2338 && comparison == Ordering::Equal
2339 && anchor.offset > 0)
2340 {
2341 insertion_cursor.prev();
2342 }
2343 } else {
2344 insertion_cursor.prev();
2345 }
2346
2347 let Some(insertion) = insertion_cursor
2348 .item()
2349 .filter(|insertion| insertion.timestamp == anchor.timestamp)
2350 else {
2351 self.panic_bad_anchor(anchor);
2352 };
2353
2354 let (start, _, item) = self
2355 .fragments
2356 .find::<Dimensions<Option<&Locator>, usize>, _>(
2357 &None,
2358 &Some(&insertion.fragment_id),
2359 Bias::Left,
2360 );
2361 let fragment = item.unwrap();
2362 let mut fragment_offset = start.1;
2363 if fragment.visible {
2364 fragment_offset += anchor.offset - insertion.split_offset;
2365 }
2366 fragment_offset
2367 }
2368 }
2369
2370 #[cold]
2371 fn panic_bad_anchor(&self, anchor: &Anchor) -> ! {
2372 if anchor.buffer_id.is_some_and(|id| id != self.remote_id) {
2373 panic!(
2374 "invalid anchor - buffer id does not match: anchor {anchor:?}; buffer id: {}, version: {:?}",
2375 self.remote_id, self.version
2376 );
2377 } else if !self.version.observed(anchor.timestamp) {
2378 panic!(
2379 "invalid anchor - snapshot has not observed lamport: {:?}; version: {:?}",
2380 anchor, self.version
2381 );
2382 } else {
2383 panic!(
2384 "invalid anchor {:?}. buffer id: {}, version: {:?}",
2385 anchor, self.remote_id, self.version
2386 );
2387 }
2388 }
2389
2390 fn fragment_id_for_anchor(&self, anchor: &Anchor) -> &Locator {
2391 self.try_fragment_id_for_anchor(anchor)
2392 .unwrap_or_else(|| self.panic_bad_anchor(anchor))
2393 }
2394
2395 fn try_fragment_id_for_anchor(&self, anchor: &Anchor) -> Option<&Locator> {
2396 if *anchor == Anchor::MIN {
2397 Some(Locator::min_ref())
2398 } else if *anchor == Anchor::MAX {
2399 Some(Locator::max_ref())
2400 } else {
2401 let anchor_key = InsertionFragmentKey {
2402 timestamp: anchor.timestamp,
2403 split_offset: anchor.offset,
2404 };
2405 let mut insertion_cursor = self.insertions.cursor::<InsertionFragmentKey>(());
2406 insertion_cursor.seek(&anchor_key, anchor.bias);
2407 if let Some(insertion) = insertion_cursor.item() {
2408 let comparison = sum_tree::KeyedItem::key(insertion).cmp(&anchor_key);
2409 if comparison == Ordering::Greater
2410 || (anchor.bias == Bias::Left
2411 && comparison == Ordering::Equal
2412 && anchor.offset > 0)
2413 {
2414 insertion_cursor.prev();
2415 }
2416 } else {
2417 insertion_cursor.prev();
2418 }
2419
2420 insertion_cursor
2421 .item()
2422 .filter(|insertion| {
2423 !cfg!(debug_assertions) || insertion.timestamp == anchor.timestamp
2424 })
2425 .map(|insertion| &insertion.fragment_id)
2426 }
2427 }
2428
2429 pub fn anchor_before<T: ToOffset>(&self, position: T) -> Anchor {
2430 self.anchor_at(position, Bias::Left)
2431 }
2432
2433 pub fn anchor_after<T: ToOffset>(&self, position: T) -> Anchor {
2434 self.anchor_at(position, Bias::Right)
2435 }
2436
2437 pub fn anchor_at<T: ToOffset>(&self, position: T, bias: Bias) -> Anchor {
2438 self.anchor_at_offset(position.to_offset(self), bias)
2439 }
2440
2441 fn anchor_at_offset(&self, offset: usize, bias: Bias) -> Anchor {
2442 if bias == Bias::Left && offset == 0 {
2443 Anchor::MIN
2444 } else if bias == Bias::Right && offset == self.len() {
2445 Anchor::MAX
2446 } else {
2447 if offset > self.visible_text.len() {
2448 panic!("offset {} is out of bounds", offset)
2449 }
2450 let (start, _, item) = self.fragments.find::<usize, _>(&None, &offset, bias);
2451 let fragment = item.unwrap();
2452 let overshoot = offset - start;
2453 Anchor {
2454 timestamp: fragment.timestamp,
2455 offset: fragment.insertion_offset + overshoot,
2456 bias,
2457 buffer_id: Some(self.remote_id),
2458 }
2459 }
2460 }
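    // Illustrative sketch: an anchor keeps tracking the same logical position as the
    // buffer is edited. `buffer` is assumed to be a non-empty `Buffer`; the concrete
    // offsets and edit are hypothetical.
    //
    //     fn _anchor_tracks_edits(buffer: &mut Buffer) {
    //         let offset = buffer.clip_offset(4, Bias::Left);
    //         let anchor = buffer.anchor_before(offset);
    //         buffer.edit([(0..0_usize, Arc::<str>::from("prefix "))].into_iter());
    //         // The insertion happened before the anchor, so its resolved offset shifts.
    //         assert_eq!(buffer.offset_for_anchor(&anchor), offset + "prefix ".len());
    //     }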
2461
2462 pub fn can_resolve(&self, anchor: &Anchor) -> bool {
2463 *anchor == Anchor::MIN
2464 || *anchor == Anchor::MAX
2465 || (Some(self.remote_id) == anchor.buffer_id && self.version.observed(anchor.timestamp))
2466 }
2467
2468 pub fn clip_offset(&self, offset: usize, bias: Bias) -> usize {
2469 self.visible_text.clip_offset(offset, bias)
2470 }
2471
2472 pub fn clip_point(&self, point: Point, bias: Bias) -> Point {
2473 self.visible_text.clip_point(point, bias)
2474 }
2475
2476 pub fn clip_offset_utf16(&self, offset: OffsetUtf16, bias: Bias) -> OffsetUtf16 {
2477 self.visible_text.clip_offset_utf16(offset, bias)
2478 }
2479
2480 pub fn clip_point_utf16(&self, point: Unclipped<PointUtf16>, bias: Bias) -> PointUtf16 {
2481 self.visible_text.clip_point_utf16(point, bias)
2482 }
2483
2484 pub fn edits_since<'a, D>(
2485 &'a self,
2486 since: &'a clock::Global,
2487 ) -> impl 'a + Iterator<Item = Edit<D>>
2488 where
2489 D: TextDimension + Ord,
2490 {
2491 self.edits_since_in_range(since, Anchor::MIN..Anchor::MAX)
2492 }
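    // Illustrative sketch: collect the ranges in the current text that differ from a
    // previously captured version (for example, to invalidate only the affected rows
    // in a layer built on top of this buffer).
    //
    //     fn _changed_ranges(snapshot: &BufferSnapshot, since: &clock::Global) -> Vec<Range<usize>> {
    //         snapshot.edits_since::<usize>(since).map(|edit| edit.new).collect()
    //     }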
2493
2494 pub fn anchored_edits_since<'a, D>(
2495 &'a self,
2496 since: &'a clock::Global,
2497 ) -> impl 'a + Iterator<Item = (Edit<D>, Range<Anchor>)>
2498 where
2499 D: TextDimension + Ord,
2500 {
2501 self.anchored_edits_since_in_range(since, Anchor::MIN..Anchor::MAX)
2502 }
2503
2504 pub fn edits_since_in_range<'a, D>(
2505 &'a self,
2506 since: &'a clock::Global,
2507 range: Range<Anchor>,
2508 ) -> impl 'a + Iterator<Item = Edit<D>>
2509 where
2510 D: TextDimension + Ord,
2511 {
2512 self.anchored_edits_since_in_range(since, range)
2513 .map(|item| item.0)
2514 }
2515
2516 pub fn anchored_edits_since_in_range<'a, D>(
2517 &'a self,
2518 since: &'a clock::Global,
2519 range: Range<Anchor>,
2520 ) -> impl 'a + Iterator<Item = (Edit<D>, Range<Anchor>)>
2521 where
2522 D: TextDimension + Ord,
2523 {
2524 let fragments_cursor = if *since == self.version {
2525 None
2526 } else {
2527 let mut cursor = self.fragments.filter(&None, move |summary| {
2528 !since.observed_all(&summary.max_version)
2529 });
2530 cursor.next();
2531 Some(cursor)
2532 };
2533 let start_fragment_id = self.fragment_id_for_anchor(&range.start);
2534 let (start, _, item) = self
2535 .fragments
2536 .find::<Dimensions<Option<&Locator>, FragmentTextSummary>, _>(
2537 &None,
2538 &Some(start_fragment_id),
2539 Bias::Left,
2540 );
2541 let mut visible_start = start.1.visible;
2542 let mut deleted_start = start.1.deleted;
2543 if let Some(fragment) = item {
2544 let overshoot = range.start.offset - fragment.insertion_offset;
2545 if fragment.visible {
2546 visible_start += overshoot;
2547 } else {
2548 deleted_start += overshoot;
2549 }
2550 }
2551 let end_fragment_id = self.fragment_id_for_anchor(&range.end);
2552
2553 Edits {
2554 visible_cursor: self.visible_text.cursor(visible_start),
2555 deleted_cursor: self.deleted_text.cursor(deleted_start),
2556 fragments_cursor,
2557 undos: &self.undo_map,
2558 since,
2559 old_end: D::zero(()),
2560 new_end: D::zero(()),
2561 range: (start_fragment_id, range.start.offset)..(end_fragment_id, range.end.offset),
2562 buffer_id: self.remote_id,
2563 }
2564 }
2565
2566 pub fn has_edits_since_in_range(&self, since: &clock::Global, range: Range<Anchor>) -> bool {
2567 if *since != self.version {
2568 let start_fragment_id = self.fragment_id_for_anchor(&range.start);
2569 let end_fragment_id = self.fragment_id_for_anchor(&range.end);
2570 let mut cursor = self.fragments.filter::<_, usize>(&None, move |summary| {
2571 !since.observed_all(&summary.max_version)
2572 });
2573 cursor.next();
2574 while let Some(fragment) = cursor.item() {
2575 if fragment.id > *end_fragment_id {
2576 break;
2577 }
2578 if fragment.id > *start_fragment_id {
2579 let was_visible = fragment.was_visible(since, &self.undo_map);
2580 let is_visible = fragment.visible;
2581 if was_visible != is_visible {
2582 return true;
2583 }
2584 }
2585 cursor.next();
2586 }
2587 }
2588 false
2589 }
2590
2591 pub fn has_edits_since(&self, since: &clock::Global) -> bool {
2592 if *since != self.version {
2593 let mut cursor = self.fragments.filter::<_, usize>(&None, move |summary| {
2594 !since.observed_all(&summary.max_version)
2595 });
2596 cursor.next();
2597 while let Some(fragment) = cursor.item() {
2598 let was_visible = fragment.was_visible(since, &self.undo_map);
2599 let is_visible = fragment.visible;
2600 if was_visible != is_visible {
2601 return true;
2602 }
2603 cursor.next();
2604 }
2605 }
2606 false
2607 }
2608
2609 pub fn range_to_version(&self, range: Range<usize>, version: &clock::Global) -> Range<usize> {
2610 let mut offsets = self.offsets_to_version([range.start, range.end], version);
2611 offsets.next().unwrap()..offsets.next().unwrap()
2612 }
2613
2614 /// Converts the given sequence of offsets into their corresponding offsets
2615 /// at a prior version of this buffer.
2616 pub fn offsets_to_version<'a>(
2617 &'a self,
2618 offsets: impl 'a + IntoIterator<Item = usize>,
2619 version: &'a clock::Global,
2620 ) -> impl 'a + Iterator<Item = usize> {
2621 let mut edits = self.edits_since(version).peekable();
2622 let mut last_old_end = 0;
2623 let mut last_new_end = 0;
2624 offsets.into_iter().map(move |new_offset| {
2625 while let Some(edit) = edits.peek() {
2626 if edit.new.start > new_offset {
2627 break;
2628 }
2629
2630 if edit.new.end <= new_offset {
2631 last_new_end = edit.new.end;
2632 last_old_end = edit.old.end;
2633 edits.next();
2634 continue;
2635 }
2636
2637 let overshoot = new_offset - edit.new.start;
2638 return (edit.old.start + overshoot).min(edit.old.end);
2639 }
2640
2641 last_old_end + new_offset.saturating_sub(last_new_end)
2642 })
2643 }
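    // Illustrative sketch: translate selections expressed in current coordinates back
    // onto an older version of the text. A single `offsets_to_version` call expects
    // its offsets in ascending order; `range_to_version` satisfies this, since a
    // range's start never exceeds its end.
    //
    //     fn _selections_at_old_version(
    //         snapshot: &BufferSnapshot,
    //         selections: &[Range<usize>],
    //         old_version: &clock::Global,
    //     ) -> Vec<Range<usize>> {
    //         selections
    //             .iter()
    //             .map(|range| snapshot.range_to_version(range.clone(), old_version))
    //             .collect()
    //     }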
2644
2645 /// Visually annotates a position or range with the `Debug` representation of a value. The
2646 /// call site of this function is used as the key; previous annotations with the same key are removed.
2647 #[cfg(debug_assertions)]
2648 #[track_caller]
2649 pub fn debug<R, V>(&self, ranges: &R, value: V)
2650 where
2651 R: debug::ToDebugRanges,
2652 V: std::fmt::Debug,
2653 {
2654 self.debug_with_key(std::panic::Location::caller(), ranges, value);
2655 }
2656
2657 /// Visually annotates a position or range with the `Debug` representation of a value. Previous
2658 /// debug annotations with the same key will be removed. The key is also used to determine the
2659 /// annotation's color.
2660 #[cfg(debug_assertions)]
2661 pub fn debug_with_key<K, R, V>(&self, key: &K, ranges: &R, value: V)
2662 where
2663 K: std::hash::Hash + 'static,
2664 R: debug::ToDebugRanges,
2665 V: std::fmt::Debug,
2666 {
2667 let ranges = ranges
2668 .to_debug_ranges(self)
2669 .into_iter()
2670 .map(|range| self.anchor_after(range.start)..self.anchor_before(range.end))
2671 .collect();
2672 debug::GlobalDebugRanges::with_locked(|debug_ranges| {
2673 debug_ranges.insert(key, ranges, format!("{value:?}").into());
2674 });
2675 }
2676}
2677
2678struct RopeBuilder<'a> {
2679 old_visible_cursor: rope::Cursor<'a>,
2680 old_deleted_cursor: rope::Cursor<'a>,
2681 new_visible: Rope,
2682 new_deleted: Rope,
2683}
2684
2685impl<'a> RopeBuilder<'a> {
2686 fn new(old_visible_cursor: rope::Cursor<'a>, old_deleted_cursor: rope::Cursor<'a>) -> Self {
2687 Self {
2688 old_visible_cursor,
2689 old_deleted_cursor,
2690 new_visible: Rope::new(),
2691 new_deleted: Rope::new(),
2692 }
2693 }
2694
2695 fn append(&mut self, len: FragmentTextSummary) {
2696 self.push(len.visible, true, true);
2697 self.push(len.deleted, false, false);
2698 }
2699
2700 fn push_fragment(&mut self, fragment: &Fragment, was_visible: bool) {
2701 debug_assert!(fragment.len > 0);
2702 self.push(fragment.len, was_visible, fragment.visible)
2703 }
2704
2705 fn push(&mut self, len: usize, was_visible: bool, is_visible: bool) {
2706 let text = if was_visible {
2707 self.old_visible_cursor
2708 .slice(self.old_visible_cursor.offset() + len)
2709 } else {
2710 self.old_deleted_cursor
2711 .slice(self.old_deleted_cursor.offset() + len)
2712 };
2713 if is_visible {
2714 self.new_visible.append(text);
2715 } else {
2716 self.new_deleted.append(text);
2717 }
2718 }
2719
2720 fn push_str(&mut self, text: &str) {
2721 self.new_visible.push(text);
2722 }
2723
2724 fn finish(mut self) -> (Rope, Rope) {
2725 self.new_visible.append(self.old_visible_cursor.suffix());
2726 self.new_deleted.append(self.old_deleted_cursor.suffix());
2727 (self.new_visible, self.new_deleted)
2728 }
2729}
2730
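// Walks the fragments whose visibility changed since `since`, coalescing runs of
// adjacent insertions and deletions into a single `Edit`. A pending edit is emitted
// once an unchanged stretch of visible text separates it from the next changed
// fragment, or when the cursor moves past the requested anchor range.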
2731impl<D: TextDimension + Ord, F: FnMut(&FragmentSummary) -> bool> Iterator for Edits<'_, D, F> {
2732 type Item = (Edit<D>, Range<Anchor>);
2733
2734 fn next(&mut self) -> Option<Self::Item> {
2735 let mut pending_edit: Option<Self::Item> = None;
2736 let cursor = self.fragments_cursor.as_mut()?;
2737
2738 while let Some(fragment) = cursor.item() {
2739 if fragment.id < *self.range.start.0 {
2740 cursor.next();
2741 continue;
2742 } else if fragment.id > *self.range.end.0 {
2743 break;
2744 }
2745
2746 if cursor.start().visible > self.visible_cursor.offset() {
2747 let summary = self.visible_cursor.summary(cursor.start().visible);
2748 self.old_end.add_assign(&summary);
2749 self.new_end.add_assign(&summary);
2750 }
2751
2752 if pending_edit
2753 .as_ref()
2754 .is_some_and(|(change, _)| change.new.end < self.new_end)
2755 {
2756 break;
2757 }
2758
2759 let start_anchor = Anchor {
2760 timestamp: fragment.timestamp,
2761 offset: fragment.insertion_offset,
2762 bias: Bias::Right,
2763 buffer_id: Some(self.buffer_id),
2764 };
2765 let end_anchor = Anchor {
2766 timestamp: fragment.timestamp,
2767 offset: fragment.insertion_offset + fragment.len,
2768 bias: Bias::Left,
2769 buffer_id: Some(self.buffer_id),
2770 };
2771
2772 if !fragment.was_visible(self.since, self.undos) && fragment.visible {
2773 let mut visible_end = cursor.end().visible;
2774 if fragment.id == *self.range.end.0 {
2775 visible_end = cmp::min(
2776 visible_end,
2777 cursor.start().visible + (self.range.end.1 - fragment.insertion_offset),
2778 );
2779 }
2780
2781 let fragment_summary = self.visible_cursor.summary(visible_end);
2782 let mut new_end = self.new_end;
2783 new_end.add_assign(&fragment_summary);
2784 if let Some((edit, range)) = pending_edit.as_mut() {
2785 edit.new.end = new_end;
2786 range.end = end_anchor;
2787 } else {
2788 pending_edit = Some((
2789 Edit {
2790 old: self.old_end..self.old_end,
2791 new: self.new_end..new_end,
2792 },
2793 start_anchor..end_anchor,
2794 ));
2795 }
2796
2797 self.new_end = new_end;
2798 } else if fragment.was_visible(self.since, self.undos) && !fragment.visible {
2799 let mut deleted_end = cursor.end().deleted;
2800 if fragment.id == *self.range.end.0 {
2801 deleted_end = cmp::min(
2802 deleted_end,
2803 cursor.start().deleted + (self.range.end.1 - fragment.insertion_offset),
2804 );
2805 }
2806
2807 if cursor.start().deleted > self.deleted_cursor.offset() {
2808 self.deleted_cursor.seek_forward(cursor.start().deleted);
2809 }
2810 let fragment_summary = self.deleted_cursor.summary(deleted_end);
2811 let mut old_end = self.old_end;
2812 old_end.add_assign(&fragment_summary);
2813 if let Some((edit, range)) = pending_edit.as_mut() {
2814 edit.old.end = old_end;
2815 range.end = end_anchor;
2816 } else {
2817 pending_edit = Some((
2818 Edit {
2819 old: self.old_end..old_end,
2820 new: self.new_end..self.new_end,
2821 },
2822 start_anchor..end_anchor,
2823 ));
2824 }
2825
2826 self.old_end = old_end;
2827 }
2828
2829 cursor.next();
2830 }
2831
2832 pending_edit
2833 }
2834}
2835
2836impl Fragment {
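    // A fragment is currently visible if its insertion has not been undone and
    // every deletion that touched it has been undone.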
2837 fn is_visible(&self, undos: &UndoMap) -> bool {
2838 !undos.is_undone(self.timestamp) && self.deletions.iter().all(|d| undos.is_undone(*d))
2839 }
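    // A fragment was visible at `version` if the version had observed its insertion
    // (and the insertion was not undone as of that version), and every deletion the
    // version had observed was undone as of that version.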
2840
2841 fn was_visible(&self, version: &clock::Global, undos: &UndoMap) -> bool {
2842 (version.observed(self.timestamp) && !undos.was_undone(self.timestamp, version))
2843 && self
2844 .deletions
2845 .iter()
2846 .all(|d| !version.observed(*d) || undos.was_undone(*d, version))
2847 }
2848}
2849
2850impl sum_tree::Item for Fragment {
2851 type Summary = FragmentSummary;
2852
2853 fn summary(&self, _cx: &Option<clock::Global>) -> Self::Summary {
2854 let mut max_version = clock::Global::new();
2855 max_version.observe(self.timestamp);
2856 for deletion in &self.deletions {
2857 max_version.observe(*deletion);
2858 }
2859 max_version.join(&self.max_undos);
2860
2861 let mut min_insertion_version = clock::Global::new();
2862 min_insertion_version.observe(self.timestamp);
2863 let max_insertion_version = min_insertion_version.clone();
2864 if self.visible {
2865 FragmentSummary {
2866 max_id: self.id.clone(),
2867 text: FragmentTextSummary {
2868 visible: self.len,
2869 deleted: 0,
2870 },
2871 max_version,
2872 min_insertion_version,
2873 max_insertion_version,
2874 }
2875 } else {
2876 FragmentSummary {
2877 max_id: self.id.clone(),
2878 text: FragmentTextSummary {
2879 visible: 0,
2880 deleted: self.len,
2881 },
2882 max_version,
2883 min_insertion_version,
2884 max_insertion_version,
2885 }
2886 }
2887 }
2888}
2889
2890impl sum_tree::Summary for FragmentSummary {
2891 type Context<'a> = &'a Option<clock::Global>;
2892
2893 fn zero(_cx: Self::Context<'_>) -> Self {
2894 Default::default()
2895 }
2896
2897 fn add_summary(&mut self, other: &Self, _: Self::Context<'_>) {
2898 self.max_id.assign(&other.max_id);
2899 self.text.visible += &other.text.visible;
2900 self.text.deleted += &other.text.deleted;
2901 self.max_version.join(&other.max_version);
2902 self.min_insertion_version
2903 .meet(&other.min_insertion_version);
2904 self.max_insertion_version
2905 .join(&other.max_insertion_version);
2906 }
2907}
2908
2909impl Default for FragmentSummary {
2910 fn default() -> Self {
2911 FragmentSummary {
2912 max_id: Locator::min(),
2913 text: FragmentTextSummary::default(),
2914 max_version: clock::Global::new(),
2915 min_insertion_version: clock::Global::new(),
2916 max_insertion_version: clock::Global::new(),
2917 }
2918 }
2919}
2920
2921impl sum_tree::Item for InsertionFragment {
2922 type Summary = InsertionFragmentKey;
2923
2924 fn summary(&self, _cx: ()) -> Self::Summary {
2925 InsertionFragmentKey {
2926 timestamp: self.timestamp,
2927 split_offset: self.split_offset,
2928 }
2929 }
2930}
2931
2932impl sum_tree::KeyedItem for InsertionFragment {
2933 type Key = InsertionFragmentKey;
2934
2935 fn key(&self) -> Self::Key {
2936 sum_tree::Item::summary(self, ())
2937 }
2938}
2939
2940impl InsertionFragment {
2941 fn new(fragment: &Fragment) -> Self {
2942 Self {
2943 timestamp: fragment.timestamp,
2944 split_offset: fragment.insertion_offset,
2945 fragment_id: fragment.id.clone(),
2946 }
2947 }
2948
2949 fn insert_new(fragment: &Fragment) -> sum_tree::Edit<Self> {
2950 sum_tree::Edit::Insert(Self::new(fragment))
2951 }
2952}
2953
2954impl sum_tree::ContextLessSummary for InsertionFragmentKey {
2955 fn zero() -> Self {
2956 InsertionFragmentKey {
2957 timestamp: Lamport::MIN,
2958 split_offset: 0,
2959 }
2960 }
2961
2962 fn add_summary(&mut self, summary: &Self) {
2963 *self = *summary;
2964 }
2965}
2966
2967#[derive(Copy, Clone, Debug, Default, PartialEq, Eq, PartialOrd, Ord, Hash)]
2968pub struct FullOffset(pub usize);
2969
2970impl ops::AddAssign<usize> for FullOffset {
2971 fn add_assign(&mut self, rhs: usize) {
2972 self.0 += rhs;
2973 }
2974}
2975
2976impl ops::Add<usize> for FullOffset {
2977 type Output = Self;
2978
2979 fn add(mut self, rhs: usize) -> Self::Output {
2980 self += rhs;
2981 self
2982 }
2983}
2984
2985impl ops::Sub for FullOffset {
2986 type Output = usize;
2987
2988 fn sub(self, rhs: Self) -> Self::Output {
2989 self.0 - rhs.0
2990 }
2991}
2992
2993impl sum_tree::Dimension<'_, FragmentSummary> for usize {
2994 fn zero(_: &Option<clock::Global>) -> Self {
2995 Default::default()
2996 }
2997
2998 fn add_summary(&mut self, summary: &FragmentSummary, _: &Option<clock::Global>) {
2999 *self += summary.text.visible;
3000 }
3001}
3002
3003impl sum_tree::Dimension<'_, FragmentSummary> for FullOffset {
3004 fn zero(_: &Option<clock::Global>) -> Self {
3005 Default::default()
3006 }
3007
3008 fn add_summary(&mut self, summary: &FragmentSummary, _: &Option<clock::Global>) {
3009 self.0 += summary.text.visible + summary.text.deleted;
3010 }
3011}
3012
3013impl<'a> sum_tree::Dimension<'a, FragmentSummary> for Option<&'a Locator> {
3014 fn zero(_: &Option<clock::Global>) -> Self {
3015 Default::default()
3016 }
3017
3018 fn add_summary(&mut self, summary: &'a FragmentSummary, _: &Option<clock::Global>) {
3019 *self = Some(&summary.max_id);
3020 }
3021}
3022
3023impl sum_tree::SeekTarget<'_, FragmentSummary, FragmentTextSummary> for usize {
3024 fn cmp(
3025 &self,
3026 cursor_location: &FragmentTextSummary,
3027 _: &Option<clock::Global>,
3028 ) -> cmp::Ordering {
3029 Ord::cmp(self, &cursor_location.visible)
3030 }
3031}
3032
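// A full offset (visible plus deleted text) that is only meaningful relative to the
// version supplied as the cursor context. The offset only advances across fragments
// whose insertions that version has fully observed, and it becomes `Invalid` once
// the version has observed some, but not all, of the insertions in a summarized
// region.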
3033#[derive(Copy, Clone, Debug, Eq, PartialEq)]
3034enum VersionedFullOffset {
3035 Offset(FullOffset),
3036 Invalid,
3037}
3038
3039impl VersionedFullOffset {
3040 fn full_offset(&self) -> FullOffset {
3041 if let Self::Offset(position) = self {
3042 *position
3043 } else {
3044 panic!("invalid version")
3045 }
3046 }
3047}
3048
3049impl Default for VersionedFullOffset {
3050 fn default() -> Self {
3051 Self::Offset(Default::default())
3052 }
3053}
3054
3055impl<'a> sum_tree::Dimension<'a, FragmentSummary> for VersionedFullOffset {
3056 fn zero(_cx: &Option<clock::Global>) -> Self {
3057 Default::default()
3058 }
3059
3060 fn add_summary(&mut self, summary: &'a FragmentSummary, cx: &Option<clock::Global>) {
3061 if let Self::Offset(offset) = self {
3062 let version = cx.as_ref().unwrap();
3063 if version.observed_all(&summary.max_insertion_version) {
3064 *offset += summary.text.visible + summary.text.deleted;
3065 } else if version.observed_any(&summary.min_insertion_version) {
3066 *self = Self::Invalid;
3067 }
3068 }
3069 }
3070}
3071
3072impl sum_tree::SeekTarget<'_, FragmentSummary, Self> for VersionedFullOffset {
3073 fn cmp(&self, cursor_position: &Self, _: &Option<clock::Global>) -> cmp::Ordering {
3074 match (self, cursor_position) {
3075 (Self::Offset(a), Self::Offset(b)) => Ord::cmp(a, b),
3076 (Self::Offset(_), Self::Invalid) => cmp::Ordering::Less,
3077 (Self::Invalid, _) => unreachable!(),
3078 }
3079 }
3080}
3081
3082impl Operation {
3083 fn replica_id(&self) -> ReplicaId {
3084 operation_queue::Operation::lamport_timestamp(self).replica_id
3085 }
3086
3087 pub fn timestamp(&self) -> clock::Lamport {
3088 match self {
3089 Operation::Edit(edit) => edit.timestamp,
3090 Operation::Undo(undo) => undo.timestamp,
3091 }
3092 }
3093
3094 pub fn as_edit(&self) -> Option<&EditOperation> {
3095 match self {
3096 Operation::Edit(edit) => Some(edit),
3097 _ => None,
3098 }
3099 }
3100
3101 pub fn is_edit(&self) -> bool {
3102 matches!(self, Operation::Edit { .. })
3103 }
3104}
3105
3106impl operation_queue::Operation for Operation {
3107 fn lamport_timestamp(&self) -> clock::Lamport {
3108 match self {
3109 Operation::Edit(edit) => edit.timestamp,
3110 Operation::Undo(undo) => undo.timestamp,
3111 }
3112 }
3113}
3114
3115pub trait ToOffset {
3116 fn to_offset(&self, snapshot: &BufferSnapshot) -> usize;
3117 /// Returns the next offset in the buffer after this position, respecting UTF-8 character boundaries.
3118 fn to_next_offset(&self, snapshot: &BufferSnapshot) -> usize {
3119 snapshot
3120 .visible_text
3121 .ceil_char_boundary(self.to_offset(snapshot) + 1)
3122 }
3123 /// Returns the previous offset in the buffer before this position, respecting UTF-8 character boundaries.
3124 fn to_previous_offset(&self, snapshot: &BufferSnapshot) -> usize {
3125 snapshot
3126 .visible_text
3127 .floor_char_boundary(self.to_offset(snapshot).saturating_sub(1))
3128 }
3129}
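// Illustrative sketch: step one character backwards from a byte offset without
// landing inside a multi-byte UTF-8 sequence. `offset` is assumed to be a valid
// offset into the snapshot.
//
//     fn _previous_char_boundary(snapshot: &BufferSnapshot, offset: usize) -> usize {
//         offset.to_previous_offset(snapshot)
//     }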
3130
3131impl ToOffset for Point {
3132 #[inline]
3133 fn to_offset(&self, snapshot: &BufferSnapshot) -> usize {
3134 snapshot.point_to_offset(*self)
3135 }
3136}
3137
3138impl ToOffset for usize {
3139 fn to_offset(&self, snapshot: &BufferSnapshot) -> usize {
3140 assert!(
3141 *self <= snapshot.len(),
3142 "offset {} is out of range, snapshot length is {}",
3143 self,
3144 snapshot.len()
3145 );
3146 *self
3147 }
3148}
3149
3150impl ToOffset for Anchor {
3151 #[inline]
3152 fn to_offset(&self, snapshot: &BufferSnapshot) -> usize {
3153 snapshot.summary_for_anchor(self)
3154 }
3155}
3156
3157impl<T: ToOffset> ToOffset for &T {
3158 #[inline]
3159 fn to_offset(&self, content: &BufferSnapshot) -> usize {
3160 (*self).to_offset(content)
3161 }
3162}
3163
3164impl ToOffset for PointUtf16 {
3165 #[inline]
3166 fn to_offset(&self, snapshot: &BufferSnapshot) -> usize {
3167 snapshot.point_utf16_to_offset(*self)
3168 }
3169}
3170
3171impl ToOffset for Unclipped<PointUtf16> {
3172 #[inline]
3173 fn to_offset(&self, snapshot: &BufferSnapshot) -> usize {
3174 snapshot.unclipped_point_utf16_to_offset(*self)
3175 }
3176}
3177
3178pub trait ToPoint {
3179 fn to_point(&self, snapshot: &BufferSnapshot) -> Point;
3180}
3181
3182impl ToPoint for Anchor {
3183 #[inline]
3184 fn to_point(&self, snapshot: &BufferSnapshot) -> Point {
3185 snapshot.summary_for_anchor(self)
3186 }
3187}
3188
3189impl ToPoint for usize {
3190 #[inline]
3191 fn to_point(&self, snapshot: &BufferSnapshot) -> Point {
3192 snapshot.offset_to_point(*self)
3193 }
3194}
3195
3196impl ToPoint for Point {
3197 #[inline]
3198 fn to_point(&self, _: &BufferSnapshot) -> Point {
3199 *self
3200 }
3201}
3202
3203impl ToPoint for Unclipped<PointUtf16> {
3204 #[inline]
3205 fn to_point(&self, snapshot: &BufferSnapshot) -> Point {
3206 snapshot.unclipped_point_utf16_to_point(*self)
3207 }
3208}
3209
3210pub trait ToPointUtf16 {
3211 fn to_point_utf16(&self, snapshot: &BufferSnapshot) -> PointUtf16;
3212}
3213
3214impl ToPointUtf16 for Anchor {
3215 #[inline]
3216 fn to_point_utf16(&self, snapshot: &BufferSnapshot) -> PointUtf16 {
3217 snapshot.summary_for_anchor(self)
3218 }
3219}
3220
3221impl ToPointUtf16 for usize {
3222 #[inline]
3223 fn to_point_utf16(&self, snapshot: &BufferSnapshot) -> PointUtf16 {
3224 snapshot.offset_to_point_utf16(*self)
3225 }
3226}
3227
3228impl ToPointUtf16 for PointUtf16 {
3229 #[inline]
3230 fn to_point_utf16(&self, _: &BufferSnapshot) -> PointUtf16 {
3231 *self
3232 }
3233}
3234
3235impl ToPointUtf16 for Point {
3236 #[inline]
3237 fn to_point_utf16(&self, snapshot: &BufferSnapshot) -> PointUtf16 {
3238 snapshot.point_to_point_utf16(*self)
3239 }
3240}
3241
3242pub trait ToOffsetUtf16 {
3243 fn to_offset_utf16(&self, snapshot: &BufferSnapshot) -> OffsetUtf16;
3244}
3245
3246impl ToOffsetUtf16 for Anchor {
3247 #[inline]
3248 fn to_offset_utf16(&self, snapshot: &BufferSnapshot) -> OffsetUtf16 {
3249 snapshot.summary_for_anchor(self)
3250 }
3251}
3252
3253impl ToOffsetUtf16 for usize {
3254 #[inline]
3255 fn to_offset_utf16(&self, snapshot: &BufferSnapshot) -> OffsetUtf16 {
3256 snapshot.offset_to_offset_utf16(*self)
3257 }
3258}
3259
3260impl ToOffsetUtf16 for OffsetUtf16 {
3261 #[inline]
3262 fn to_offset_utf16(&self, _snapshot: &BufferSnapshot) -> OffsetUtf16 {
3263 *self
3264 }
3265}
3266
3267pub trait FromAnchor {
3268 fn from_anchor(anchor: &Anchor, snapshot: &BufferSnapshot) -> Self;
3269}
3270
3271impl FromAnchor for Anchor {
3272 #[inline]
3273 fn from_anchor(anchor: &Anchor, _snapshot: &BufferSnapshot) -> Self {
3274 *anchor
3275 }
3276}
3277
3278impl FromAnchor for Point {
3279 #[inline]
3280 fn from_anchor(anchor: &Anchor, snapshot: &BufferSnapshot) -> Self {
3281 snapshot.summary_for_anchor(anchor)
3282 }
3283}
3284
3285impl FromAnchor for PointUtf16 {
3286 #[inline]
3287 fn from_anchor(anchor: &Anchor, snapshot: &BufferSnapshot) -> Self {
3288 snapshot.summary_for_anchor(anchor)
3289 }
3290}
3291
3292impl FromAnchor for usize {
3293 #[inline]
3294 fn from_anchor(anchor: &Anchor, snapshot: &BufferSnapshot) -> Self {
3295 snapshot.summary_for_anchor(anchor)
3296 }
3297}
3298
3299#[derive(Clone, Copy, Debug, PartialEq)]
3300pub enum LineEnding {
3301 Unix,
3302 Windows,
3303}
3304
3305impl Default for LineEnding {
3306 fn default() -> Self {
3307 #[cfg(unix)]
3308 return Self::Unix;
3309
3310 #[cfg(not(unix))]
3311 return Self::Windows;
3312 }
3313}
3314
3315impl LineEnding {
3316 pub fn as_str(&self) -> &'static str {
3317 match self {
3318 LineEnding::Unix => "\n",
3319 LineEnding::Windows => "\r\n",
3320 }
3321 }
3322
3323 pub fn label(&self) -> &'static str {
3324 match self {
3325 LineEnding::Unix => "LF",
3326 LineEnding::Windows => "CRLF",
3327 }
3328 }
3329
3330 pub fn detect(text: &str) -> Self {
3331 let mut max_ix = cmp::min(text.len(), 1000);
3332 while !text.is_char_boundary(max_ix) {
3333 max_ix -= 1;
3334 }
3335
3336 if let Some(ix) = text[..max_ix].find(['\n']) {
3337 if ix > 0 && text.as_bytes()[ix - 1] == b'\r' {
3338 Self::Windows
3339 } else {
3340 Self::Unix
3341 }
3342 } else {
3343 Self::default()
3344 }
3345 }
3346
3347 pub fn normalize(text: &mut String) {
3348 if let Cow::Owned(replaced) = LINE_SEPARATORS_REGEX.replace_all(text, "\n") {
3349 *text = replaced;
3350 }
3351 }
3352
3353 pub fn normalize_arc(text: Arc<str>) -> Arc<str> {
3354 if let Cow::Owned(replaced) = LINE_SEPARATORS_REGEX.replace_all(&text, "\n") {
3355 replaced.into()
3356 } else {
3357 text
3358 }
3359 }
3360
3361 pub fn normalize_cow(text: Cow<str>) -> Cow<str> {
3362 if let Cow::Owned(replaced) = LINE_SEPARATORS_REGEX.replace_all(&text, "\n") {
3363 replaced.into()
3364 } else {
3365 text
3366 }
3367 }
3368}
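// Illustrative sketch of a typical load path: record the line ending implied by the
// first newline in the incoming text, then normalize the in-memory copy to "\n"
// only (the buffer never stores "\r\n", as `check_invariants` asserts).
//
//     fn _prepare_text(text: &mut String) -> LineEnding {
//         let line_ending = LineEnding::detect(text.as_str());
//         LineEnding::normalize(text);
//         line_ending
//     }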
3369
3370#[cfg(debug_assertions)]
3371pub mod debug {
3372 use super::*;
3373 use parking_lot::Mutex;
3374 use std::any::TypeId;
3375 use std::hash::{Hash, Hasher};
3376
3377 static GLOBAL_DEBUG_RANGES: Mutex<Option<GlobalDebugRanges>> = Mutex::new(None);
3378
3379 pub struct GlobalDebugRanges {
3380 pub ranges: Vec<DebugRange>,
3381 key_to_occurrence_index: HashMap<Key, usize>,
3382 next_occurrence_index: usize,
3383 }
3384
3385 pub struct DebugRange {
3386 key: Key,
3387 pub ranges: Vec<Range<Anchor>>,
3388 pub value: Arc<str>,
3389 pub occurrence_index: usize,
3390 }
3391
3392 #[derive(Debug, Clone, PartialEq, Eq, Hash)]
3393 struct Key {
3394 type_id: TypeId,
3395 hash: u64,
3396 }
3397
3398 impl GlobalDebugRanges {
3399 pub fn with_locked<R>(f: impl FnOnce(&mut Self) -> R) -> R {
3400 let mut state = GLOBAL_DEBUG_RANGES.lock();
3401 if state.is_none() {
3402 *state = Some(GlobalDebugRanges {
3403 ranges: Vec::new(),
3404 key_to_occurrence_index: HashMap::default(),
3405 next_occurrence_index: 0,
3406 });
3407 }
3408 if let Some(global_debug_ranges) = state.as_mut() {
3409 f(global_debug_ranges)
3410 } else {
3411 unreachable!()
3412 }
3413 }
3414
3415 pub fn insert<K: Hash + 'static>(
3416 &mut self,
3417 key: &K,
3418 ranges: Vec<Range<Anchor>>,
3419 value: Arc<str>,
3420 ) {
3421 let occurrence_index = *self
3422 .key_to_occurrence_index
3423 .entry(Key::new(key))
3424 .or_insert_with(|| {
3425 let occurrence_index = self.next_occurrence_index;
3426 self.next_occurrence_index += 1;
3427 occurrence_index
3428 });
3429 let key = Key::new(key);
3430 let existing = self
3431 .ranges
3432 .iter()
3433 .enumerate()
3434 .rfind(|(_, existing)| existing.key == key);
3435 if let Some((existing_ix, _)) = existing {
3436 self.ranges.remove(existing_ix);
3437 }
3438 self.ranges.push(DebugRange {
3439 ranges,
3440 key,
3441 value,
3442 occurrence_index,
3443 });
3444 }
3445
3446 pub fn remove<K: Hash + 'static>(&mut self, key: &K) {
3447 self.remove_impl(&Key::new(key));
3448 }
3449
3450 fn remove_impl(&mut self, key: &Key) {
3451 let existing = self
3452 .ranges
3453 .iter()
3454 .enumerate()
3455 .rfind(|(_, existing)| &existing.key == key);
3456 if let Some((existing_ix, _)) = existing {
3457 self.ranges.remove(existing_ix);
3458 }
3459 }
3460
3461 pub fn remove_all_with_key_type<K: 'static>(&mut self) {
3462 self.ranges
3463 .retain(|item| item.key.type_id != TypeId::of::<K>());
3464 }
3465 }
3466
3467 impl Key {
3468 fn new<K: Hash + 'static>(key: &K) -> Self {
3469 let type_id = TypeId::of::<K>();
3470 let mut hasher = collections::FxHasher::default();
3471 key.hash(&mut hasher);
3472 Key {
3473 type_id,
3474 hash: hasher.finish(),
3475 }
3476 }
3477 }
3478
3479 pub trait ToDebugRanges {
3480 fn to_debug_ranges(&self, snapshot: &BufferSnapshot) -> Vec<Range<usize>>;
3481 }
3482
3483 impl<T: ToOffset> ToDebugRanges for T {
3484 fn to_debug_ranges(&self, snapshot: &BufferSnapshot) -> Vec<Range<usize>> {
3485 [self.to_offset(snapshot)].to_debug_ranges(snapshot)
3486 }
3487 }
3488
3489 impl<T: ToOffset + Clone> ToDebugRanges for Range<T> {
3490 fn to_debug_ranges(&self, snapshot: &BufferSnapshot) -> Vec<Range<usize>> {
3491 [self.clone()].to_debug_ranges(snapshot)
3492 }
3493 }
3494
3495 impl<T: ToOffset> ToDebugRanges for Vec<T> {
3496 fn to_debug_ranges(&self, snapshot: &BufferSnapshot) -> Vec<Range<usize>> {
3497 self.as_slice().to_debug_ranges(snapshot)
3498 }
3499 }
3500
3501 impl<T: ToOffset> ToDebugRanges for Vec<Range<T>> {
3502 fn to_debug_ranges(&self, snapshot: &BufferSnapshot) -> Vec<Range<usize>> {
3503 self.as_slice().to_debug_ranges(snapshot)
3504 }
3505 }
3506
3507 impl<T: ToOffset> ToDebugRanges for [T] {
3508 fn to_debug_ranges(&self, snapshot: &BufferSnapshot) -> Vec<Range<usize>> {
3509 self.iter()
3510 .map(|item| {
3511 let offset = item.to_offset(snapshot);
3512 offset..offset
3513 })
3514 .collect()
3515 }
3516 }
3517
3518 impl<T: ToOffset> ToDebugRanges for [Range<T>] {
3519 fn to_debug_ranges(&self, snapshot: &BufferSnapshot) -> Vec<Range<usize>> {
3520 self.iter()
3521 .map(|range| range.start.to_offset(snapshot)..range.end.to_offset(snapshot))
3522 .collect()
3523 }
3524 }
3525}