1mod anchor;
2pub mod locator;
3#[cfg(any(test, feature = "test-support"))]
4pub mod network;
5pub mod operation_queue;
6mod patch;
7mod selection;
8pub mod subscription;
9#[cfg(test)]
10mod tests;
11mod undo_map;
12
13pub use anchor::*;
14use anyhow::{Context as _, Result};
15use clock::Lamport;
16pub use clock::ReplicaId;
17use collections::{HashMap, HashSet};
18use locator::Locator;
19use operation_queue::OperationQueue;
20pub use patch::Patch;
21use postage::{oneshot, prelude::*};
22
23use regex::Regex;
24pub use rope::*;
25pub use selection::*;
26use smallvec::SmallVec;
27use std::{
28 borrow::Cow,
29 cmp::{self, Ordering, Reverse},
30 fmt::Display,
31 future::Future,
32 iter::Iterator,
33 num::NonZeroU64,
34 ops::{self, Deref, Range, Sub},
35 str,
36 sync::{Arc, LazyLock},
37 time::{Duration, Instant},
38};
39pub use subscription::*;
40pub use sum_tree::Bias;
41use sum_tree::{Dimensions, FilterCursor, SumTree, TreeMap, TreeSet};
42use undo_map::UndoMap;
43use util::debug_panic;
44
45#[cfg(any(test, feature = "test-support"))]
46use util::RandomCharIter;
47
/// Matches a Windows (`\r\n`) or bare carriage-return (`\r`) line separator,
/// for replacing non-`\n` line endings when normalizing buffer text.
static LINE_SEPARATORS_REGEX: LazyLock<Regex> =
    LazyLock::new(|| Regex::new(r"\r\n|\r").expect("Failed to create LINE_SEPARATORS_REGEX"));
50
51pub type TransactionId = clock::Lamport;
52
/// A mutable text buffer backed by a CRDT, supporting concurrent edits from
/// multiple replicas, transactional undo/redo, and edit subscriptions.
pub struct Buffer {
    /// The buffer's current state (text, fragments, version).
    snapshot: BufferSnapshot,
    /// Undo/redo stacks plus the log of all operations seen so far.
    history: History,
    /// Remote operations that cannot be applied yet because some of their
    /// causal dependencies have not been observed.
    deferred_ops: OperationQueue<Operation>,
    /// Replicas that currently have at least one deferred operation.
    deferred_replicas: HashSet<ReplicaId>,
    pub lamport_clock: clock::Lamport,
    /// Subscribers that receive a `Patch` describing each edit.
    subscriptions: Topic<usize>,
    /// Channels resolved once the edit with the given timestamp is applied.
    edit_id_resolvers: HashMap<clock::Lamport, Vec<oneshot::Sender<()>>>,
    /// Channels resolved once the buffer's version observes the given version.
    wait_for_version_txs: Vec<(clock::Global, oneshot::Sender<()>)>,
}
63
/// A stable identifier for a buffer, shared by all replicas.
///
/// Backed by `NonZeroU64` so an id of zero is unrepresentable and
/// `Option<BufferId>` costs no extra space.
#[repr(transparent)]
#[derive(Clone, Copy, Debug, Hash, PartialEq, PartialOrd, Ord, Eq)]
pub struct BufferId(NonZeroU64);
67
68impl Display for BufferId {
69 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
70 write!(f, "{}", self.0)
71 }
72}
73
74impl From<NonZeroU64> for BufferId {
75 fn from(id: NonZeroU64) -> Self {
76 BufferId(id)
77 }
78}
79
80impl BufferId {
81 /// Returns Err if `id` is outside of BufferId domain.
82 pub fn new(id: u64) -> anyhow::Result<Self> {
83 let id = NonZeroU64::new(id).context("Buffer id cannot be 0.")?;
84 Ok(Self(id))
85 }
86
87 /// Increments this buffer id, returning the old value.
88 /// So that's a post-increment operator in disguise.
89 pub fn next(&mut self) -> Self {
90 let old = *self;
91 self.0 = self.0.saturating_add(1);
92 old
93 }
94
95 pub fn to_proto(self) -> u64 {
96 self.into()
97 }
98}
99
100impl From<BufferId> for u64 {
101 fn from(id: BufferId) -> Self {
102 id.0.get()
103 }
104}
105
/// A view of the buffer's state at a point in time.
#[derive(Clone)]
pub struct BufferSnapshot {
    /// The buffer's current (non-deleted) text.
    visible_text: Rope,
    /// The text of deleted fragments, retained so deleted regions can still
    /// be resolved and restored.
    deleted_text: Rope,
    /// All fragments — visible and deleted — in document order.
    fragments: SumTree<Fragment>,
    /// Fragments indexed by their insertion (timestamp, split offset).
    insertions: SumTree<InsertionFragment>,
    /// Records which insertion ranges each edit touched (see
    /// `InsertionSlice::from_fragment`). // NOTE(review): consumers of this
    /// set are outside this chunk — confirm exact role before relying on it.
    insertion_slices: TreeSet<InsertionSlice>,
    /// Tracks how many times each edit has been undone.
    undo_map: UndoMap,
    pub version: clock::Global,
    /// Stable buffer id shared by all replicas.
    remote_id: BufferId,
    /// The replica this snapshot was produced on.
    replica_id: ReplicaId,
    /// The line-ending style detected for (or assigned to) this buffer.
    line_ending: LineEnding,
}
119
/// A transaction on the undo/redo stack, plus the timing metadata used to
/// decide whether adjacent transactions should be grouped into one undo step.
#[derive(Clone, Debug)]
pub struct HistoryEntry {
    transaction: Transaction,
    /// When the transaction was started.
    first_edit_at: Instant,
    /// When the transaction last received an edit (updated when it ends).
    last_edit_at: Instant,
    /// When true, this entry is never merged with neighbors during grouping.
    suppress_grouping: bool,
}
127
/// A group of edit operations that are undone and redone together.
#[derive(Clone, Debug)]
pub struct Transaction {
    /// Unique id, allocated from the Lamport clock when the transaction starts.
    pub id: TransactionId,
    /// The edit operations belonging to this transaction.
    pub edit_ids: Vec<clock::Lamport>,
    /// The buffer version at the moment the transaction began.
    pub start: clock::Global,
}
134
135impl Transaction {
136 pub fn merge_in(&mut self, other: Transaction) {
137 self.edit_ids.extend(other.edit_ids);
138 }
139}
140
141impl HistoryEntry {
142 pub fn transaction_id(&self) -> TransactionId {
143 self.transaction.id
144 }
145}
146
/// The buffer's operation log together with its undo/redo stacks.
struct History {
    /// The buffer's contents when it was created.
    base_text: Rope,
    /// Every operation applied to the buffer, keyed by timestamp.
    operations: TreeMap<clock::Lamport, Operation>,
    undo_stack: Vec<HistoryEntry>,
    redo_stack: Vec<HistoryEntry>,
    /// Depth of nested `start_transaction` calls; edits are only recorded
    /// into a transaction while this is non-zero.
    transaction_depth: usize,
    /// Maximum idle time between transactions for them to be grouped into a
    /// single undo step.
    group_interval: Duration,
}
155
/// Records that edit `edit_id` touched bytes `range` of the insertion
/// identified by `insertion_id` (see `InsertionSlice::from_fragment`).
#[derive(Clone, Debug, Eq, PartialEq)]
struct InsertionSlice {
    edit_id: clock::Lamport,
    insertion_id: clock::Lamport,
    range: Range<usize>,
}
162
163impl Ord for InsertionSlice {
164 fn cmp(&self, other: &Self) -> Ordering {
165 self.edit_id
166 .cmp(&other.edit_id)
167 .then_with(|| self.insertion_id.cmp(&other.insertion_id))
168 .then_with(|| self.range.start.cmp(&other.range.start))
169 .then_with(|| self.range.end.cmp(&other.range.end))
170 }
171}
172
impl PartialOrd for InsertionSlice {
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        // Delegate to the total order defined by `Ord`.
        Some(self.cmp(other))
    }
}
178
179impl InsertionSlice {
180 fn from_fragment(edit_id: clock::Lamport, fragment: &Fragment) -> Self {
181 Self {
182 edit_id,
183 insertion_id: fragment.timestamp,
184 range: fragment.insertion_offset..fragment.insertion_offset + fragment.len,
185 }
186 }
187}
188
impl History {
    /// Creates an empty history whose base (pre-edit) state is `base_text`.
    pub fn new(base_text: Rope) -> Self {
        Self {
            base_text,
            operations: Default::default(),
            undo_stack: Vec::new(),
            redo_stack: Vec::new(),
            transaction_depth: 0,
            // Don't group transactions in tests unless we opt in, because it's a footgun.
            #[cfg(any(test, feature = "test-support"))]
            group_interval: Duration::ZERO,
            #[cfg(not(any(test, feature = "test-support")))]
            group_interval: Duration::from_millis(300),
        }
    }

    /// Records an operation in the log, keyed by its timestamp.
    fn push(&mut self, op: Operation) {
        self.operations.insert(op.timestamp(), op);
    }

    /// Begins a (possibly nested) transaction. Only the outermost call
    /// allocates a transaction id and pushes an undo-stack entry; nested
    /// calls just increment the depth and return `None`.
    fn start_transaction(
        &mut self,
        start: clock::Global,
        now: Instant,
        clock: &mut clock::Lamport,
    ) -> Option<TransactionId> {
        self.transaction_depth += 1;
        if self.transaction_depth == 1 {
            let id = clock.tick();
            self.undo_stack.push(HistoryEntry {
                transaction: Transaction {
                    id,
                    start,
                    edit_ids: Default::default(),
                },
                first_edit_at: now,
                last_edit_at: now,
                suppress_grouping: false,
            });
            Some(id)
        } else {
            None
        }
    }

    /// Ends one level of transaction nesting. When the outermost level ends,
    /// an empty transaction is discarded; otherwise the redo stack is
    /// cleared and the completed entry is returned.
    fn end_transaction(&mut self, now: Instant) -> Option<&HistoryEntry> {
        assert_ne!(self.transaction_depth, 0);
        self.transaction_depth -= 1;
        if self.transaction_depth == 0 {
            if self
                .undo_stack
                .last()
                .unwrap()
                .transaction
                .edit_ids
                .is_empty()
            {
                self.undo_stack.pop();
                None
            } else {
                self.redo_stack.clear();
                let entry = self.undo_stack.last_mut().unwrap();
                entry.last_edit_at = now;
                Some(entry)
            }
        } else {
            None
        }
    }

    /// Merges trailing undo-stack entries that happened within
    /// `group_interval` of each other (and aren't marked
    /// `suppress_grouping`) into a single transaction. Returns the id of the
    /// resulting last entry. Assumes the stack is in chronological order.
    fn group(&mut self) -> Option<TransactionId> {
        let mut count = 0;
        let mut entries = self.undo_stack.iter();
        if let Some(mut entry) = entries.next_back() {
            while let Some(prev_entry) = entries.next_back() {
                if !prev_entry.suppress_grouping
                    && entry.first_edit_at - prev_entry.last_edit_at < self.group_interval
                {
                    entry = prev_entry;
                    count += 1;
                } else {
                    break;
                }
            }
        }
        self.group_trailing(count)
    }

    /// Merges every entry above `transaction_id` into it, stopping early if
    /// an intervening entry suppresses grouping or the id is not found.
    fn group_until(&mut self, transaction_id: TransactionId) {
        let mut count = 0;
        for entry in self.undo_stack.iter().rev() {
            if entry.transaction_id() == transaction_id {
                self.group_trailing(count);
                break;
            } else if entry.suppress_grouping {
                break;
            } else {
                count += 1;
            }
        }
    }

    /// Merges the last `n` undo-stack entries into the entry preceding them,
    /// which absorbs their edit ids and last-edit time. Returns the id of
    /// the new last entry, if any. Callers must guarantee `n` does not
    /// exceed the stack length.
    fn group_trailing(&mut self, n: usize) -> Option<TransactionId> {
        let new_len = self.undo_stack.len() - n;
        let (entries_to_keep, entries_to_merge) = self.undo_stack.split_at_mut(new_len);
        if let Some(last_entry) = entries_to_keep.last_mut() {
            for entry in &*entries_to_merge {
                for edit_id in &entry.transaction.edit_ids {
                    last_entry.transaction.edit_ids.push(*edit_id);
                }
            }

            if let Some(entry) = entries_to_merge.last_mut() {
                last_entry.last_edit_at = entry.last_edit_at;
            }
        }

        self.undo_stack.truncate(new_len);
        self.undo_stack.last().map(|e| e.transaction.id)
    }

    /// Prevents the most recent transaction from being grouped with future
    /// transactions, returning it if one exists.
    fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
        self.undo_stack.last_mut().map(|entry| {
            entry.suppress_grouping = true;
            &entry.transaction
        })
    }

    /// Pushes an already-built transaction onto the undo stack.
    fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
        assert_eq!(self.transaction_depth, 0);
        self.undo_stack.push(HistoryEntry {
            transaction,
            first_edit_at: now,
            last_edit_at: now,
            suppress_grouping: false,
        });
    }

    /// Differs from `push_transaction` in that it does not clear the redo
    /// stack. Intended to be used to create a parent transaction to merge
    /// potential child transactions into.
    ///
    /// The caller is responsible for removing it from the undo history using
    /// `forget_transaction` if no edits are merged into it. Otherwise, if edits
    /// are merged into this transaction, the caller is responsible for ensuring
    /// the redo stack is cleared. The easiest way to ensure the redo stack is
    /// cleared is to create transactions with the usual `start_transaction` and
    /// `end_transaction` methods and merging the resulting transactions into
    /// the transaction created by this method
    fn push_empty_transaction(
        &mut self,
        start: clock::Global,
        now: Instant,
        clock: &mut clock::Lamport,
    ) -> TransactionId {
        assert_eq!(self.transaction_depth, 0);
        let id = clock.tick();
        let transaction = Transaction {
            id,
            start,
            edit_ids: Vec::new(),
        };
        self.undo_stack.push(HistoryEntry {
            transaction,
            first_edit_at: now,
            last_edit_at: now,
            suppress_grouping: false,
        });
        id
    }

    /// Associates an edit operation with the transaction at the top of the
    /// undo stack. Non-edit (undo) operations are ignored.
    fn push_undo(&mut self, op_id: clock::Lamport) {
        assert_ne!(self.transaction_depth, 0);
        if let Some(Operation::Edit(_)) = self.operations.get(&op_id) {
            let last_transaction = self.undo_stack.last_mut().unwrap();
            last_transaction.transaction.edit_ids.push(op_id);
        }
    }

    /// Moves the most recent undo-stack entry to the redo stack and returns it.
    fn pop_undo(&mut self) -> Option<&HistoryEntry> {
        assert_eq!(self.transaction_depth, 0);
        if let Some(entry) = self.undo_stack.pop() {
            self.redo_stack.push(entry);
            self.redo_stack.last()
        } else {
            None
        }
    }

    /// Moves the entry for `transaction_id` (wherever it sits in the undo
    /// stack) to the redo stack and returns it.
    fn remove_from_undo(&mut self, transaction_id: TransactionId) -> Option<&HistoryEntry> {
        assert_eq!(self.transaction_depth, 0);

        let entry_ix = self
            .undo_stack
            .iter()
            .rposition(|entry| entry.transaction.id == transaction_id)?;
        let entry = self.undo_stack.remove(entry_ix);
        self.redo_stack.push(entry);
        self.redo_stack.last()
    }

    /// Moves every undo-stack entry from `transaction_id` (inclusive) up to
    /// the top onto the redo stack, most recent first, returning the moved
    /// entries. Moves nothing if the id is not found.
    fn remove_from_undo_until(&mut self, transaction_id: TransactionId) -> &[HistoryEntry] {
        assert_eq!(self.transaction_depth, 0);

        let redo_stack_start_len = self.redo_stack.len();
        if let Some(entry_ix) = self
            .undo_stack
            .iter()
            .rposition(|entry| entry.transaction.id == transaction_id)
        {
            self.redo_stack
                .extend(self.undo_stack.drain(entry_ix..).rev());
        }
        &self.redo_stack[redo_stack_start_len..]
    }

    /// Removes the transaction from whichever stack contains it, returning it.
    fn forget(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
        assert_eq!(self.transaction_depth, 0);
        if let Some(entry_ix) = self
            .undo_stack
            .iter()
            .rposition(|entry| entry.transaction.id == transaction_id)
        {
            Some(self.undo_stack.remove(entry_ix).transaction)
        } else if let Some(entry_ix) = self
            .redo_stack
            .iter()
            .rposition(|entry| entry.transaction.id == transaction_id)
        {
            Some(self.redo_stack.remove(entry_ix).transaction)
        } else {
            None
        }
    }

    /// Looks up a transaction by id on either stack (undo first).
    fn transaction(&self, transaction_id: TransactionId) -> Option<&Transaction> {
        let entry = self
            .undo_stack
            .iter()
            .rfind(|entry| entry.transaction.id == transaction_id)
            .or_else(|| {
                self.redo_stack
                    .iter()
                    .rfind(|entry| entry.transaction.id == transaction_id)
            })?;
        Some(&entry.transaction)
    }

    /// Mutable variant of [`History::transaction`].
    fn transaction_mut(&mut self, transaction_id: TransactionId) -> Option<&mut Transaction> {
        let entry = self
            .undo_stack
            .iter_mut()
            .rfind(|entry| entry.transaction.id == transaction_id)
            .or_else(|| {
                self.redo_stack
                    .iter_mut()
                    .rfind(|entry| entry.transaction.id == transaction_id)
            })?;
        Some(&mut entry.transaction)
    }

    /// Removes `transaction` from the history and appends its edits to
    /// `destination`. NOTE(review): if `destination` no longer exists, the
    /// forgotten transaction's edits are silently dropped — confirm callers
    /// guarantee the destination is present.
    fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
        if let Some(transaction) = self.forget(transaction)
            && let Some(destination) = self.transaction_mut(destination)
        {
            destination.edit_ids.extend(transaction.edit_ids);
        }
    }

    /// Moves the most recent redo-stack entry back onto the undo stack and
    /// returns it.
    fn pop_redo(&mut self) -> Option<&HistoryEntry> {
        assert_eq!(self.transaction_depth, 0);
        if let Some(entry) = self.redo_stack.pop() {
            self.undo_stack.push(entry);
            self.undo_stack.last()
        } else {
            None
        }
    }

    /// Moves every redo-stack entry from `transaction_id` (inclusive) up to
    /// the top back onto the undo stack, most recent first, returning the
    /// moved entries.
    fn remove_from_redo(&mut self, transaction_id: TransactionId) -> &[HistoryEntry] {
        assert_eq!(self.transaction_depth, 0);

        let undo_stack_start_len = self.undo_stack.len();
        if let Some(entry_ix) = self
            .redo_stack
            .iter()
            .rposition(|entry| entry.transaction.id == transaction_id)
        {
            self.undo_stack
                .extend(self.redo_stack.drain(entry_ix..).rev());
        }
        &self.undo_stack[undo_stack_start_len..]
    }
}
483
/// State for iterating over the edits made to a buffer since version
/// `since`, reported in coordinates of type `D`.
///
/// NOTE(review): the `Iterator` impl for this type is outside this chunk;
/// field roles below are inferred from names/types — confirm against it.
struct Edits<'a, D: TextDimension, F: FnMut(&FragmentSummary) -> bool> {
    /// Cursor over the buffer's visible text.
    visible_cursor: rope::Cursor<'a>,
    /// Cursor over the buffer's deleted text.
    deleted_cursor: rope::Cursor<'a>,
    /// Cursor over fragments, filtered (by `F`) to those affected since `since`.
    fragments_cursor: Option<FilterCursor<'a, 'static, F, Fragment, FragmentTextSummary>>,
    undos: &'a UndoMap,
    since: &'a clock::Global,
    /// Position reached so far in the old (pre-edit) coordinate space.
    old_end: D,
    /// Position reached so far in the new (post-edit) coordinate space.
    new_end: D,
    range: Range<(&'a Locator, usize)>,
    buffer_id: BufferId,
}
495
/// A replacement of the range `old` (in pre-edit coordinates) with the
/// range `new` (in post-edit coordinates).
#[derive(Clone, Debug, Default, Eq, PartialEq)]
pub struct Edit<D> {
    pub old: Range<D>,
    pub new: Range<D>,
}

impl<D> Edit<D>
where
    D: PartialEq,
{
    /// True when both the old and new ranges are empty.
    pub fn is_empty(&self) -> bool {
        let old_is_empty = self.old.start == self.old.end;
        old_is_empty && self.new.start == self.new.end
    }
}

impl<D, DDelta> Edit<D>
where
    D: Sub<D, Output = DDelta> + Copy,
{
    /// Length of the replaced (old) range.
    pub fn old_len(&self) -> DDelta {
        let Range { start, end } = self.old;
        end - start
    }

    /// Length of the replacement (new) range.
    pub fn new_len(&self) -> DDelta {
        let Range { start, end } = self.new;
        end - start
    }
}

impl<D1, D2> Edit<(D1, D2)> {
    /// Splits an edit over paired coordinates into one edit per coordinate.
    pub fn flatten(self) -> (Edit<D1>, Edit<D2>) {
        let Edit { old, new } = self;
        let first = Edit {
            old: old.start.0..old.end.0,
            new: new.start.0..new.end.0,
        };
        let second = Edit {
            old: old.start.1..old.end.1,
            new: new.start.1..new.end.1,
        };
        (first, second)
    }
}
537
/// A contiguous piece of one insertion, positioned in the document by `id`.
///
/// Fragments are split as later edits land inside them; deletion marks a
/// fragment invisible rather than removing it.
#[derive(Eq, PartialEq, Clone, Debug)]
pub struct Fragment {
    /// This fragment's position in the document's total order.
    pub id: Locator,
    /// Timestamp of the insertion this fragment is a piece of.
    pub timestamp: clock::Lamport,
    /// Offset of this fragment within its original insertion.
    pub insertion_offset: usize,
    /// Length of this fragment's text.
    pub len: usize,
    /// False once the fragment has been deleted.
    pub visible: bool,
    /// Timestamps of the edits that deleted this fragment.
    pub deletions: SmallVec<[clock::Lamport; 2]>,
    /// Undo versions affecting this fragment. // NOTE(review): semantics
    /// inferred from the name; the code using it is outside this chunk.
    pub max_undos: clock::Global,
}
548
/// Aggregated summary over a subtree of `Fragment`s in the fragment tree.
#[derive(Eq, PartialEq, Clone, Debug)]
pub struct FragmentSummary {
    /// Total visible and deleted text lengths in the subtree.
    text: FragmentTextSummary,
    /// The greatest fragment `id` in the subtree.
    max_id: Locator,
    max_version: clock::Global,
    min_insertion_version: clock::Global,
    max_insertion_version: clock::Global,
}
557
/// Lengths of visible and deleted text within a span of fragments.
#[derive(Copy, Default, Clone, Debug, PartialEq, Eq)]
struct FragmentTextSummary {
    visible: usize,
    deleted: usize,
}
563
impl<'a> sum_tree::Dimension<'a, FragmentSummary> for FragmentTextSummary {
    fn zero(_: &Option<clock::Global>) -> Self {
        Default::default()
    }

    /// Accumulates visible/deleted lengths while traversing the fragment tree.
    fn add_summary(&mut self, summary: &'a FragmentSummary, _: &Option<clock::Global>) {
        self.visible += summary.text.visible;
        self.deleted += summary.text.deleted;
    }
}
574
/// Index entry mapping a point within an insertion to the fragment that now
/// contains it, enabling lookup of fragments by insertion id and offset.
#[derive(Eq, PartialEq, Clone, Debug)]
struct InsertionFragment {
    /// Timestamp of the original insertion.
    timestamp: clock::Lamport,
    /// Offset within the insertion where this fragment begins.
    split_offset: usize,
    /// Id of the fragment holding this part of the insertion.
    fragment_id: Locator,
}
581
/// Sort key for `InsertionFragment`s: insertion timestamp, then split offset.
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)]
struct InsertionFragmentKey {
    timestamp: clock::Lamport,
    split_offset: usize,
}
587
/// An operation that can be applied to any replica of the buffer.
#[derive(Clone, Debug, Eq, PartialEq)]
pub enum Operation {
    /// Insertions and/or deletions of text.
    Edit(EditOperation),
    /// An undo of previously applied edits.
    Undo(UndoOperation),
}
593
/// An edit expressed against a specific buffer version, so that replicas
/// with concurrent edits can apply it deterministically.
#[derive(Clone, Debug, Eq, PartialEq)]
pub struct EditOperation {
    /// Lamport timestamp uniquely identifying this edit.
    pub timestamp: clock::Lamport,
    /// The buffer version this edit's ranges are interpreted against.
    pub version: clock::Global,
    /// The replaced ranges, as full offsets (which count deleted text).
    pub ranges: Vec<Range<FullOffset>>,
    /// Replacement text for each range, in the same order as `ranges`.
    pub new_text: Vec<Arc<str>>,
}
601
/// Undoes previously applied edits by updating their undo counts.
#[derive(Clone, Debug, Eq, PartialEq)]
pub struct UndoOperation {
    /// Lamport timestamp uniquely identifying this undo.
    pub timestamp: clock::Lamport,
    /// The buffer version this undo was created against.
    pub version: clock::Global,
    /// New undo count for each affected edit. // NOTE(review): exact count
    /// semantics live in `UndoMap`, outside this chunk — confirm there.
    pub counts: HashMap<clock::Lamport, u32>,
}
608
/// Stores information about the indentation of a line (tabs and spaces).
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub struct LineIndent {
    /// Number of tab characters in the line's leading whitespace.
    pub tabs: u32,
    /// Number of space characters in the line's leading whitespace.
    pub spaces: u32,
    /// Whether the line contains nothing but whitespace.
    pub line_blank: bool,
}
616
617impl LineIndent {
618 pub fn from_chunks(chunks: &mut Chunks) -> Self {
619 let mut tabs = 0;
620 let mut spaces = 0;
621 let mut line_blank = true;
622
623 'outer: while let Some(chunk) = chunks.peek() {
624 for ch in chunk.chars() {
625 if ch == '\t' {
626 tabs += 1;
627 } else if ch == ' ' {
628 spaces += 1;
629 } else {
630 if ch != '\n' {
631 line_blank = false;
632 }
633 break 'outer;
634 }
635 }
636
637 chunks.next();
638 }
639
640 Self {
641 tabs,
642 spaces,
643 line_blank,
644 }
645 }
646
647 /// Constructs a new `LineIndent` which only contains spaces.
648 pub fn spaces(spaces: u32) -> Self {
649 Self {
650 tabs: 0,
651 spaces,
652 line_blank: true,
653 }
654 }
655
656 /// Constructs a new `LineIndent` which only contains tabs.
657 pub fn tabs(tabs: u32) -> Self {
658 Self {
659 tabs,
660 spaces: 0,
661 line_blank: true,
662 }
663 }
664
665 /// Indicates whether the line is empty.
666 pub fn is_line_empty(&self) -> bool {
667 self.tabs == 0 && self.spaces == 0 && self.line_blank
668 }
669
670 /// Indicates whether the line is blank (contains only whitespace).
671 pub fn is_line_blank(&self) -> bool {
672 self.line_blank
673 }
674
675 /// Returns the number of indentation characters (tabs or spaces).
676 pub fn raw_len(&self) -> u32 {
677 self.tabs + self.spaces
678 }
679
680 /// Returns the number of indentation characters (tabs or spaces), taking tab size into account.
681 pub fn len(&self, tab_size: u32) -> u32 {
682 self.tabs * tab_size + self.spaces
683 }
684}
685
686impl From<&str> for LineIndent {
687 fn from(value: &str) -> Self {
688 Self::from_iter(value.chars())
689 }
690}
691
692impl FromIterator<char> for LineIndent {
693 fn from_iter<T: IntoIterator<Item = char>>(chars: T) -> Self {
694 let mut tabs = 0;
695 let mut spaces = 0;
696 let mut line_blank = true;
697 for c in chars {
698 if c == '\t' {
699 tabs += 1;
700 } else if c == ' ' {
701 spaces += 1;
702 } else {
703 if c != '\n' {
704 line_blank = false;
705 }
706 break;
707 }
708 }
709 Self {
710 tabs,
711 spaces,
712 line_blank,
713 }
714 }
715}
716
717impl Buffer {
    /// Creates a buffer from `base_text`, detecting its line-ending style and
    /// normalizing all line separators to `\n`.
    pub fn new(replica_id: ReplicaId, remote_id: BufferId, base_text: impl Into<String>) -> Buffer {
        let mut base_text = base_text.into();
        let line_ending = LineEnding::detect(&base_text);
        LineEnding::normalize(&mut base_text);
        Self::new_normalized(replica_id, remote_id, line_ending, Rope::from(&*base_text))
    }
724
    /// Creates a buffer from text whose line separators have already been
    /// normalized to `\n`.
    ///
    /// Non-empty base text becomes a single visible fragment spanning the
    /// whole document.
    pub fn new_normalized(
        replica_id: ReplicaId,
        remote_id: BufferId,
        line_ending: LineEnding,
        normalized: Rope,
    ) -> Buffer {
        let history = History::new(normalized);
        let mut fragments = SumTree::new(&None);
        let mut insertions = SumTree::default();

        let mut lamport_clock = clock::Lamport::new(replica_id);
        let mut version = clock::Global::new();

        let visible_text = history.base_text.clone();
        if !visible_text.is_empty() {
            // The base text's insertion timestamp uses `ReplicaId::LOCAL`
            // rather than this replica's id — presumably so every replica
            // attributes the initial fragment identically; confirm against
            // the replication protocol.
            let insertion_timestamp = clock::Lamport::new(ReplicaId::LOCAL);
            lamport_clock.observe(insertion_timestamp);
            version.observe(insertion_timestamp);
            let fragment_id = Locator::between(&Locator::min(), &Locator::max());
            let fragment = Fragment {
                id: fragment_id,
                timestamp: insertion_timestamp,
                insertion_offset: 0,
                len: visible_text.len(),
                visible: true,
                deletions: Default::default(),
                max_undos: Default::default(),
            };
            insertions.push(InsertionFragment::new(&fragment), ());
            fragments.push(fragment, &None);
        }

        Buffer {
            snapshot: BufferSnapshot {
                replica_id,
                remote_id,
                visible_text,
                deleted_text: Rope::new(),
                line_ending,
                fragments,
                insertions,
                version,
                undo_map: Default::default(),
                insertion_slices: Default::default(),
            },
            history,
            deferred_ops: OperationQueue::new(),
            deferred_replicas: HashSet::default(),
            lamport_clock,
            subscriptions: Default::default(),
            edit_id_resolvers: Default::default(),
            wait_for_version_txs: Default::default(),
        }
    }
779
    /// The buffer's current version vector.
    pub fn version(&self) -> clock::Global {
        self.version.clone()
    }

    /// A reference to the buffer's current snapshot.
    pub fn snapshot(&self) -> &BufferSnapshot {
        &self.snapshot
    }

    /// Consumes the buffer, returning its final snapshot.
    pub fn into_snapshot(self) -> BufferSnapshot {
        self.snapshot
    }
791
    /// Creates an independent copy of this buffer: it shares the current
    /// snapshot but starts with a fresh history and its own clock, using the
    /// dedicated `LOCAL_BRANCH` replica id.
    pub fn branch(&self) -> Self {
        Self {
            snapshot: self.snapshot.clone(),
            history: History::new(self.base_text().clone()),
            deferred_ops: OperationQueue::new(),
            deferred_replicas: HashSet::default(),
            lamport_clock: clock::Lamport::new(ReplicaId::LOCAL_BRANCH),
            subscriptions: Default::default(),
            edit_id_resolvers: Default::default(),
            wait_for_version_txs: Default::default(),
        }
    }
804
    /// The id of the replica this buffer instance belongs to.
    pub fn replica_id(&self) -> ReplicaId {
        self.lamport_clock.replica_id
    }

    /// The buffer's stable id, shared by all replicas.
    pub fn remote_id(&self) -> BufferId {
        self.remote_id
    }

    /// Number of remote operations waiting for unmet causal dependencies.
    pub fn deferred_ops_len(&self) -> usize {
        self.deferred_ops.len()
    }

    /// The maximum idle time between transactions for them to be grouped
    /// into a single undo step.
    pub fn transaction_group_interval(&self) -> Duration {
        self.history.group_interval
    }
820
    /// Applies a batch of local edits in one transaction, returning the
    /// operation to broadcast to other replicas.
    ///
    /// Each edit is a `(range, new_text)` pair; ranges are resolved against
    /// the buffer state before any of the edits are applied. NOTE(review):
    /// the single forward cursor pass in `apply_local_edit` implies the
    /// ranges must be sorted and non-overlapping — confirm callers ensure
    /// this.
    ///
    /// Panics if `edits` is empty (see the `unwrap` in `apply_local_edit`).
    pub fn edit<R, I, S, T>(&mut self, edits: R) -> Operation
    where
        R: IntoIterator<IntoIter = I>,
        I: ExactSizeIterator<Item = (Range<S>, T)>,
        S: ToOffset,
        T: Into<Arc<str>>,
    {
        let edits = edits
            .into_iter()
            .map(|(range, new_text)| (range, new_text.into()));

        self.start_transaction();
        let timestamp = self.lamport_clock.tick();
        let operation = Operation::Edit(self.apply_local_edit(edits, timestamp));

        // Log the operation, attach it to the current transaction, and mark
        // it observed — in that order, since `push_undo` looks the operation
        // up in the log.
        self.history.push(operation.clone());
        self.history.push_undo(operation.timestamp());
        self.snapshot.version.observe(operation.timestamp());
        self.end_transaction();
        operation
    }
842
    /// Core of local editing: splices `edits` (offset ranges plus
    /// replacement text) into the fragment tree in a single forward pass,
    /// rebuilding the visible/deleted ropes and producing the
    /// `EditOperation` that describes this edit to other replicas.
    ///
    /// Panics if `edits` is empty.
    fn apply_local_edit<S: ToOffset, T: Into<Arc<str>>>(
        &mut self,
        edits: impl ExactSizeIterator<Item = (Range<S>, T)>,
        timestamp: clock::Lamport,
    ) -> EditOperation {
        let mut edits_patch = Patch::default();
        let mut edit_op = EditOperation {
            timestamp,
            version: self.version(),
            ranges: Vec::with_capacity(edits.len()),
            new_text: Vec::with_capacity(edits.len()),
        };
        let mut new_insertions = Vec::new();
        // Running offset of newly inserted text within this edit's insertion.
        let mut insertion_offset = 0;
        let mut insertion_slices = Vec::new();

        let mut edits = edits
            .map(|(range, new_text)| (range.to_offset(&*self), new_text))
            .peekable();

        let mut new_ropes =
            RopeBuilder::new(self.visible_text.cursor(0), self.deleted_text.cursor(0));
        let mut old_fragments = self.fragments.cursor::<FragmentTextSummary>(&None);
        // Reuse all fragments strictly before the first edited position.
        let mut new_fragments = old_fragments.slice(&edits.peek().unwrap().0.start, Bias::Right);
        new_ropes.append(new_fragments.summary().text);

        let mut fragment_start = old_fragments.start().visible;
        for (range, new_text) in edits {
            let new_text = LineEnding::normalize_arc(new_text.into());
            let fragment_end = old_fragments.end().visible;

            // If the current fragment ends before this range, then jump ahead to the first fragment
            // that extends past the start of this range, reusing any intervening fragments.
            if fragment_end < range.start {
                // If the current fragment has been partially consumed, then consume the rest of it
                // and advance to the next fragment before slicing.
                if fragment_start > old_fragments.start().visible {
                    if fragment_end > fragment_start {
                        let mut suffix = old_fragments.item().unwrap().clone();
                        suffix.len = fragment_end - fragment_start;
                        suffix.insertion_offset += fragment_start - old_fragments.start().visible;
                        new_insertions.push(InsertionFragment::insert_new(&suffix));
                        new_ropes.push_fragment(&suffix, suffix.visible);
                        new_fragments.push(suffix, &None);
                    }
                    old_fragments.next();
                }

                let slice = old_fragments.slice(&range.start, Bias::Right);
                new_ropes.append(slice.summary().text);
                new_fragments.append(slice, &None);
                fragment_start = old_fragments.start().visible;
            }

            // Full offsets count deleted text too, so add the deleted bytes
            // preceding the cursor position.
            let full_range_start = FullOffset(range.start + old_fragments.start().deleted);

            // Preserve any portion of the current fragment that precedes this range.
            if fragment_start < range.start {
                let mut prefix = old_fragments.item().unwrap().clone();
                prefix.len = range.start - fragment_start;
                prefix.insertion_offset += fragment_start - old_fragments.start().visible;
                prefix.id = Locator::between(&new_fragments.summary().max_id, &prefix.id);
                new_insertions.push(InsertionFragment::insert_new(&prefix));
                new_ropes.push_fragment(&prefix, prefix.visible);
                new_fragments.push(prefix, &None);
                fragment_start = range.start;
            }

            // Insert the new text before any existing fragments within the range.
            if !new_text.is_empty() {
                let new_start = new_fragments.summary().text.visible;

                let fragment = Fragment {
                    id: Locator::between(
                        &new_fragments.summary().max_id,
                        old_fragments
                            .item()
                            .map_or(&Locator::max(), |old_fragment| &old_fragment.id),
                    ),
                    timestamp,
                    insertion_offset,
                    len: new_text.len(),
                    deletions: Default::default(),
                    max_undos: Default::default(),
                    visible: true,
                };
                edits_patch.push(Edit {
                    old: fragment_start..fragment_start,
                    new: new_start..new_start + new_text.len(),
                });
                insertion_slices.push(InsertionSlice::from_fragment(timestamp, &fragment));
                new_insertions.push(InsertionFragment::insert_new(&fragment));
                new_ropes.push_str(new_text.as_ref());
                new_fragments.push(fragment, &None);
                insertion_offset += new_text.len();
            }

            // Advance through every fragment that intersects this range, marking the intersecting
            // portions as deleted.
            while fragment_start < range.end {
                let fragment = old_fragments.item().unwrap();
                let fragment_end = old_fragments.end().visible;
                let mut intersection = fragment.clone();
                let intersection_end = cmp::min(range.end, fragment_end);
                if fragment.visible {
                    intersection.len = intersection_end - fragment_start;
                    intersection.insertion_offset += fragment_start - old_fragments.start().visible;
                    intersection.id =
                        Locator::between(&new_fragments.summary().max_id, &intersection.id);
                    intersection.deletions.push(timestamp);
                    intersection.visible = false;
                }
                if intersection.len > 0 {
                    // Only report a patch entry for text that was actually
                    // visible before this deletion.
                    if fragment.visible && !intersection.visible {
                        let new_start = new_fragments.summary().text.visible;
                        edits_patch.push(Edit {
                            old: fragment_start..intersection_end,
                            new: new_start..new_start,
                        });
                        insertion_slices
                            .push(InsertionSlice::from_fragment(timestamp, &intersection));
                    }
                    new_insertions.push(InsertionFragment::insert_new(&intersection));
                    new_ropes.push_fragment(&intersection, fragment.visible);
                    new_fragments.push(intersection, &None);
                    fragment_start = intersection_end;
                }
                if fragment_end <= range.end {
                    old_fragments.next();
                }
            }

            let full_range_end = FullOffset(range.end + old_fragments.start().deleted);
            edit_op.ranges.push(full_range_start..full_range_end);
            edit_op.new_text.push(new_text);
        }

        // If the current fragment has been partially consumed, then consume the rest of it
        // and advance to the next fragment before slicing.
        if fragment_start > old_fragments.start().visible {
            let fragment_end = old_fragments.end().visible;
            if fragment_end > fragment_start {
                let mut suffix = old_fragments.item().unwrap().clone();
                suffix.len = fragment_end - fragment_start;
                suffix.insertion_offset += fragment_start - old_fragments.start().visible;
                new_insertions.push(InsertionFragment::insert_new(&suffix));
                new_ropes.push_fragment(&suffix, suffix.visible);
                new_fragments.push(suffix, &None);
            }
            old_fragments.next();
        }

        // Reuse everything after the last edit unchanged.
        let suffix = old_fragments.suffix();
        new_ropes.append(suffix.summary().text);
        new_fragments.append(suffix, &None);
        let (visible_text, deleted_text) = new_ropes.finish();
        drop(old_fragments);

        // Swap the rebuilt trees and ropes into the snapshot and notify
        // subscribers of the coalesced patch.
        self.snapshot.fragments = new_fragments;
        self.snapshot.insertions.edit(new_insertions, ());
        self.snapshot.visible_text = visible_text;
        self.snapshot.deleted_text = deleted_text;
        self.subscriptions.publish_mut(&edits_patch);
        self.snapshot.insertion_slices.extend(insertion_slices);
        edit_op
    }
1009
    /// Overrides the line-ending style recorded for this buffer.
    pub fn set_line_ending(&mut self, line_ending: LineEnding) {
        self.snapshot.line_ending = line_ending;
    }
1013
    /// Applies operations received from other replicas.
    ///
    /// Operations whose causal dependencies are not yet satisfied are
    /// deferred; `flush_deferred_ops` retries them once newly applied
    /// operations may have unblocked them.
    pub fn apply_ops<I: IntoIterator<Item = Operation>>(&mut self, ops: I) {
        let mut deferred_ops = Vec::new();
        for op in ops {
            self.history.push(op.clone());
            if self.can_apply_op(&op) {
                self.apply_op(op);
            } else {
                // Track which replicas are blocked so deferral can be
                // detected cheaply elsewhere.
                self.deferred_replicas.insert(op.replica_id());
                deferred_ops.push(op);
            }
        }
        self.deferred_ops.insert(deferred_ops);
        self.flush_deferred_ops();
    }
1028
    /// Applies a single remote operation whose dependencies are satisfied,
    /// then wakes any waiters whose awaited version has now been reached.
    fn apply_op(&mut self, op: Operation) {
        match op {
            Operation::Edit(edit) => {
                // Skip operations that have already been observed (duplicates).
                if !self.version.observed(edit.timestamp) {
                    self.apply_remote_edit(
                        &edit.version,
                        &edit.ranges,
                        &edit.new_text,
                        edit.timestamp,
                    );
                    self.snapshot.version.observe(edit.timestamp);
                    self.lamport_clock.observe(edit.timestamp);
                    self.resolve_edit(edit.timestamp);
                }
            }
            Operation::Undo(undo) => {
                if !self.version.observed(undo.timestamp) {
                    self.apply_undo(&undo);
                    self.snapshot.version.observe(undo.timestamp);
                    self.lamport_clock.observe(undo.timestamp);
                }
            }
        }
        // Notify waiters whose target version is fully observed; keep the rest.
        self.wait_for_version_txs.retain_mut(|(version, tx)| {
            if self.snapshot.version().observed_all(version) {
                tx.try_send(()).ok();
                false
            } else {
                true
            }
        });
    }
1061
    /// Integrates an edit operation produced by another replica.
    ///
    /// `ranges` are disjoint, ascending ranges expressed as `FullOffset`s
    /// (offsets that include deleted text) in the buffer state described by
    /// `version`; `new_text` holds one replacement string per range. The
    /// fragment tree is rebuilt by reusing untouched fragments wholesale,
    /// splitting fragments that intersect an edited range, interleaving new
    /// fragments for the inserted text, and marking deleted intersections
    /// invisible. Concurrent insertions at the same position are ordered by
    /// lamport timestamp so all replicas converge on the same fragment order.
    fn apply_remote_edit(
        &mut self,
        version: &clock::Global,
        ranges: &[Range<FullOffset>],
        new_text: &[Arc<str>],
        timestamp: clock::Lamport,
    ) {
        if ranges.is_empty() {
            return;
        }

        let edits = ranges.iter().zip(new_text.iter());
        let mut edits_patch = Patch::default();
        let mut insertion_slices = Vec::new();
        // Seek the old fragment tree in coordinates of the remote `version`.
        let cx = Some(version.clone());
        let mut new_insertions = Vec::new();
        // Offset of the next inserted character within this operation's
        // overall insertion, accumulated across all of its ranges.
        let mut insertion_offset = 0;
        let mut new_ropes =
            RopeBuilder::new(self.visible_text.cursor(0), self.deleted_text.cursor(0));
        let mut old_fragments = self
            .fragments
            .cursor::<Dimensions<VersionedFullOffset, usize>>(&cx);
        let mut new_fragments =
            old_fragments.slice(&VersionedFullOffset::Offset(ranges[0].start), Bias::Left);
        new_ropes.append(new_fragments.summary().text);

        let mut fragment_start = old_fragments.start().0.full_offset();
        for (range, new_text) in edits {
            let fragment_end = old_fragments.end().0.full_offset();

            // If the current fragment ends before this range, then jump ahead to the first fragment
            // that extends past the start of this range, reusing any intervening fragments.
            if fragment_end < range.start {
                // If the current fragment has been partially consumed, then consume the rest of it
                // and advance to the next fragment before slicing.
                if fragment_start > old_fragments.start().0.full_offset() {
                    if fragment_end > fragment_start {
                        let mut suffix = old_fragments.item().unwrap().clone();
                        suffix.len = fragment_end.0 - fragment_start.0;
                        suffix.insertion_offset +=
                            fragment_start - old_fragments.start().0.full_offset();
                        new_insertions.push(InsertionFragment::insert_new(&suffix));
                        new_ropes.push_fragment(&suffix, suffix.visible);
                        new_fragments.push(suffix, &None);
                    }
                    old_fragments.next();
                }

                let slice =
                    old_fragments.slice(&VersionedFullOffset::Offset(range.start), Bias::Left);
                new_ropes.append(slice.summary().text);
                new_fragments.append(slice, &None);
                fragment_start = old_fragments.start().0.full_offset();
            }

            // If we are at the end of a non-concurrent fragment, advance to the next one.
            let fragment_end = old_fragments.end().0.full_offset();
            if fragment_end == range.start && fragment_end > fragment_start {
                let mut fragment = old_fragments.item().unwrap().clone();
                fragment.len = fragment_end.0 - fragment_start.0;
                fragment.insertion_offset += fragment_start - old_fragments.start().0.full_offset();
                new_insertions.push(InsertionFragment::insert_new(&fragment));
                new_ropes.push_fragment(&fragment, fragment.visible);
                new_fragments.push(fragment, &None);
                old_fragments.next();
                fragment_start = old_fragments.start().0.full_offset();
            }

            // Skip over insertions that are concurrent to this edit, but have a lower lamport
            // timestamp. This tie-break is what makes concurrent inserts at the
            // same position converge identically on every replica.
            while let Some(fragment) = old_fragments.item() {
                if fragment_start == range.start && fragment.timestamp > timestamp {
                    new_ropes.push_fragment(fragment, fragment.visible);
                    new_fragments.push(fragment.clone(), &None);
                    old_fragments.next();
                    debug_assert_eq!(fragment_start, range.start);
                } else {
                    break;
                }
            }
            debug_assert!(fragment_start <= range.start);

            // Preserve any portion of the current fragment that precedes this range.
            if fragment_start < range.start {
                let mut prefix = old_fragments.item().unwrap().clone();
                prefix.len = range.start.0 - fragment_start.0;
                prefix.insertion_offset += fragment_start - old_fragments.start().0.full_offset();
                prefix.id = Locator::between(&new_fragments.summary().max_id, &prefix.id);
                new_insertions.push(InsertionFragment::insert_new(&prefix));
                fragment_start = range.start;
                new_ropes.push_fragment(&prefix, prefix.visible);
                new_fragments.push(prefix, &None);
            }

            // Insert the new text before any existing fragments within the range.
            if !new_text.is_empty() {
                // `old_start`/`new_start` are *visible* offsets used to report
                // this change to subscribers via the patch.
                let mut old_start = old_fragments.start().1;
                if old_fragments.item().is_some_and(|f| f.visible) {
                    old_start += fragment_start.0 - old_fragments.start().0.full_offset().0;
                }
                let new_start = new_fragments.summary().text.visible;
                let fragment = Fragment {
                    id: Locator::between(
                        &new_fragments.summary().max_id,
                        old_fragments
                            .item()
                            .map_or(&Locator::max(), |old_fragment| &old_fragment.id),
                    ),
                    timestamp,
                    insertion_offset,
                    len: new_text.len(),
                    deletions: Default::default(),
                    max_undos: Default::default(),
                    visible: true,
                };
                edits_patch.push(Edit {
                    old: old_start..old_start,
                    new: new_start..new_start + new_text.len(),
                });
                insertion_slices.push(InsertionSlice::from_fragment(timestamp, &fragment));
                new_insertions.push(InsertionFragment::insert_new(&fragment));
                new_ropes.push_str(new_text);
                new_fragments.push(fragment, &None);
                insertion_offset += new_text.len();
            }

            // Advance through every fragment that intersects this range, marking the intersecting
            // portions as deleted.
            while fragment_start < range.end {
                let fragment = old_fragments.item().unwrap();
                let fragment_end = old_fragments.end().0.full_offset();
                let mut intersection = fragment.clone();
                let intersection_end = cmp::min(range.end, fragment_end);
                // Only delete text that was visible at the remote `version`;
                // anything else was already deleted by a concurrent edit.
                if fragment.was_visible(version, &self.undo_map) {
                    intersection.len = intersection_end.0 - fragment_start.0;
                    intersection.insertion_offset +=
                        fragment_start - old_fragments.start().0.full_offset();
                    intersection.id =
                        Locator::between(&new_fragments.summary().max_id, &intersection.id);
                    intersection.deletions.push(timestamp);
                    intersection.visible = false;
                    insertion_slices.push(InsertionSlice::from_fragment(timestamp, &intersection));
                }
                if intersection.len > 0 {
                    // Report the deletion only if this edit is what actually
                    // hides the fragment from the local visible text.
                    if fragment.visible && !intersection.visible {
                        let old_start = old_fragments.start().1
                            + (fragment_start.0 - old_fragments.start().0.full_offset().0);
                        let new_start = new_fragments.summary().text.visible;
                        edits_patch.push(Edit {
                            old: old_start..old_start + intersection.len,
                            new: new_start..new_start,
                        });
                    }
                    new_insertions.push(InsertionFragment::insert_new(&intersection));
                    new_ropes.push_fragment(&intersection, fragment.visible);
                    new_fragments.push(intersection, &None);
                    fragment_start = intersection_end;
                }
                if fragment_end <= range.end {
                    old_fragments.next();
                }
            }
        }

        // If the current fragment has been partially consumed, then consume the rest of it
        // and advance to the next fragment before slicing.
        if fragment_start > old_fragments.start().0.full_offset() {
            let fragment_end = old_fragments.end().0.full_offset();
            if fragment_end > fragment_start {
                let mut suffix = old_fragments.item().unwrap().clone();
                suffix.len = fragment_end.0 - fragment_start.0;
                suffix.insertion_offset += fragment_start - old_fragments.start().0.full_offset();
                new_insertions.push(InsertionFragment::insert_new(&suffix));
                new_ropes.push_fragment(&suffix, suffix.visible);
                new_fragments.push(suffix, &None);
            }
            old_fragments.next();
        }

        // Reuse everything after the last edited range unchanged.
        let suffix = old_fragments.suffix();
        new_ropes.append(suffix.summary().text);
        new_fragments.append(suffix, &None);
        let (visible_text, deleted_text) = new_ropes.finish();
        drop(old_fragments);

        // Swap the rebuilt state into the snapshot and notify subscribers.
        self.snapshot.fragments = new_fragments;
        self.snapshot.visible_text = visible_text;
        self.snapshot.deleted_text = deleted_text;
        self.snapshot.insertions.edit(new_insertions, ());
        self.snapshot.insertion_slices.extend(insertion_slices);
        self.subscriptions.publish_mut(&edits_patch)
    }
1254
    /// Returns the locators of every fragment produced by the given edits,
    /// sorted in fragment-tree order.
    fn fragment_ids_for_edits<'a>(
        &'a self,
        edit_ids: impl Iterator<Item = &'a clock::Lamport>,
    ) -> Vec<&'a Locator> {
        // Get all of the insertion slices changed by the given edits.
        let mut insertion_slices = Vec::new();
        for edit_id in edit_ids {
            // Probe key: the smallest possible slice for this edit id, so
            // `iter_from` starts at the edit's first slice.
            let insertion_slice = InsertionSlice {
                edit_id: *edit_id,
                insertion_id: clock::Lamport::MIN,
                range: 0..0,
            };
            let slices = self
                .snapshot
                .insertion_slices
                .iter_from(&insertion_slice)
                .take_while(|slice| slice.edit_id == *edit_id);
            insertion_slices.extend(slices)
        }
        // Sort by insertion id and start so the cursor below only ever needs
        // to seek forward.
        insertion_slices
            .sort_unstable_by_key(|s| (s.insertion_id, s.range.start, Reverse(s.range.end)));

        // Get all of the fragments corresponding to these insertion slices.
        let mut fragment_ids = Vec::new();
        let mut insertions_cursor = self.insertions.cursor::<InsertionFragmentKey>(());
        for insertion_slice in &insertion_slices {
            // Seek only when the cursor is behind the slice's start;
            // overlapping slices may already leave it in position.
            if insertion_slice.insertion_id != insertions_cursor.start().timestamp
                || insertion_slice.range.start > insertions_cursor.start().split_offset
            {
                insertions_cursor.seek_forward(
                    &InsertionFragmentKey {
                        timestamp: insertion_slice.insertion_id,
                        split_offset: insertion_slice.range.start,
                    },
                    Bias::Left,
                );
            }
            // Collect every insertion fragment covered by this slice.
            while let Some(item) = insertions_cursor.item() {
                if item.timestamp != insertion_slice.insertion_id
                    || item.split_offset >= insertion_slice.range.end
                {
                    break;
                }
                fragment_ids.push(&item.fragment_id);
                insertions_cursor.next();
            }
        }
        fragment_ids.sort_unstable();
        fragment_ids
    }
1305
    /// Applies an undo/redo operation (local or remote) by recording its
    /// per-edit undo counts in the undo map and then recomputing the
    /// visibility of every fragment belonging to the affected edits,
    /// rebuilding the ropes and publishing a patch of the visible changes.
    fn apply_undo(&mut self, undo: &UndoOperation) {
        self.snapshot.undo_map.insert(undo);

        let mut edits = Patch::default();
        let mut old_fragments = self
            .fragments
            .cursor::<Dimensions<Option<&Locator>, usize>>(&None);
        let mut new_fragments = SumTree::new(&None);
        let mut new_ropes =
            RopeBuilder::new(self.visible_text.cursor(0), self.deleted_text.cursor(0));

        // Fragment ids arrive sorted, so a single forward pass suffices.
        for fragment_id in self.fragment_ids_for_edits(undo.counts.keys()) {
            // Reuse everything up to the affected fragment unchanged.
            let preceding_fragments = old_fragments.slice(&Some(fragment_id), Bias::Left);
            new_ropes.append(preceding_fragments.summary().text);
            new_fragments.append(preceding_fragments, &None);

            if let Some(fragment) = old_fragments.item() {
                let mut fragment = fragment.clone();
                let fragment_was_visible = fragment.visible;

                // Re-derive visibility from the updated undo map.
                fragment.visible = fragment.is_visible(&self.undo_map);
                fragment.max_undos.observe(undo.timestamp);

                // Emit a patch entry only when visibility actually flipped.
                let old_start = old_fragments.start().1;
                let new_start = new_fragments.summary().text.visible;
                if fragment_was_visible && !fragment.visible {
                    edits.push(Edit {
                        old: old_start..old_start + fragment.len,
                        new: new_start..new_start,
                    });
                } else if !fragment_was_visible && fragment.visible {
                    edits.push(Edit {
                        old: old_start..old_start,
                        new: new_start..new_start + fragment.len,
                    });
                }
                new_ropes.push_fragment(&fragment, fragment_was_visible);
                new_fragments.push(fragment, &None);

                old_fragments.next();
            }
        }

        // Copy the remainder of the buffer unchanged.
        let suffix = old_fragments.suffix();
        new_ropes.append(suffix.summary().text);
        new_fragments.append(suffix, &None);

        drop(old_fragments);
        let (visible_text, deleted_text) = new_ropes.finish();
        self.snapshot.fragments = new_fragments;
        self.snapshot.visible_text = visible_text;
        self.snapshot.deleted_text = deleted_text;
        self.subscriptions.publish_mut(&edits);
    }
1360
    /// Retries all deferred operations, applying those whose causal
    /// dependencies are now satisfied and re-deferring the rest.
    fn flush_deferred_ops(&mut self) {
        // Clear the per-replica block list so every op gets re-checked;
        // `can_apply_op` repopulates it as ops are re-deferred below.
        self.deferred_replicas.clear();
        let mut deferred_ops = Vec::new();
        for op in self.deferred_ops.drain().iter().cloned() {
            if self.can_apply_op(&op) {
                self.apply_op(op);
            } else {
                self.deferred_replicas.insert(op.replica_id());
                deferred_ops.push(op);
            }
        }
        self.deferred_ops.insert(deferred_ops);
    }
1374
1375 fn can_apply_op(&self, op: &Operation) -> bool {
1376 if self.deferred_replicas.contains(&op.replica_id()) {
1377 false
1378 } else {
1379 self.version.observed_all(match op {
1380 Operation::Edit(edit) => &edit.version,
1381 Operation::Undo(undo) => &undo.version,
1382 })
1383 }
1384 }
1385
    /// Returns whether any remote operations are still waiting for their
    /// causal dependencies to arrive before they can be applied.
    pub fn has_deferred_ops(&self) -> bool {
        !self.deferred_ops.is_empty()
    }

    /// Returns the most recent undo-stack entry without popping it.
    pub fn peek_undo_stack(&self) -> Option<&HistoryEntry> {
        self.history.undo_stack.last()
    }

    /// Returns the most recent redo-stack entry without popping it.
    pub fn peek_redo_stack(&self) -> Option<&HistoryEntry> {
        self.history.redo_stack.last()
    }
1397
    /// Starts a transaction that groups subsequent edits, timestamped now.
    pub fn start_transaction(&mut self) -> Option<TransactionId> {
        self.start_transaction_at(Instant::now())
    }

    /// Starts a transaction at the given time, returning the id of the newly
    /// started transaction if one was started.
    pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
        self.history
            .start_transaction(self.version.clone(), now, &mut self.lamport_clock)
    }

    /// Ends the current transaction, timestamped now.
    pub fn end_transaction(&mut self) -> Option<(TransactionId, clock::Global)> {
        self.end_transaction_at(Instant::now())
    }
1410
1411 pub fn end_transaction_at(&mut self, now: Instant) -> Option<(TransactionId, clock::Global)> {
1412 if let Some(entry) = self.history.end_transaction(now) {
1413 let since = entry.transaction.start.clone();
1414 let id = self.history.group().unwrap();
1415 Some((id, since))
1416 } else {
1417 None
1418 }
1419 }
1420
    /// Prevents the last transaction from being grouped with later edits.
    pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
        self.history.finalize_last_transaction()
    }

    /// Groups all transactions back to (and including) the given transaction
    /// into a single undo entry.
    pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
        self.history.group_until(transaction_id);
    }

    /// Returns the text the buffer was created with, before any edits.
    pub fn base_text(&self) -> &Rope {
        &self.history.base_text
    }

    /// Returns every operation recorded in this buffer's history, keyed by
    /// lamport timestamp.
    pub fn operations(&self) -> &TreeMap<clock::Lamport, Operation> {
        &self.history.operations
    }
1436
1437 pub fn undo(&mut self) -> Option<(TransactionId, Operation)> {
1438 if let Some(entry) = self.history.pop_undo() {
1439 let transaction = entry.transaction.clone();
1440 let transaction_id = transaction.id;
1441 let op = self.undo_or_redo(transaction);
1442 Some((transaction_id, op))
1443 } else {
1444 None
1445 }
1446 }
1447
1448 pub fn undo_transaction(&mut self, transaction_id: TransactionId) -> Option<Operation> {
1449 let transaction = self
1450 .history
1451 .remove_from_undo(transaction_id)?
1452 .transaction
1453 .clone();
1454 Some(self.undo_or_redo(transaction))
1455 }
1456
1457 pub fn undo_to_transaction(&mut self, transaction_id: TransactionId) -> Vec<Operation> {
1458 let transactions = self
1459 .history
1460 .remove_from_undo_until(transaction_id)
1461 .iter()
1462 .map(|entry| entry.transaction.clone())
1463 .collect::<Vec<_>>();
1464
1465 transactions
1466 .into_iter()
1467 .map(|transaction| self.undo_or_redo(transaction))
1468 .collect()
1469 }
1470
    /// Removes the transaction from the history entirely, returning it.
    pub fn forget_transaction(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
        self.history.forget(transaction_id)
    }

    /// Looks up a transaction in the history by id.
    pub fn get_transaction(&self, transaction_id: TransactionId) -> Option<&Transaction> {
        self.history.transaction(transaction_id)
    }

    /// Merges `transaction` into `destination` within the history.
    pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
        self.history.merge_transactions(transaction, destination);
    }
1482
1483 pub fn redo(&mut self) -> Option<(TransactionId, Operation)> {
1484 if let Some(entry) = self.history.pop_redo() {
1485 let transaction = entry.transaction.clone();
1486 let transaction_id = transaction.id;
1487 let op = self.undo_or_redo(transaction);
1488 Some((transaction_id, op))
1489 } else {
1490 None
1491 }
1492 }
1493
1494 pub fn redo_to_transaction(&mut self, transaction_id: TransactionId) -> Vec<Operation> {
1495 let transactions = self
1496 .history
1497 .remove_from_redo(transaction_id)
1498 .iter()
1499 .map(|entry| entry.transaction.clone())
1500 .collect::<Vec<_>>();
1501
1502 transactions
1503 .into_iter()
1504 .map(|transaction| self.undo_or_redo(transaction))
1505 .collect()
1506 }
1507
1508 fn undo_or_redo(&mut self, transaction: Transaction) -> Operation {
1509 let mut counts = HashMap::default();
1510 for edit_id in transaction.edit_ids {
1511 counts.insert(edit_id, self.undo_map.undo_count(edit_id).saturating_add(1));
1512 }
1513
1514 let operation = self.undo_operations(counts);
1515 self.history.push(operation.clone());
1516 operation
1517 }
1518
    /// Builds and applies an undo operation that sets the undo count of each
    /// listed edit to the given value, returning the operation so it can be
    /// broadcast to other replicas.
    pub fn undo_operations(&mut self, counts: HashMap<clock::Lamport, u32>) -> Operation {
        let timestamp = self.lamport_clock.tick();
        // Capture the version *before* observing the new timestamp so the
        // undo's dependency version does not include the undo itself.
        let version = self.version();
        self.snapshot.version.observe(timestamp);
        let undo = UndoOperation {
            timestamp,
            version,
            counts,
        };
        self.apply_undo(&undo);
        Operation::Undo(undo)
    }
1531
    /// Pushes an externally-created transaction onto the undo history,
    /// timestamped at `now` for grouping purposes.
    pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
        self.history.push_transaction(transaction, now);
    }
1535
    /// Differs from `push_transaction` in that it does not clear the redo
    /// stack. Intended to be used to create a parent transaction to merge
    /// potential child transactions into.
    ///
    /// The caller is responsible for removing it from the undo history using
    /// `forget_transaction` if no edits are merged into it. Otherwise, if edits
    /// are merged into this transaction, the caller is responsible for ensuring
    /// the redo stack is cleared. The easiest way to ensure the redo stack is
    /// cleared is to create transactions with the usual `start_transaction` and
    /// `end_transaction` methods and merging the resulting transactions into
    /// the transaction created by this method.
    pub fn push_empty_transaction(&mut self, now: Instant) -> TransactionId {
        self.history
            .push_empty_transaction(self.version.clone(), now, &mut self.lamport_clock)
    }
1553
    /// Returns the current ranges (in text dimension `D`) occupied by the text
    /// edited by the given transaction. Yields nothing if the transaction is
    /// no longer in the history.
    pub fn edited_ranges_for_transaction_id<D>(
        &self,
        transaction_id: TransactionId,
    ) -> impl '_ + Iterator<Item = Range<D>>
    where
        D: TextDimension,
    {
        self.history
            .transaction(transaction_id)
            .into_iter()
            .flat_map(|transaction| self.edited_ranges_for_transaction(transaction))
    }
1566
    /// Returns the current ranges (in text dimension `D`) occupied by the text
    /// produced by the given edits, with adjacent ranges coalesced. Fragments
    /// that are currently deleted contribute empty ranges at their position.
    pub fn edited_ranges_for_edit_ids<'a, D>(
        &'a self,
        edit_ids: impl IntoIterator<Item = &'a clock::Lamport>,
    ) -> impl 'a + Iterator<Item = Range<D>>
    where
        D: TextDimension,
    {
        // get fragment ranges
        let mut cursor = self
            .fragments
            .cursor::<Dimensions<Option<&Locator>, usize>>(&None);
        let offset_ranges = self
            .fragment_ids_for_edits(edit_ids.into_iter())
            .into_iter()
            .filter_map(move |fragment_id| {
                // Fragment ids arrive sorted, so forward seeks suffice.
                cursor.seek_forward(&Some(fragment_id), Bias::Left);
                let fragment = cursor.item()?;
                let start_offset = cursor.start().1;
                // Invisible (deleted) fragments yield an empty range.
                let end_offset = start_offset + if fragment.visible { fragment.len } else { 0 };
                Some(start_offset..end_offset)
            });

        // combine adjacent ranges
        let mut prev_range: Option<Range<usize>> = None;
        let disjoint_ranges = offset_ranges
            .map(Some)
            .chain([None]) // sentinel that flushes the final pending range
            .filter_map(move |range| {
                if let Some((range, prev_range)) = range.as_ref().zip(prev_range.as_mut())
                    && prev_range.end == range.start
                {
                    prev_range.end = range.end;
                    return None;
                }
                let result = prev_range.clone();
                prev_range = range;
                result
            });

        // convert to the desired text dimension.
        let mut position = D::zero(());
        let mut rope_cursor = self.visible_text.cursor(0);
        disjoint_ranges.map(move |range| {
            position.add_assign(&rope_cursor.summary(range.start));
            let start = position;
            position.add_assign(&rope_cursor.summary(range.end));
            let end = position;
            start..end
        })
    }
1617
    /// Returns the current ranges occupied by the text produced by the given
    /// transaction's edits.
    pub fn edited_ranges_for_transaction<'a, D>(
        &'a self,
        transaction: &'a Transaction,
    ) -> impl 'a + Iterator<Item = Range<D>>
    where
        D: TextDimension,
    {
        self.edited_ranges_for_edit_ids(&transaction.edit_ids)
    }

    /// Registers a subscriber that will receive a patch describing every
    /// subsequent change to the buffer's visible text.
    pub fn subscribe(&mut self) -> Subscription<usize> {
        self.subscriptions.subscribe()
    }
1631
    /// Returns a future that resolves once this buffer has observed all of the
    /// given edit ids, or fails if the buffer gives up waiting first (e.g. via
    /// `give_up_waiting`).
    pub fn wait_for_edits<It: IntoIterator<Item = clock::Lamport>>(
        &mut self,
        edit_ids: It,
    ) -> impl 'static + Future<Output = Result<()>> + use<It> {
        let mut futures = Vec::new();
        for edit_id in edit_ids {
            // Only register a waiter for edits we haven't seen yet.
            if !self.version.observed(edit_id) {
                let (tx, rx) = oneshot::channel();
                self.edit_id_resolvers.entry(edit_id).or_default().push(tx);
                futures.push(rx);
            }
        }

        async move {
            for mut future in futures {
                // A dropped sender means the buffer gave up waiting.
                if future.recv().await.is_none() {
                    anyhow::bail!("gave up waiting for edits");
                }
            }
            Ok(())
        }
    }
1654
    /// Returns a future that resolves once the edits that created all of the
    /// given anchors have been observed, or fails if the buffer gives up
    /// waiting first.
    pub fn wait_for_anchors<It: IntoIterator<Item = Anchor>>(
        &mut self,
        anchors: It,
    ) -> impl 'static + Future<Output = Result<()>> + use<It> {
        let mut futures = Vec::new();
        for anchor in anchors {
            // Min/max anchors are always resolvable; skip them.
            if !self.version.observed(anchor.timestamp()) && !anchor.is_max() && !anchor.is_min() {
                let (tx, rx) = oneshot::channel();
                self.edit_id_resolvers
                    .entry(anchor.timestamp())
                    .or_default()
                    .push(tx);
                futures.push(rx);
            }
        }

        async move {
            for mut future in futures {
                // A dropped sender means the buffer gave up waiting.
                if future.recv().await.is_none() {
                    anyhow::bail!("gave up waiting for anchors");
                }
            }
            Ok(())
        }
    }
1680
    /// Returns a future that resolves once this buffer's version has observed
    /// everything in `version`, or fails if the buffer gives up waiting first.
    pub fn wait_for_version(
        &mut self,
        version: clock::Global,
    ) -> impl Future<Output = Result<()>> + use<> {
        let mut rx = None;
        // Only register a waiter when the version hasn't been reached yet.
        if !self.snapshot.version.observed_all(&version) {
            let channel = oneshot::channel();
            self.wait_for_version_txs.push((version, channel.0));
            rx = Some(channel.1);
        }
        async move {
            // A dropped sender means the buffer gave up waiting.
            if let Some(mut rx) = rx
                && rx.recv().await.is_none()
            {
                anyhow::bail!("gave up waiting for version");
            }
            Ok(())
        }
    }
1700
    /// Cancels all pending `wait_for_*` futures. Dropping the senders makes
    /// each outstanding future resolve with an error.
    pub fn give_up_waiting(&mut self) {
        self.edit_id_resolvers.clear();
        self.wait_for_version_txs.clear();
    }
1705
1706 fn resolve_edit(&mut self, edit_id: clock::Lamport) {
1707 for mut tx in self
1708 .edit_id_resolvers
1709 .remove(&edit_id)
1710 .into_iter()
1711 .flatten()
1712 {
1713 tx.try_send(()).ok();
1714 }
1715 }
1716}
1717
#[cfg(any(test, feature = "test-support"))]
impl Buffer {
    /// Applies the edit encoded by `marked_string` (see
    /// `util::test::marked_text_ranges`) to this buffer.
    #[track_caller]
    pub fn edit_via_marked_text(&mut self, marked_string: &str) {
        let edits = self.edits_for_marked_text(marked_string);
        self.edit(edits);
    }

    /// Computes the `(old_range, new_text)` edits that transform the buffer's
    /// current text into the text encoded by `marked_string`, where the marked
    /// ranges identify the inserted spans. Panics ("invalid edit") when the
    /// marked string cannot be reached from the current text.
    #[track_caller]
    pub fn edits_for_marked_text(&self, marked_string: &str) -> Vec<(Range<usize>, String)> {
        let old_text = self.text();
        let (new_text, mut ranges) = util::test::marked_text_ranges(marked_string, false);
        // With no markers, treat the whole new text as a single replacement.
        if ranges.is_empty() {
            ranges.push(0..new_text.len());
        }

        // Text preceding the first marked range must be unchanged.
        assert_eq!(
            old_text[..ranges[0].start],
            new_text[..ranges[0].start],
            "invalid edit"
        );

        // `delta` tracks the running new-minus-old offset difference.
        let mut delta = 0;
        let mut edits = Vec::new();
        let mut ranges = ranges.into_iter().peekable();

        while let Some(inserted_range) = ranges.next() {
            let new_start = inserted_range.start;
            let old_start = (new_start as isize - delta) as usize;

            // The unmarked text between this range and the next must occur in
            // the old text; whatever precedes that occurrence was deleted.
            let following_text = if let Some(next_range) = ranges.peek() {
                &new_text[inserted_range.end..next_range.start]
            } else {
                &new_text[inserted_range.end..]
            };

            let inserted_len = inserted_range.len();
            let deleted_len = old_text[old_start..]
                .find(following_text)
                .expect("invalid edit");

            let old_range = old_start..old_start + deleted_len;
            edits.push((old_range, new_text[inserted_range].to_string()));
            delta += inserted_len as isize - deleted_len as isize;
        }

        // Sanity check: the accumulated delta must account for the overall
        // difference in length.
        assert_eq!(
            old_text.len() as isize + delta,
            new_text.len() as isize,
            "invalid edit"
        );

        edits
    }

    /// Asserts the buffer's internal consistency: fragment ordering, the
    /// bidirectional fragment/insertion-tree correspondence, the agreement of
    /// summaries with rope lengths, and the absence of CRLF in visible text.
    pub fn check_invariants(&self) {
        // Ensure every fragment is ordered by locator in the fragment tree and corresponds
        // to an insertion fragment in the insertions tree.
        let mut prev_fragment_id = Locator::min();
        for fragment in self.snapshot.fragments.items(&None) {
            assert!(fragment.id > prev_fragment_id);
            prev_fragment_id = fragment.id.clone();

            let insertion_fragment = self
                .snapshot
                .insertions
                .get(
                    &InsertionFragmentKey {
                        timestamp: fragment.timestamp,
                        split_offset: fragment.insertion_offset,
                    },
                    (),
                )
                .unwrap();
            assert_eq!(
                insertion_fragment.fragment_id, fragment.id,
                "fragment: {:?}\ninsertion: {:?}",
                fragment, insertion_fragment
            );
        }

        // Conversely, every insertion fragment must point back at a fragment
        // with matching timestamp and split offset.
        let mut cursor = self.snapshot.fragments.cursor::<Option<&Locator>>(&None);
        for insertion_fragment in self.snapshot.insertions.cursor::<()>(()) {
            cursor.seek(&Some(&insertion_fragment.fragment_id), Bias::Left);
            let fragment = cursor.item().unwrap();
            assert_eq!(insertion_fragment.fragment_id, fragment.id);
            assert_eq!(insertion_fragment.split_offset, fragment.insertion_offset);
        }

        // The fragment summaries must agree with the actual rope lengths.
        let fragment_summary = self.snapshot.fragments.summary();
        assert_eq!(
            fragment_summary.text.visible,
            self.snapshot.visible_text.len()
        );
        assert_eq!(
            fragment_summary.text.deleted,
            self.snapshot.deleted_text.len()
        );

        // Line endings are normalized on ingest; CRLF must never appear.
        assert!(!self.text().contains("\r\n"));
    }

    /// Overrides the time window within which consecutive edits are grouped
    /// into a single undo transaction.
    pub fn set_group_interval(&mut self, group_interval: Duration) {
        self.history.group_interval = group_interval;
    }

    /// Returns a random offset range within `start_offset..=len()`, with both
    /// endpoints clipped to valid boundaries.
    pub fn random_byte_range(&self, start_offset: usize, rng: &mut impl rand::Rng) -> Range<usize> {
        let end = self.clip_offset(rng.random_range(start_offset..=self.len()), Bias::Right);
        let start = self.clip_offset(rng.random_range(start_offset..=end), Bias::Right);
        start..end
    }

    /// Generates up to `edit_count` random, non-overlapping edits in ascending
    /// offset order, without applying them.
    pub fn get_random_edits<T>(
        &self,
        rng: &mut T,
        edit_count: usize,
    ) -> Vec<(Range<usize>, Arc<str>)>
    where
        T: rand::Rng,
    {
        let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
        let mut last_end = None;
        for _ in 0..edit_count {
            // Stop once the previous edit reached the end of the buffer.
            if last_end.is_some_and(|last_end| last_end >= self.len()) {
                break;
            }
            // Leave at least one character between consecutive edits.
            let new_start = last_end.map_or(0, |last_end| last_end + 1);
            let range = self.random_byte_range(new_start, rng);
            last_end = Some(range.end);

            let new_text_len = rng.random_range(0..10);
            let new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();

            edits.push((range, new_text.into()));
        }
        edits
    }

    /// Applies `edit_count` random edits, returning the edits (with their new
    /// text normalized to what the operation actually recorded) along with the
    /// resulting edit operation.
    pub fn randomly_edit<T>(
        &mut self,
        rng: &mut T,
        edit_count: usize,
    ) -> (Vec<(Range<usize>, Arc<str>)>, Operation)
    where
        T: rand::Rng,
    {
        let mut edits = self.get_random_edits(rng, edit_count);
        log::info!("mutating buffer {:?} with {:?}", self.replica_id, edits);

        let op = self.edit(edits.iter().cloned());
        if let Operation::Edit(edit) = &op {
            assert_eq!(edits.len(), edit.new_text.len());
            // Mirror the operation's recorded text back into the returned
            // edits so callers see exactly what was applied.
            for (edit, new_text) in edits.iter_mut().zip(&edit.new_text) {
                edit.1 = new_text.clone();
            }
        } else {
            unreachable!()
        }

        (edits, op)
    }

    /// Undoes between one and five randomly chosen transactions from the undo
    /// stack, returning the generated operations.
    pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng) -> Vec<Operation> {
        use rand::prelude::*;

        let mut ops = Vec::new();
        for _ in 0..rng.random_range(1..=5) {
            if let Some(entry) = self.history.undo_stack.choose(rng) {
                let transaction = entry.transaction.clone();
                log::info!(
                    "undoing buffer {:?} transaction {:?}",
                    self.replica_id,
                    transaction
                );
                ops.push(self.undo_or_redo(transaction));
            }
        }
        ops
    }
}
1898
/// A `Buffer` transparently exposes all of its snapshot's read-only methods.
impl Deref for Buffer {
    type Target = BufferSnapshot;

    fn deref(&self) -> &Self::Target {
        &self.snapshot
    }
}
1906
1907impl BufferSnapshot {
    /// Returns the rope containing the buffer's visible (non-deleted) text.
    pub fn as_rope(&self) -> &Rope {
        &self.visible_text
    }
1911
    /// Reconstructs the buffer's text as it was at the given `version` by
    /// stitching together slices of the current visible and deleted ropes.
    pub fn rope_for_version(&self, version: &clock::Global) -> Rope {
        let mut rope = Rope::new();

        // Visit only fragments touched by edits the given version has NOT
        // observed; runs of fully-observed fragments had the same state at
        // `version` as they do now and are copied wholesale below.
        let mut cursor = self
            .fragments
            .filter::<_, FragmentTextSummary>(&None, move |summary| {
                !version.observed_all(&summary.max_version)
            });
        cursor.next();

        let mut visible_cursor = self.visible_text.cursor(0);
        let mut deleted_cursor = self.deleted_text.cursor(0);

        while let Some(fragment) = cursor.item() {
            // Copy the visible text of any skipped (fully-observed) run.
            if cursor.start().visible > visible_cursor.offset() {
                let text = visible_cursor.slice(cursor.start().visible);
                rope.append(text);
            }

            if fragment.was_visible(version, &self.undo_map) {
                // Visible at `version`: take its text from whichever rope it
                // currently lives in.
                if fragment.visible {
                    let text = visible_cursor.slice(cursor.end().visible);
                    rope.append(text);
                } else {
                    deleted_cursor.seek_forward(cursor.start().deleted);
                    let text = deleted_cursor.slice(cursor.end().deleted);
                    rope.append(text);
                }
            } else if fragment.visible {
                // Not visible at `version` but visible now: skip its text.
                visible_cursor.seek_forward(cursor.end().visible);
            }

            cursor.next();
        }

        // Copy any trailing fully-observed run.
        if cursor.start().visible > visible_cursor.offset() {
            let text = visible_cursor.slice(cursor.start().visible);
            rope.append(text);
        }

        rope
    }
1954
    /// Returns the id identifying this buffer across all replicas.
    pub fn remote_id(&self) -> BufferId {
        self.remote_id
    }

    /// Returns the id of the replica this snapshot belongs to.
    pub fn replica_id(&self) -> ReplicaId {
        self.replica_id
    }

    /// Returns the number of lines in the buffer (rows are zero-based).
    pub fn row_count(&self) -> u32 {
        self.max_point().row + 1
    }

    /// Returns the length of the visible text in bytes.
    pub fn len(&self) -> usize {
        self.visible_text.len()
    }

    /// Returns whether the visible text is empty.
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }

    /// Returns an iterator over all characters of the visible text.
    pub fn chars(&self) -> impl Iterator<Item = char> + '_ {
        self.chars_at(0)
    }
1978
    /// Returns an iterator over the characters within the given range.
    pub fn chars_for_range<T: ToOffset>(&self, range: Range<T>) -> impl Iterator<Item = char> + '_ {
        self.text_for_range(range).flat_map(str::chars)
    }

    /// Returns an iterator over the characters within the given range, in
    /// reverse order (from the range's end toward its start).
    pub fn reversed_chars_for_range<T: ToOffset>(
        &self,
        range: Range<T>,
    ) -> impl Iterator<Item = char> + '_ {
        self.reversed_chunks_in_range(range)
            .flat_map(|chunk| chunk.chars().rev())
    }
1990
1991 pub fn contains_str_at<T>(&self, position: T, needle: &str) -> bool
1992 where
1993 T: ToOffset,
1994 {
1995 let position = position.to_offset(self);
1996 position == self.clip_offset(position, Bias::Left)
1997 && self
1998 .bytes_in_range(position..self.len())
1999 .flatten()
2000 .copied()
2001 .take(needle.len())
2002 .eq(needle.bytes())
2003 }
2004
    /// Returns the range of the longest prefix of `needle` that appears
    /// (case-insensitively) immediately before `position` in the buffer. The
    /// range ends at `position`; when no prefix matches it is empty.
    pub fn common_prefix_at<T>(&self, position: T, needle: &str) -> Range<T>
    where
        T: ToOffset + TextDimension,
    {
        let offset = position.to_offset(self);
        // Candidate lengths are the char boundaries of `needle` (plus its full
        // length), capped at the amount of text available before `position`.
        let common_prefix_len = needle
            .char_indices()
            .map(|(index, _)| index)
            .chain([needle.len()])
            .take_while(|&len| len <= offset)
            .filter(|&len| {
                // Compare case-folded characters of the buffer text just
                // before `position` against the needle's prefix.
                let left = self
                    .chars_for_range(offset - len..offset)
                    .flat_map(char::to_lowercase);
                let right = needle[..len].chars().flat_map(char::to_lowercase);
                left.eq(right)
            })
            .last()
            .unwrap_or(0);
        let start_offset = offset - common_prefix_len;
        let start = self.text_summary_for_range(0..start_offset);
        start..position
    }
2028
    /// Returns the visible text as an owned `String`.
    pub fn text(&self) -> String {
        self.visible_text.to_string()
    }

    /// Returns the buffer's line-ending style.
    pub fn line_ending(&self) -> LineEnding {
        self.line_ending
    }

    /// Returns all currently-deleted text as an owned `String`.
    pub fn deleted_text(&self) -> String {
        self.deleted_text.to_string()
    }

    /// Returns an iterator over all fragments in the fragment tree.
    pub fn fragments(&self) -> impl Iterator<Item = &Fragment> {
        self.fragments.iter()
    }

    /// Returns the summary (length, lines, etc.) of the visible text.
    pub fn text_summary(&self) -> TextSummary {
        self.visible_text.summary()
    }
2048
    /// Returns the position just past the last character of the buffer.
    pub fn max_point(&self) -> Point {
        self.visible_text.max_point()
    }

    /// Returns the maximum position in UTF-16 coordinates.
    pub fn max_point_utf16(&self) -> PointUtf16 {
        self.visible_text.max_point_utf16()
    }

    /// Converts a row/column point to a byte offset.
    pub fn point_to_offset(&self, point: Point) -> usize {
        self.visible_text.point_to_offset(point)
    }

    /// Converts a row/column point to a UTF-16 offset.
    pub fn point_to_offset_utf16(&self, point: Point) -> OffsetUtf16 {
        self.visible_text.point_to_offset_utf16(point)
    }

    /// Converts a UTF-16 point to a UTF-16 offset.
    pub fn point_utf16_to_offset_utf16(&self, point: PointUtf16) -> OffsetUtf16 {
        self.visible_text.point_utf16_to_offset_utf16(point)
    }

    /// Converts a UTF-16 point to a byte offset.
    pub fn point_utf16_to_offset(&self, point: PointUtf16) -> usize {
        self.visible_text.point_utf16_to_offset(point)
    }

    /// Converts a possibly-invalid UTF-16 point to a byte offset.
    pub fn unclipped_point_utf16_to_offset(&self, point: Unclipped<PointUtf16>) -> usize {
        self.visible_text.unclipped_point_utf16_to_offset(point)
    }

    /// Converts a possibly-invalid UTF-16 point to a row/column point.
    pub fn unclipped_point_utf16_to_point(&self, point: Unclipped<PointUtf16>) -> Point {
        self.visible_text.unclipped_point_utf16_to_point(point)
    }

    /// Converts a UTF-16 offset to a byte offset.
    pub fn offset_utf16_to_offset(&self, offset: OffsetUtf16) -> usize {
        self.visible_text.offset_utf16_to_offset(offset)
    }

    /// Converts a byte offset to a UTF-16 offset.
    pub fn offset_to_offset_utf16(&self, offset: usize) -> OffsetUtf16 {
        self.visible_text.offset_to_offset_utf16(offset)
    }

    /// Converts a byte offset to a row/column point.
    pub fn offset_to_point(&self, offset: usize) -> Point {
        self.visible_text.offset_to_point(offset)
    }

    /// Converts a byte offset to a UTF-16 point.
    pub fn offset_to_point_utf16(&self, offset: usize) -> PointUtf16 {
        self.visible_text.offset_to_point_utf16(offset)
    }

    /// Converts a row/column point to a UTF-16 point.
    pub fn point_to_point_utf16(&self, point: Point) -> PointUtf16 {
        self.visible_text.point_to_point_utf16(point)
    }

    /// Converts a UTF-16 point to a row/column point.
    pub fn point_utf16_to_point(&self, point: PointUtf16) -> Point {
        self.visible_text.point_utf16_to_point(point)
    }

    /// Returns the version vector describing all edits this snapshot has
    /// observed.
    pub fn version(&self) -> &clock::Global {
        &self.version
    }
2108
    /// Returns an iterator over characters starting at `position`.
    pub fn chars_at<T: ToOffset>(&self, position: T) -> impl Iterator<Item = char> + '_ {
        let offset = position.to_offset(self);
        self.visible_text.chars_at(offset)
    }

    /// Returns an iterator over characters before `position`, in reverse.
    pub fn reversed_chars_at<T: ToOffset>(&self, position: T) -> impl Iterator<Item = char> + '_ {
        let offset = position.to_offset(self);
        self.visible_text.reversed_chars_at(offset)
    }

    /// Returns the chunks of text within `range`, from the end backward.
    pub fn reversed_chunks_in_range<T: ToOffset>(&self, range: Range<T>) -> rope::Chunks<'_> {
        let range = range.start.to_offset(self)..range.end.to_offset(self);
        self.visible_text.reversed_chunks_in_range(range)
    }

    /// Returns the bytes of the visible text within `range`.
    pub fn bytes_in_range<T: ToOffset>(&self, range: Range<T>) -> rope::Bytes<'_> {
        let start = range.start.to_offset(self);
        let end = range.end.to_offset(self);
        self.visible_text.bytes_in_range(start..end)
    }

    /// Returns the bytes of the visible text within `range`, from the end
    /// backward.
    pub fn reversed_bytes_in_range<T: ToOffset>(&self, range: Range<T>) -> rope::Bytes<'_> {
        let start = range.start.to_offset(self);
        let end = range.end.to_offset(self);
        self.visible_text.reversed_bytes_in_range(start..end)
    }

    /// Returns the chunks of visible text within `range`.
    pub fn text_for_range<T: ToOffset>(&self, range: Range<T>) -> Chunks<'_> {
        let start = range.start.to_offset(self);
        let end = range.end.to_offset(self);
        self.visible_text.chunks_in_range(start..end)
    }
2141
2142 pub fn line_len(&self, row: u32) -> u32 {
2143 let row_start_offset = Point::new(row, 0).to_offset(self);
2144 let row_end_offset = if row >= self.max_point().row {
2145 self.len()
2146 } else {
2147 Point::new(row + 1, 0).to_previous_offset(self)
2148 };
2149 (row_end_offset - row_start_offset) as u32
2150 }
2151
    /// Yields `(row, LineIndent)` for each row in `row_range`, inclusive of
    /// `row_range.end` (the scanned text extends to the end of that row).
    pub fn line_indents_in_row_range(
        &self,
        row_range: Range<u32>,
    ) -> impl Iterator<Item = (u32, LineIndent)> + '_ {
        let start = Point::new(row_range.start, 0).to_offset(self);
        let end = Point::new(row_range.end, self.line_len(row_range.end)).to_offset(self);

        let mut chunks = self.as_rope().chunks_in_range(start..end);
        let mut row = row_range.start;
        let mut done = false;
        std::iter::from_fn(move || {
            if done {
                None
            } else {
                // Measure the current line's leading whitespace, then advance
                // the chunk cursor; stop once there is no following line.
                let indent = (row, LineIndent::from_chunks(&mut chunks));
                done = !chunks.next_line();
                row += 1;
                Some(indent)
            }
        })
    }
2173
    /// Returns the line indents in the given row range, exclusive of end row, in reversed order.
    pub fn reversed_line_indents_in_row_range(
        &self,
        row_range: Range<u32>,
    ) -> impl Iterator<Item = (u32, LineIndent)> + '_ {
        let start = Point::new(row_range.start, 0).to_offset(self);

        let end_point;
        let end;
        if row_range.end > row_range.start {
            // Scan through the end of the last included row (`end - 1`).
            end_point = Point::new(row_range.end - 1, self.line_len(row_range.end - 1));
            end = end_point.to_offset(self);
        } else {
            // Empty range: the iterator still yields one entry for the start row.
            end_point = Point::new(row_range.start, 0);
            end = start;
        };

        let mut chunks = self.as_rope().chunks_in_range(start..end);
        // Move the cursor to the start of the last line if it's not empty.
        chunks.seek(end);
        if end_point.column > 0 {
            chunks.prev_line();
        }

        let mut row = end_point.row;
        let mut done = false;
        std::iter::from_fn(move || {
            if done {
                None
            } else {
                let initial_offset = chunks.offset();
                let indent = (row, LineIndent::from_chunks(&mut chunks));
                // If measuring the indent advanced the cursor, rewind to the
                // start of the line just measured before stepping back again.
                if chunks.offset() > initial_offset {
                    chunks.prev_line();
                }
                done = !chunks.prev_line();
                if !done {
                    row -= 1;
                }

                Some(indent)
            }
        })
    }
2218
    /// Returns the indentation of the line at `row`.
    pub fn line_indent_for_row(&self, row: u32) -> LineIndent {
        LineIndent::from_iter(self.chars_at(Point::new(row, 0)))
    }

    /// Returns true if the line at `row` is empty or contains only whitespace.
    pub fn is_line_blank(&self, row: u32) -> bool {
        self.text_for_range(Point::new(row, 0)..Point::new(row, self.line_len(row)))
            .all(|chunk| chunk.matches(|c: char| !c.is_whitespace()).next().is_none())
    }
2227
2228 pub fn text_summary_for_range<D, O: ToOffset>(&self, range: Range<O>) -> D
2229 where
2230 D: TextDimension,
2231 {
2232 self.visible_text
2233 .cursor(range.start.to_offset(self))
2234 .summary(range.end.to_offset(self))
2235 }
2236
    /// Resolves each anchor to a text dimension `D` (e.g. offset or point).
    ///
    /// Anchors must be supplied in increasing order; this delegates to
    /// `summaries_for_anchors_with_payload`, whose cursors only seek forward.
    pub fn summaries_for_anchors<'a, D, A>(&'a self, anchors: A) -> impl 'a + Iterator<Item = D>
    where
        D: 'a + TextDimension,
        A: 'a + IntoIterator<Item = &'a Anchor>,
    {
        let anchors = anchors.into_iter();
        self.summaries_for_anchors_with_payload::<D, _, ()>(anchors.map(|a| (a, ())))
            .map(|d| d.0)
    }
2246
    /// Lazily resolves each `(anchor, payload)` pair to `(D, payload)`.
    ///
    /// Anchors must be yielded in increasing order: the fragment and text
    /// cursors only ever seek forward and `position` accumulates summaries
    /// monotonically across calls.
    ///
    /// # Panics
    /// Panics if an anchor's insertion cannot be found in this snapshot, or if
    /// the found insertion's timestamp does not match the anchor's.
    pub fn summaries_for_anchors_with_payload<'a, D, A, T>(
        &'a self,
        anchors: A,
    ) -> impl 'a + Iterator<Item = (D, T)>
    where
        D: 'a + TextDimension,
        A: 'a + IntoIterator<Item = (&'a Anchor, T)>,
    {
        let anchors = anchors.into_iter();
        let mut fragment_cursor = self
            .fragments
            .cursor::<Dimensions<Option<&Locator>, usize>>(&None);
        let mut text_cursor = self.visible_text.cursor(0);
        let mut position = D::zero(());

        anchors.map(move |(anchor, payload)| {
            // Sentinel anchors resolve without a fragment lookup.
            if anchor.is_min() {
                return (D::zero(()), payload);
            } else if anchor.is_max() {
                return (D::from_text_summary(&self.visible_text.summary()), payload);
            }

            let Some(insertion) = self.try_find_fragment(anchor) else {
                panic!(
                    "invalid insertion for buffer {}@{:?} with anchor {:?}",
                    self.remote_id(),
                    self.version,
                    anchor
                );
            };
            assert_eq!(
                insertion.timestamp,
                anchor.timestamp(),
                "invalid insertion for buffer {}@{:?} and anchor {:?}",
                self.remote_id(),
                self.version,
                anchor
            );

            // Find the fragment's position in the visible text, then add the
            // anchor's offset within the fragment (only if it's visible —
            // deleted fragments contribute no visible length).
            fragment_cursor.seek_forward(&Some(&insertion.fragment_id), Bias::Left);
            let fragment = fragment_cursor.item().unwrap();
            let mut fragment_offset = fragment_cursor.start().1;
            if fragment.visible {
                fragment_offset += anchor.offset - insertion.split_offset;
            }

            position.add_assign(&text_cursor.summary(fragment_offset));
            (position, payload)
        })
    }
2297
    /// Resolves a single anchor to a text dimension `D`.
    pub fn summary_for_anchor<D>(&self, anchor: &Anchor) -> D
    where
        D: TextDimension,
    {
        self.text_summary_for_range(0..self.offset_for_anchor(anchor))
    }
2304
    /// Resolves an anchor to a byte offset in the visible text.
    ///
    /// # Panics
    /// Panics (via `panic_bad_anchor`) if the anchor's insertion cannot be
    /// found; in debug builds also asserts that the anchor belongs to this
    /// buffer and that its timestamp has been observed.
    pub fn offset_for_anchor(&self, anchor: &Anchor) -> usize {
        if anchor.is_min() {
            0
        } else if anchor.is_max() {
            self.visible_text.len()
        } else {
            debug_assert_eq!(anchor.buffer_id, Some(self.remote_id));
            debug_assert!(
                self.version.observed(anchor.timestamp()),
                "Anchor timestamp {:?} not observed by buffer {:?}",
                anchor.timestamp(),
                self.version
            );
            let item = self.try_find_fragment(anchor);
            let Some(insertion) =
                item.filter(|insertion| insertion.timestamp == anchor.timestamp())
            else {
                self.panic_bad_anchor(anchor);
            };

            // Locate the fragment in the tree to learn its visible-text
            // position, then add the anchor's offset within the fragment
            // (deleted fragments contribute no visible offset).
            let (start, _, item) = self
                .fragments
                .find::<Dimensions<Option<&Locator>, usize>, _>(
                    &None,
                    &Some(&insertion.fragment_id),
                    Bias::Left,
                );
            let fragment = item.unwrap();
            let mut fragment_offset = start.1;
            if fragment.visible {
                fragment_offset += anchor.offset - insertion.split_offset;
            }
            fragment_offset
        }
    }
2340
    /// Panics with a message describing why `anchor` could not be resolved
    /// against this snapshot. Marked `#[cold]` to keep callers' happy paths
    /// tight.
    #[cold]
    fn panic_bad_anchor(&self, anchor: &Anchor) -> ! {
        if anchor.buffer_id.is_some_and(|id| id != self.remote_id) {
            panic!(
                "invalid anchor - buffer id does not match: anchor {anchor:?}; buffer id: {}, version: {:?}",
                self.remote_id, self.version
            );
        } else if !self.version.observed(anchor.timestamp()) {
            panic!(
                "invalid anchor - snapshot has not observed lamport: {:?}; version: {:?}",
                anchor, self.version
            );
        } else {
            panic!(
                "invalid anchor {:?}. buffer id: {}, version: {:?}",
                anchor, self.remote_id, self.version
            );
        }
    }
2360
2361 fn fragment_id_for_anchor(&self, anchor: &Anchor) -> &Locator {
2362 self.try_fragment_id_for_anchor(anchor)
2363 .unwrap_or_else(|| self.panic_bad_anchor(anchor))
2364 }
2365
    /// Resolves the locator of the fragment containing `anchor`, or `None` if
    /// the anchor's insertion cannot be found.
    fn try_fragment_id_for_anchor(&self, anchor: &Anchor) -> Option<&Locator> {
        if anchor.is_min() {
            Some(Locator::min_ref())
        } else if anchor.is_max() {
            Some(Locator::max_ref())
        } else {
            let item = self.try_find_fragment(anchor);
            // The timestamp is only validated in debug builds; release builds
            // accept whatever fragment the search returned.
            item.filter(|insertion| {
                !cfg!(debug_assertions) || insertion.timestamp == anchor.timestamp()
            })
            .map(|insertion| &insertion.fragment_id)
        }
    }
2379
    /// Finds the insertion fragment containing `anchor`, honoring the anchor's
    /// bias when it falls exactly on a fragment split boundary.
    fn try_find_fragment(&self, anchor: &Anchor) -> Option<&InsertionFragment> {
        let anchor_key = InsertionFragmentKey {
            timestamp: anchor.timestamp(),
            split_offset: anchor.offset,
        };
        match self.insertions.find_with_prev::<InsertionFragmentKey, _>(
            (),
            &anchor_key,
            anchor.bias,
        ) {
            (_, _, Some((prev, insertion))) => {
                let comparison = sum_tree::KeyedItem::key(insertion).cmp(&anchor_key);
                // Fall back to the previous fragment when the found one starts
                // past the anchor, or when a left-biased anchor sits exactly
                // on a split boundary (it belongs to the preceding fragment).
                if comparison == Ordering::Greater
                    || (anchor.bias == Bias::Left
                        && comparison == Ordering::Equal
                        && anchor.offset > 0)
                {
                    prev
                } else {
                    Some(insertion)
                }
            }
            // No item at or after the key: the anchor lies in the last fragment.
            _ => self.insertions.last(),
        }
    }
2405
2406 /// Returns an anchor range for the given input position range that is anchored to the text inbetween.
2407 pub fn anchor_range_around<T: ToOffset>(&self, position: Range<T>) -> Range<Anchor> {
2408 self.anchor_before(position.start)..self.anchor_after(position.end)
2409 }
2410
    /// Returns an anchor range for the given input position range that is anchored to the text before the start position and after the end position.
    pub fn anchor_range_between<T: ToOffset>(&self, position: Range<T>) -> Range<Anchor> {
        // Left-biased start, right-biased end: both anchors stick to the text
        // *outside* the range's boundaries.
        self.anchor_before(position.start)..self.anchor_after(position.end)
    }
2415
    /// Returns an anchor for the given input position that is anchored to the text before the position.
    pub fn anchor_before<T: ToOffset>(&self, position: T) -> Anchor {
        self.anchor_at(position, Bias::Left)
    }

    /// Returns an anchor for the given input position that is anchored to the text after the position.
    pub fn anchor_after<T: ToOffset>(&self, position: T) -> Anchor {
        self.anchor_at(position, Bias::Right)
    }

    /// Returns an anchor at `position` with the given bias.
    pub fn anchor_at<T: ToOffset>(&self, position: T, bias: Bias) -> Anchor {
        self.anchor_at_offset(position.to_offset(self), bias)
    }
2429
    /// Creates an anchor at the given byte offset.
    ///
    /// Offsets at the buffer's edges produce the min/max sentinel anchors.
    /// Offsets that are not char boundaries are snapped in the direction of
    /// `bias` (see `assert_char_boundary`, whose const parameter selects
    /// debug-build validation).
    fn anchor_at_offset(&self, mut offset: usize, bias: Bias) -> Anchor {
        if bias == Bias::Left && offset == 0 {
            Anchor::min_for_buffer(self.remote_id)
        } else if bias == Bias::Right
            && ((!cfg!(debug_assertions) && offset >= self.len()) || offset == self.len())
        {
            // Release builds clamp out-of-bounds offsets to the max anchor;
            // debug builds only take this path at exactly `len()`.
            Anchor::max_for_buffer(self.remote_id)
        } else {
            if !self
                .visible_text
                .assert_char_boundary::<{ cfg!(debug_assertions) }>(offset)
            {
                offset = match bias {
                    Bias::Left => self.visible_text.floor_char_boundary(offset),
                    Bias::Right => self.visible_text.ceil_char_boundary(offset),
                };
            }
            let (start, _, item) = self.fragments.find::<usize, _>(&None, &offset, bias);
            let Some(fragment) = item else {
                // We got a bad offset, likely out of bounds
                debug_panic!(
                    "Failed to find fragment at offset {} (len: {})",
                    offset,
                    self.len()
                );
                return Anchor::max_for_buffer(self.remote_id);
            };
            // Anchor to the containing insertion at the offset's position
            // within it.
            let overshoot = offset - start;
            Anchor::new(
                fragment.timestamp,
                fragment.insertion_offset + overshoot,
                bias,
                Some(self.remote_id),
            )
        }
    }
2466
2467 pub fn can_resolve(&self, anchor: &Anchor) -> bool {
2468 anchor.is_min()
2469 || anchor.is_max()
2470 || (Some(self.remote_id) == anchor.buffer_id
2471 && self.version.observed(anchor.timestamp()))
2472 }
2473
    /// Clips `offset` to the nearest valid char boundary in the direction of `bias`.
    pub fn clip_offset(&self, offset: usize, bias: Bias) -> usize {
        self.visible_text.clip_offset(offset, bias)
    }

    /// Clips `point` to the nearest valid position in the direction of `bias`.
    pub fn clip_point(&self, point: Point, bias: Bias) -> Point {
        self.visible_text.clip_point(point, bias)
    }

    /// Clips a UTF-16 offset to the nearest valid position in the direction of `bias`.
    pub fn clip_offset_utf16(&self, offset: OffsetUtf16, bias: Bias) -> OffsetUtf16 {
        self.visible_text.clip_offset_utf16(offset, bias)
    }

    /// Clips a (possibly invalid) UTF-16 point to the nearest valid position
    /// in the direction of `bias`.
    pub fn clip_point_utf16(&self, point: Unclipped<PointUtf16>, bias: Bias) -> PointUtf16 {
        self.visible_text.clip_point_utf16(point, bias)
    }
2489
    /// Iterates over the edits applied since the given version, expressed in
    /// dimension `D`.
    pub fn edits_since<'a, D>(
        &'a self,
        since: &'a clock::Global,
    ) -> impl 'a + Iterator<Item = Edit<D>>
    where
        D: TextDimension + Ord,
    {
        self.edits_since_in_range(since, Anchor::MIN..Anchor::MAX)
    }

    /// Like `edits_since`, but pairs each edit with an anchor range covering
    /// the fragments that produced it.
    pub fn anchored_edits_since<'a, D>(
        &'a self,
        since: &'a clock::Global,
    ) -> impl 'a + Iterator<Item = (Edit<D>, Range<Anchor>)>
    where
        D: TextDimension + Ord,
    {
        self.anchored_edits_since_in_range(since, Anchor::MIN..Anchor::MAX)
    }
2509
2510 pub fn edits_since_in_range<'a, D>(
2511 &'a self,
2512 since: &'a clock::Global,
2513 range: Range<Anchor>,
2514 ) -> impl 'a + Iterator<Item = Edit<D>>
2515 where
2516 D: TextDimension + Ord,
2517 {
2518 self.anchored_edits_since_in_range(since, range)
2519 .map(|item| item.0)
2520 }
2521
    /// Iterates over the edits applied since `since` that intersect the given
    /// anchor range, pairing each edit with its anchor range.
    pub fn anchored_edits_since_in_range<'a, D>(
        &'a self,
        since: &'a clock::Global,
        range: Range<Anchor>,
    ) -> impl 'a + Iterator<Item = (Edit<D>, Range<Anchor>)>
    where
        D: TextDimension + Ord,
    {
        // Fast path: nothing has changed since that version.
        if *since == self.version {
            return None.into_iter().flatten();
        }
        // Only descend into subtrees containing fragments that `since` has
        // not fully observed.
        let mut cursor = self.fragments.filter(&None, move |summary| {
            !since.observed_all(&summary.max_version)
        });
        cursor.next();
        let fragments_cursor = Some(cursor);
        // Compute the visible/deleted text positions of the range's start so
        // the `Edits` iterator can begin summarizing from there.
        let start_fragment_id = self.fragment_id_for_anchor(&range.start);
        let (start, _, item) = self
            .fragments
            .find::<Dimensions<Option<&Locator>, FragmentTextSummary>, _>(
                &None,
                &Some(start_fragment_id),
                Bias::Left,
            );
        let mut visible_start = start.1.visible;
        let mut deleted_start = start.1.deleted;
        if let Some(fragment) = item {
            let overshoot = range.start.offset - fragment.insertion_offset;
            if fragment.visible {
                visible_start += overshoot;
            } else {
                deleted_start += overshoot;
            }
        }
        let end_fragment_id = self.fragment_id_for_anchor(&range.end);

        Some(Edits {
            visible_cursor: self.visible_text.cursor(visible_start),
            deleted_cursor: self.deleted_text.cursor(deleted_start),
            fragments_cursor,
            undos: &self.undo_map,
            since,
            old_end: D::zero(()),
            new_end: D::zero(()),
            range: (start_fragment_id, range.start.offset)..(end_fragment_id, range.end.offset),
            buffer_id: self.remote_id,
        })
        .into_iter()
        .flatten()
    }
2572
    /// Returns true if any fragment intersecting `range` changed visibility
    /// since the given version.
    pub fn has_edits_since_in_range(&self, since: &clock::Global, range: Range<Anchor>) -> bool {
        if *since != self.version {
            let start_fragment_id = self.fragment_id_for_anchor(&range.start);
            let end_fragment_id = self.fragment_id_for_anchor(&range.end);
            // Only visit fragments that `since` has not fully observed.
            let mut cursor = self.fragments.filter::<_, usize>(&None, move |summary| {
                !since.observed_all(&summary.max_version)
            });
            cursor.next();
            while let Some(fragment) = cursor.item() {
                if fragment.id > *end_fragment_id {
                    break;
                }
                if fragment.id > *start_fragment_id {
                    // A visibility flip relative to `since` means text was
                    // inserted or deleted in the interim.
                    let was_visible = fragment.was_visible(since, &self.undo_map);
                    let is_visible = fragment.visible;
                    if was_visible != is_visible {
                        return true;
                    }
                }
                cursor.next();
            }
        }
        false
    }
2597
2598 pub fn has_edits_since(&self, since: &clock::Global) -> bool {
2599 if *since != self.version {
2600 let mut cursor = self.fragments.filter::<_, usize>(&None, move |summary| {
2601 !since.observed_all(&summary.max_version)
2602 });
2603 cursor.next();
2604 while let Some(fragment) = cursor.item() {
2605 let was_visible = fragment.was_visible(since, &self.undo_map);
2606 let is_visible = fragment.visible;
2607 if was_visible != is_visible {
2608 return true;
2609 }
2610 cursor.next();
2611 }
2612 }
2613 false
2614 }
2615
2616 pub fn range_to_version(&self, range: Range<usize>, version: &clock::Global) -> Range<usize> {
2617 let mut offsets = self.offsets_to_version([range.start, range.end], version);
2618 offsets.next().unwrap()..offsets.next().unwrap()
2619 }
2620
    /// Converts the given sequence of offsets into their corresponding offsets
    /// at a prior version of this buffer.
    ///
    /// Offsets must be supplied in non-decreasing order, since the edit stream
    /// is consumed front-to-back as offsets are mapped.
    pub fn offsets_to_version<'a>(
        &'a self,
        offsets: impl 'a + IntoIterator<Item = usize>,
        version: &'a clock::Global,
    ) -> impl 'a + Iterator<Item = usize> {
        let mut edits = self.edits_since(version).peekable();
        // Ends of the last edit that finished strictly before the current offset.
        let mut last_old_end = 0;
        let mut last_new_end = 0;
        offsets.into_iter().map(move |new_offset| {
            while let Some(edit) = edits.peek() {
                // Edits starting after the offset cannot affect its mapping.
                if edit.new.start > new_offset {
                    break;
                }

                // Edits entirely before the offset just shift the baseline.
                if edit.new.end <= new_offset {
                    last_new_end = edit.new.end;
                    last_old_end = edit.old.end;
                    edits.next();
                    continue;
                }

                // The offset falls inside this edit; clamp to the old range.
                let overshoot = new_offset - edit.new.start;
                return (edit.old.start + overshoot).min(edit.old.end);
            }

            last_old_end + new_offset.saturating_sub(last_new_end)
        })
    }
2651
    /// Visually annotates a position or range with the `Debug` representation of a value. The
    /// callsite of this function is used as a key - previous annotations will be removed.
    #[cfg(debug_assertions)]
    #[track_caller]
    pub fn debug<R, V>(&self, ranges: &R, value: V)
    where
        R: debug::ToDebugRanges,
        V: std::fmt::Debug,
    {
        self.debug_with_key(std::panic::Location::caller(), ranges, value);
    }

    /// Visually annotates a position or range with the `Debug` representation of a value. Previous
    /// debug annotations with the same key will be removed. The key is also used to determine the
    /// annotation's color.
    #[cfg(debug_assertions)]
    pub fn debug_with_key<K, R, V>(&self, key: &K, ranges: &R, value: V)
    where
        K: std::hash::Hash + 'static,
        R: debug::ToDebugRanges,
        V: std::fmt::Debug,
    {
        // Anchor each range to the text it surrounds so the annotations
        // follow subsequent edits.
        let ranges = ranges
            .to_debug_ranges(self)
            .into_iter()
            .map(|range| self.anchor_after(range.start)..self.anchor_before(range.end))
            .collect();
        debug::GlobalDebugRanges::with_locked(|debug_ranges| {
            debug_ranges.insert(key, ranges, format!("{value:?}").into());
        });
    }
2683}
2684
/// Rebuilds the visible and deleted ropes while applying an edit, copying
/// spans from the old ropes into whichever new rope they now belong to.
struct RopeBuilder<'a> {
    /// Cursor over the previous visible text, advanced as spans are consumed.
    old_visible_cursor: rope::Cursor<'a>,
    /// Cursor over the previous deleted text, advanced as spans are consumed.
    old_deleted_cursor: rope::Cursor<'a>,
    new_visible: Rope,
    new_deleted: Rope,
}
2691
impl<'a> RopeBuilder<'a> {
    fn new(old_visible_cursor: rope::Cursor<'a>, old_deleted_cursor: rope::Cursor<'a>) -> Self {
        Self {
            old_visible_cursor,
            old_deleted_cursor,
            new_visible: Rope::new(),
            new_deleted: Rope::new(),
        }
    }

    /// Copies `len.visible` bytes from the old visible rope and `len.deleted`
    /// bytes from the old deleted rope, preserving each span's visibility.
    fn append(&mut self, len: FragmentTextSummary) {
        self.push(len.visible, true, true);
        self.push(len.deleted, false, false);
    }

    /// Copies a fragment's text, reading from the rope where it *was* stored
    /// and writing to the rope where it now belongs.
    fn push_fragment(&mut self, fragment: &Fragment, was_visible: bool) {
        debug_assert!(fragment.len > 0);
        self.push(fragment.len, was_visible, fragment.visible)
    }

    /// Copies `len` bytes from the old visible (or deleted) rope into the new
    /// visible (or deleted) rope, advancing the corresponding old cursor.
    fn push(&mut self, len: usize, was_visible: bool, is_visible: bool) {
        let text = if was_visible {
            self.old_visible_cursor
                .slice(self.old_visible_cursor.offset() + len)
        } else {
            self.old_deleted_cursor
                .slice(self.old_deleted_cursor.offset() + len)
        };
        if is_visible {
            self.new_visible.append(text);
        } else {
            self.new_deleted.append(text);
        }
    }

    /// Appends newly inserted text, which is always visible.
    fn push_str(&mut self, text: &str) {
        self.new_visible.push(text);
    }

    /// Consumes the builder, copying any remaining old text, and returns the
    /// `(visible, deleted)` ropes.
    fn finish(mut self) -> (Rope, Rope) {
        self.new_visible.append(self.old_visible_cursor.suffix());
        self.new_deleted.append(self.old_deleted_cursor.suffix());
        (self.new_visible, self.new_deleted)
    }
}
2737
impl<D: TextDimension + Ord, F: FnMut(&FragmentSummary) -> bool> Iterator for Edits<'_, D, F> {
    type Item = (Edit<D>, Range<Anchor>);

    fn next(&mut self) -> Option<Self::Item> {
        // Adjacent changed fragments are coalesced into one `Edit`; the edit
        // under construction is held here until a gap of unchanged text is
        // reached.
        let mut pending_edit: Option<Self::Item> = None;
        let cursor = self.fragments_cursor.as_mut()?;

        while let Some(fragment) = cursor.item() {
            // Skip fragments before the requested range; stop past its end.
            if fragment.id < *self.range.start.0 {
                cursor.next();
                continue;
            } else if fragment.id > *self.range.end.0 {
                break;
            }

            // Advance the old/new positions across the unchanged visible text
            // between the previous fragment and this one.
            if cursor.start().visible > self.visible_cursor.offset() {
                let summary = self.visible_cursor.summary(cursor.start().visible);
                self.old_end.add_assign(&summary);
                self.new_end.add_assign(&summary);
            }

            // A gap after the pending edit means it is complete — emit it.
            if pending_edit
                .as_ref()
                .is_some_and(|(change, _)| change.new.end < self.new_end)
            {
                break;
            }

            let start_anchor = Anchor::new(
                fragment.timestamp,
                fragment.insertion_offset,
                Bias::Right,
                Some(self.buffer_id),
            );
            let end_anchor = Anchor::new(
                fragment.timestamp,
                fragment.insertion_offset + fragment.len,
                Bias::Left,
                Some(self.buffer_id),
            );

            if !fragment.was_visible(self.since, self.undos) && fragment.visible {
                // Insertion: not visible at `since`, visible now.
                let mut visible_end = cursor.end().visible;
                if fragment.id == *self.range.end.0 {
                    // Clamp the final fragment to the requested range's end.
                    visible_end = cmp::min(
                        visible_end,
                        cursor.start().visible + (self.range.end.1 - fragment.insertion_offset),
                    );
                }

                let fragment_summary = self.visible_cursor.summary(visible_end);
                let mut new_end = self.new_end;
                new_end.add_assign(&fragment_summary);
                if let Some((edit, range)) = pending_edit.as_mut() {
                    // Extend the pending edit instead of starting a new one.
                    edit.new.end = new_end;
                    range.end = end_anchor;
                } else {
                    pending_edit = Some((
                        Edit {
                            old: self.old_end..self.old_end,
                            new: self.new_end..new_end,
                        },
                        start_anchor..end_anchor,
                    ));
                }

                self.new_end = new_end;
            } else if fragment.was_visible(self.since, self.undos) && !fragment.visible {
                // Deletion: visible at `since`, not visible now.
                let mut deleted_end = cursor.end().deleted;
                if fragment.id == *self.range.end.0 {
                    // Clamp the final fragment to the requested range's end.
                    deleted_end = cmp::min(
                        deleted_end,
                        cursor.start().deleted + (self.range.end.1 - fragment.insertion_offset),
                    );
                }

                if cursor.start().deleted > self.deleted_cursor.offset() {
                    self.deleted_cursor.seek_forward(cursor.start().deleted);
                }
                let fragment_summary = self.deleted_cursor.summary(deleted_end);
                let mut old_end = self.old_end;
                old_end.add_assign(&fragment_summary);
                if let Some((edit, range)) = pending_edit.as_mut() {
                    // Extend the pending edit instead of starting a new one.
                    edit.old.end = old_end;
                    range.end = end_anchor;
                } else {
                    pending_edit = Some((
                        Edit {
                            old: self.old_end..old_end,
                            new: self.new_end..self.new_end,
                        },
                        start_anchor..end_anchor,
                    ));
                }

                self.old_end = old_end;
            }

            cursor.next();
        }

        pending_edit
    }
}
2842
impl Fragment {
    /// Whether this fragment is currently visible: its insertion has not been
    /// undone and every deletion affecting it *has* been undone.
    fn is_visible(&self, undos: &UndoMap) -> bool {
        !undos.is_undone(self.timestamp) && self.deletions.iter().all(|d| undos.is_undone(*d))
    }

    /// Whether this fragment was visible at `version`: the insertion had been
    /// observed (and not undone at that version), and every deletion either
    /// had not yet been observed or had been undone.
    fn was_visible(&self, version: &clock::Global, undos: &UndoMap) -> bool {
        (version.observed(self.timestamp) && !undos.was_undone(self.timestamp, version))
            && self
                .deletions
                .iter()
                .all(|d| !version.observed(*d) || undos.was_undone(*d, version))
    }
}
2856
2857impl sum_tree::Item for Fragment {
2858 type Summary = FragmentSummary;
2859
2860 fn summary(&self, _cx: &Option<clock::Global>) -> Self::Summary {
2861 let mut max_version = clock::Global::new();
2862 max_version.observe(self.timestamp);
2863 for deletion in &self.deletions {
2864 max_version.observe(*deletion);
2865 }
2866 max_version.join(&self.max_undos);
2867
2868 let mut min_insertion_version = clock::Global::new();
2869 min_insertion_version.observe(self.timestamp);
2870 let max_insertion_version = min_insertion_version.clone();
2871 if self.visible {
2872 FragmentSummary {
2873 max_id: self.id.clone(),
2874 text: FragmentTextSummary {
2875 visible: self.len,
2876 deleted: 0,
2877 },
2878 max_version,
2879 min_insertion_version,
2880 max_insertion_version,
2881 }
2882 } else {
2883 FragmentSummary {
2884 max_id: self.id.clone(),
2885 text: FragmentTextSummary {
2886 visible: 0,
2887 deleted: self.len,
2888 },
2889 max_version,
2890 min_insertion_version,
2891 max_insertion_version,
2892 }
2893 }
2894 }
2895}
2896
impl sum_tree::Summary for FragmentSummary {
    type Context<'a> = &'a Option<clock::Global>;

    fn zero(_cx: Self::Context<'_>) -> Self {
        Default::default()
    }

    /// Combines an adjacent summary to the right: the max locator wins, text
    /// lengths add, and the version vectors are joined (max) or met (min).
    fn add_summary(&mut self, other: &Self, _: Self::Context<'_>) {
        self.max_id.assign(&other.max_id);
        self.text.visible += &other.text.visible;
        self.text.deleted += &other.text.deleted;
        self.max_version.join(&other.max_version);
        self.min_insertion_version
            .meet(&other.min_insertion_version);
        self.max_insertion_version
            .join(&other.max_insertion_version);
    }
}
2915
/// The identity summary: minimal locator, zero text, empty version vectors.
impl Default for FragmentSummary {
    fn default() -> Self {
        FragmentSummary {
            max_id: Locator::min(),
            text: FragmentTextSummary::default(),
            max_version: clock::Global::new(),
            min_insertion_version: clock::Global::new(),
            max_insertion_version: clock::Global::new(),
        }
    }
}
2927
impl sum_tree::Item for InsertionFragment {
    type Summary = InsertionFragmentKey;

    /// An insertion fragment's summary is its key: the insertion timestamp
    /// plus the split offset within that insertion.
    fn summary(&self, _cx: ()) -> Self::Summary {
        InsertionFragmentKey {
            timestamp: self.timestamp,
            split_offset: self.split_offset,
        }
    }
}

impl sum_tree::KeyedItem for InsertionFragment {
    type Key = InsertionFragmentKey;

    fn key(&self) -> Self::Key {
        sum_tree::Item::summary(self, ())
    }
}
2946
impl InsertionFragment {
    /// Creates the insertion-index record pointing at the given fragment.
    fn new(fragment: &Fragment) -> Self {
        Self {
            timestamp: fragment.timestamp,
            split_offset: fragment.insertion_offset,
            fragment_id: fragment.id.clone(),
        }
    }

    /// Convenience for inserting a fragment's record into the insertions tree.
    fn insert_new(fragment: &Fragment) -> sum_tree::Edit<Self> {
        sum_tree::Edit::Insert(Self::new(fragment))
    }
}
2960
impl sum_tree::ContextLessSummary for InsertionFragmentKey {
    fn zero() -> Self {
        InsertionFragmentKey {
            timestamp: Lamport::MIN,
            split_offset: 0,
        }
    }

    /// Keys are totally ordered, so the summary of a run is the last key.
    fn add_summary(&mut self, summary: &Self) {
        *self = *summary;
    }
}
2973
/// An offset into the buffer's full content, counting both visible and
/// deleted text.
#[derive(Copy, Clone, Debug, Default, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct FullOffset(pub usize);

impl ops::AddAssign<usize> for FullOffset {
    fn add_assign(&mut self, rhs: usize) {
        self.0 += rhs;
    }
}

impl ops::Add<usize> for FullOffset {
    type Output = Self;

    fn add(mut self, rhs: usize) -> Self::Output {
        self += rhs;
        self
    }
}

impl ops::Sub for FullOffset {
    type Output = usize;

    /// The distance between two full offsets, as a plain byte count.
    fn sub(self, rhs: Self) -> Self::Output {
        self.0 - rhs.0
    }
}
2999
/// Measures fragments by their visible length only.
impl sum_tree::Dimension<'_, FragmentSummary> for usize {
    fn zero(_: &Option<clock::Global>) -> Self {
        Default::default()
    }

    fn add_summary(&mut self, summary: &FragmentSummary, _: &Option<clock::Global>) {
        *self += summary.text.visible;
    }
}

/// Measures fragments by their full (visible + deleted) length.
impl sum_tree::Dimension<'_, FragmentSummary> for FullOffset {
    fn zero(_: &Option<clock::Global>) -> Self {
        Default::default()
    }

    fn add_summary(&mut self, summary: &FragmentSummary, _: &Option<clock::Global>) {
        self.0 += summary.text.visible + summary.text.deleted;
    }
}

/// Tracks the maximum fragment locator seen so far (used to seek by locator).
impl<'a> sum_tree::Dimension<'a, FragmentSummary> for Option<&'a Locator> {
    fn zero(_: &Option<clock::Global>) -> Self {
        Default::default()
    }

    fn add_summary(&mut self, summary: &'a FragmentSummary, _: &Option<clock::Global>) {
        *self = Some(&summary.max_id);
    }
}

/// Allows seeking the fragment tree by visible byte offset.
impl sum_tree::SeekTarget<'_, FragmentSummary, FragmentTextSummary> for usize {
    fn cmp(
        &self,
        cursor_location: &FragmentTextSummary,
        _: &Option<clock::Global>,
    ) -> cmp::Ordering {
        Ord::cmp(self, &cursor_location.visible)
    }
}
3039
/// A full offset measured relative to a particular version. `Invalid` marks
/// positions within fragments only partially observed by that version, where
/// no meaningful offset exists.
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
enum VersionedFullOffset {
    Offset(FullOffset),
    Invalid,
}
3045
3046impl VersionedFullOffset {
3047 fn full_offset(&self) -> FullOffset {
3048 if let Self::Offset(position) = self {
3049 *position
3050 } else {
3051 panic!("invalid version")
3052 }
3053 }
3054}
3055
/// Defaults to a valid zero offset.
impl Default for VersionedFullOffset {
    fn default() -> Self {
        Self::Offset(Default::default())
    }
}
3061
impl<'a> sum_tree::Dimension<'a, FragmentSummary> for VersionedFullOffset {
    fn zero(_cx: &Option<clock::Global>) -> Self {
        Default::default()
    }

    fn add_summary(&mut self, summary: &'a FragmentSummary, cx: &Option<clock::Global>) {
        if let Self::Offset(offset) = self {
            // The context carries the version this dimension is measured against.
            let version = cx.as_ref().unwrap();
            // Fully-observed subtrees contribute their full length; a subtree
            // the version has only partially observed makes the offset
            // meaningless, and unobserved subtrees contribute nothing.
            if version.observed_all(&summary.max_insertion_version) {
                *offset += summary.text.visible + summary.text.deleted;
            } else if version.observed_any(&summary.min_insertion_version) {
                *self = Self::Invalid;
            }
        }
    }
}

impl sum_tree::SeekTarget<'_, FragmentSummary, Self> for VersionedFullOffset {
    fn cmp(&self, cursor_position: &Self, _: &Option<clock::Global>) -> cmp::Ordering {
        match (self, cursor_position) {
            (Self::Offset(a), Self::Offset(b)) => Ord::cmp(a, b),
            // The cursor may run past the last fully-observed position, but a
            // valid seek target always orders before it.
            (Self::Offset(_), Self::Invalid) => cmp::Ordering::Less,
            // Seek targets themselves are never invalid.
            (Self::Invalid, _) => unreachable!(),
        }
    }
}
3088
3089impl Operation {
3090 fn replica_id(&self) -> ReplicaId {
3091 operation_queue::Operation::lamport_timestamp(self).replica_id
3092 }
3093
3094 pub fn timestamp(&self) -> clock::Lamport {
3095 match self {
3096 Operation::Edit(edit) => edit.timestamp,
3097 Operation::Undo(undo) => undo.timestamp,
3098 }
3099 }
3100
3101 pub fn as_edit(&self) -> Option<&EditOperation> {
3102 match self {
3103 Operation::Edit(edit) => Some(edit),
3104 _ => None,
3105 }
3106 }
3107
3108 pub fn is_edit(&self) -> bool {
3109 matches!(self, Operation::Edit { .. })
3110 }
3111}
3112
impl operation_queue::Operation for Operation {
    /// The lamport timestamp used to order deferred operations in the queue.
    fn lamport_timestamp(&self) -> clock::Lamport {
        match self {
            Operation::Edit(edit) => edit.timestamp,
            Operation::Undo(undo) => undo.timestamp,
        }
    }
}
3121
/// Conversion of a buffer coordinate into a byte offset.
pub trait ToOffset {
    /// Converts this position into a byte offset in the snapshot's visible text.
    fn to_offset(&self, snapshot: &BufferSnapshot) -> usize;
    /// Turns this point into the next offset in the buffer that comes after this, respecting utf8 boundaries.
    fn to_next_offset(&self, snapshot: &BufferSnapshot) -> usize {
        snapshot
            .visible_text
            .ceil_char_boundary(self.to_offset(snapshot) + 1)
    }
    /// Turns this point into the previous offset in the buffer that comes before this, respecting utf8 boundaries.
    fn to_previous_offset(&self, snapshot: &BufferSnapshot) -> usize {
        snapshot
            .visible_text
            .floor_char_boundary(self.to_offset(snapshot).saturating_sub(1))
    }
}
3137
3138impl ToOffset for Point {
3139 #[inline]
3140 fn to_offset(&self, snapshot: &BufferSnapshot) -> usize {
3141 snapshot.point_to_offset(*self)
3142 }
3143}
3144
3145impl ToOffset for usize {
3146 #[track_caller]
3147 fn to_offset(&self, snapshot: &BufferSnapshot) -> usize {
3148 if !snapshot
3149 .as_rope()
3150 .assert_char_boundary::<{ cfg!(debug_assertions) }>(*self)
3151 {
3152 snapshot.as_rope().floor_char_boundary(*self)
3153 } else {
3154 *self
3155 }
3156 }
3157}
3158
3159impl ToOffset for Anchor {
3160 #[inline]
3161 fn to_offset(&self, snapshot: &BufferSnapshot) -> usize {
3162 snapshot.summary_for_anchor(self)
3163 }
3164}
3165
3166impl<T: ToOffset> ToOffset for &T {
3167 #[inline]
3168 fn to_offset(&self, content: &BufferSnapshot) -> usize {
3169 (*self).to_offset(content)
3170 }
3171}
3172
impl ToOffset for PointUtf16 {
    /// Delegates to the snapshot's UTF-16 point → byte-offset conversion.
    #[inline]
    fn to_offset(&self, snapshot: &BufferSnapshot) -> usize {
        snapshot.point_utf16_to_offset(*self)
    }
}
3179
impl ToOffset for Unclipped<PointUtf16> {
    /// Converts a possibly-out-of-range UTF-16 point, letting the snapshot
    /// clip it to a valid offset.
    #[inline]
    fn to_offset(&self, snapshot: &BufferSnapshot) -> usize {
        snapshot.unclipped_point_utf16_to_offset(*self)
    }
}
3186
/// Conversion of a buffer coordinate into a UTF-8 [`Point`] (row/column).
pub trait ToPoint {
    /// Resolves this position to a point within `snapshot`.
    fn to_point(&self, snapshot: &BufferSnapshot) -> Point;
}
3190
impl ToPoint for Anchor {
    /// Resolves the anchor to its current point in `snapshot`.
    #[inline]
    fn to_point(&self, snapshot: &BufferSnapshot) -> Point {
        snapshot.summary_for_anchor(self)
    }
}
3197
impl ToPoint for usize {
    /// Delegates to the snapshot's byte-offset → point coordinate map.
    #[inline]
    fn to_point(&self, snapshot: &BufferSnapshot) -> Point {
        snapshot.offset_to_point(*self)
    }
}
3204
impl ToPoint for Point {
    /// Identity conversion; the snapshot is not consulted.
    #[inline]
    fn to_point(&self, _: &BufferSnapshot) -> Point {
        *self
    }
}
3211
impl ToPoint for Unclipped<PointUtf16> {
    /// Converts a possibly-out-of-range UTF-16 point, letting the snapshot
    /// clip it to a valid point.
    #[inline]
    fn to_point(&self, snapshot: &BufferSnapshot) -> Point {
        snapshot.unclipped_point_utf16_to_point(*self)
    }
}
3218
/// Conversion of a buffer coordinate into a UTF-16 [`PointUtf16`].
pub trait ToPointUtf16 {
    /// Resolves this position to a UTF-16 point within `snapshot`.
    fn to_point_utf16(&self, snapshot: &BufferSnapshot) -> PointUtf16;
}
3222
impl ToPointUtf16 for Anchor {
    /// Resolves the anchor to its current UTF-16 point in `snapshot`.
    #[inline]
    fn to_point_utf16(&self, snapshot: &BufferSnapshot) -> PointUtf16 {
        snapshot.summary_for_anchor(self)
    }
}
3229
impl ToPointUtf16 for usize {
    /// Delegates to the snapshot's byte-offset → UTF-16 point conversion.
    #[inline]
    fn to_point_utf16(&self, snapshot: &BufferSnapshot) -> PointUtf16 {
        snapshot.offset_to_point_utf16(*self)
    }
}
3236
impl ToPointUtf16 for PointUtf16 {
    /// Identity conversion; the snapshot is not consulted.
    #[inline]
    fn to_point_utf16(&self, _: &BufferSnapshot) -> PointUtf16 {
        *self
    }
}
3243
impl ToPointUtf16 for Point {
    /// Delegates to the snapshot's UTF-8 → UTF-16 point conversion.
    #[inline]
    fn to_point_utf16(&self, snapshot: &BufferSnapshot) -> PointUtf16 {
        snapshot.point_to_point_utf16(*self)
    }
}
3250
/// Conversion of a buffer coordinate into a UTF-16 code-unit offset.
pub trait ToOffsetUtf16 {
    /// Resolves this position to a UTF-16 offset within `snapshot`.
    fn to_offset_utf16(&self, snapshot: &BufferSnapshot) -> OffsetUtf16;
}
3254
impl ToOffsetUtf16 for Anchor {
    /// Resolves the anchor to its current UTF-16 offset in `snapshot`.
    #[inline]
    fn to_offset_utf16(&self, snapshot: &BufferSnapshot) -> OffsetUtf16 {
        snapshot.summary_for_anchor(self)
    }
}
3261
impl ToOffsetUtf16 for usize {
    /// Delegates to the snapshot's byte-offset → UTF-16 offset conversion.
    #[inline]
    fn to_offset_utf16(&self, snapshot: &BufferSnapshot) -> OffsetUtf16 {
        snapshot.offset_to_offset_utf16(*self)
    }
}
3268
impl ToOffsetUtf16 for OffsetUtf16 {
    /// Identity conversion; the snapshot is not consulted.
    #[inline]
    fn to_offset_utf16(&self, _snapshot: &BufferSnapshot) -> OffsetUtf16 {
        *self
    }
}
3275
/// Construction of a concrete coordinate from an [`Anchor`], the inverse
/// direction of the `To*` conversion traits above.
pub trait FromAnchor {
    /// Resolves `anchor` against `snapshot` and produces this coordinate type.
    fn from_anchor(anchor: &Anchor, snapshot: &BufferSnapshot) -> Self;
}
3279
impl FromAnchor for Anchor {
    /// Identity conversion; the snapshot is not consulted.
    #[inline]
    fn from_anchor(anchor: &Anchor, _snapshot: &BufferSnapshot) -> Self {
        *anchor
    }
}
3286
impl FromAnchor for Point {
    /// Resolves the anchor to its current point in `snapshot`.
    #[inline]
    fn from_anchor(anchor: &Anchor, snapshot: &BufferSnapshot) -> Self {
        snapshot.summary_for_anchor(anchor)
    }
}
3293
impl FromAnchor for PointUtf16 {
    /// Resolves the anchor to its current UTF-16 point in `snapshot`.
    #[inline]
    fn from_anchor(anchor: &Anchor, snapshot: &BufferSnapshot) -> Self {
        snapshot.summary_for_anchor(anchor)
    }
}
3300
impl FromAnchor for usize {
    /// Resolves the anchor to its current byte offset in `snapshot`.
    #[inline]
    fn from_anchor(anchor: &Anchor, snapshot: &BufferSnapshot) -> Self {
        snapshot.summary_for_anchor(anchor)
    }
}
3307
/// The byte sequence used to terminate lines in a buffer's on-disk
/// representation.
// `Eq` added alongside `PartialEq`: the enum is fieldless, so equality is
// trivially reflexive and the derive is free; it lets `LineEnding` be used
// where a full equivalence relation is required.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum LineEnding {
    /// Lines end with `\n` (LF).
    Unix,
    /// Lines end with `\r\n` (CRLF).
    Windows,
}
3313
3314impl Default for LineEnding {
3315 fn default() -> Self {
3316 #[cfg(unix)]
3317 return Self::Unix;
3318
3319 #[cfg(not(unix))]
3320 return Self::Windows;
3321 }
3322}
3323
3324impl LineEnding {
3325 pub fn as_str(&self) -> &'static str {
3326 match self {
3327 LineEnding::Unix => "\n",
3328 LineEnding::Windows => "\r\n",
3329 }
3330 }
3331
3332 pub fn label(&self) -> &'static str {
3333 match self {
3334 LineEnding::Unix => "LF",
3335 LineEnding::Windows => "CRLF",
3336 }
3337 }
3338
3339 pub fn detect(text: &str) -> Self {
3340 let mut max_ix = cmp::min(text.len(), 1000);
3341 while !text.is_char_boundary(max_ix) {
3342 max_ix -= 1;
3343 }
3344
3345 if let Some(ix) = text[..max_ix].find(['\n']) {
3346 if ix > 0 && text.as_bytes()[ix - 1] == b'\r' {
3347 Self::Windows
3348 } else {
3349 Self::Unix
3350 }
3351 } else {
3352 Self::default()
3353 }
3354 }
3355
3356 pub fn normalize(text: &mut String) {
3357 if let Cow::Owned(replaced) = LINE_SEPARATORS_REGEX.replace_all(text, "\n") {
3358 *text = replaced;
3359 }
3360 }
3361
3362 pub fn normalize_arc(text: Arc<str>) -> Arc<str> {
3363 if let Cow::Owned(replaced) = LINE_SEPARATORS_REGEX.replace_all(&text, "\n") {
3364 replaced.into()
3365 } else {
3366 text
3367 }
3368 }
3369
3370 pub fn normalize_cow(text: Cow<str>) -> Cow<str> {
3371 if let Cow::Owned(replaced) = LINE_SEPARATORS_REGEX.replace_all(&text, "\n") {
3372 replaced.into()
3373 } else {
3374 text
3375 }
3376 }
3377}
3378
/// Yields the text of `rope` as `&str` fragments, rewriting the `\n`
/// separators produced by `str::lines` into the requested `line_ending`
/// sequence.
///
/// NOTE(review): `str::lines` strips a `\r` only when it immediately precedes
/// a `\n` in the *same* chunk; a `\r\n` pair split across two rope chunks
/// would leave the trailing `\r` of the first chunk in the output. Assumes
/// rope chunking never (or only harmlessly) splits `\r\n` — TODO confirm.
pub fn chunks_with_line_ending(rope: &Rope, line_ending: LineEnding) -> impl Iterator<Item = &str> {
    rope.chunks().flat_map(move |chunk| {
        // True once a line from this chunk has been emitted, so the separator
        // is inserted *between* lines rather than before the first one.
        let mut newline = false;
        // `str::lines` drops a trailing newline, so re-append the separator
        // when this chunk ends with one.
        let end_with_newline = chunk.ends_with('\n').then_some(line_ending.as_str());
        chunk
            .lines()
            .flat_map(move |line| {
                let ending = if newline {
                    Some(line_ending.as_str())
                } else {
                    None
                };
                newline = true;
                ending.into_iter().chain([line])
            })
            .chain(end_with_newline)
    })
}
3397
3398#[cfg(debug_assertions)]
3399pub mod debug {
3400 use super::*;
3401 use parking_lot::Mutex;
3402 use std::any::TypeId;
3403 use std::hash::{Hash, Hasher};
3404
3405 static GLOBAL_DEBUG_RANGES: Mutex<Option<GlobalDebugRanges>> = Mutex::new(None);
3406
3407 pub struct GlobalDebugRanges {
3408 pub ranges: Vec<DebugRange>,
3409 key_to_occurrence_index: HashMap<Key, usize>,
3410 next_occurrence_index: usize,
3411 }
3412
3413 pub struct DebugRange {
3414 key: Key,
3415 pub ranges: Vec<Range<Anchor>>,
3416 pub value: Arc<str>,
3417 pub occurrence_index: usize,
3418 }
3419
3420 #[derive(Debug, Clone, PartialEq, Eq, Hash)]
3421 struct Key {
3422 type_id: TypeId,
3423 hash: u64,
3424 }
3425
3426 impl GlobalDebugRanges {
3427 pub fn with_locked<R>(f: impl FnOnce(&mut Self) -> R) -> R {
3428 let mut state = GLOBAL_DEBUG_RANGES.lock();
3429 if state.is_none() {
3430 *state = Some(GlobalDebugRanges {
3431 ranges: Vec::new(),
3432 key_to_occurrence_index: HashMap::default(),
3433 next_occurrence_index: 0,
3434 });
3435 }
3436 if let Some(global_debug_ranges) = state.as_mut() {
3437 f(global_debug_ranges)
3438 } else {
3439 unreachable!()
3440 }
3441 }
3442
3443 pub fn insert<K: Hash + 'static>(
3444 &mut self,
3445 key: &K,
3446 ranges: Vec<Range<Anchor>>,
3447 value: Arc<str>,
3448 ) {
3449 let occurrence_index = *self
3450 .key_to_occurrence_index
3451 .entry(Key::new(key))
3452 .or_insert_with(|| {
3453 let occurrence_index = self.next_occurrence_index;
3454 self.next_occurrence_index += 1;
3455 occurrence_index
3456 });
3457 let key = Key::new(key);
3458 let existing = self
3459 .ranges
3460 .iter()
3461 .enumerate()
3462 .rfind(|(_, existing)| existing.key == key);
3463 if let Some((existing_ix, _)) = existing {
3464 self.ranges.remove(existing_ix);
3465 }
3466 self.ranges.push(DebugRange {
3467 ranges,
3468 key,
3469 value,
3470 occurrence_index,
3471 });
3472 }
3473
3474 pub fn remove<K: Hash + 'static>(&mut self, key: &K) {
3475 self.remove_impl(&Key::new(key));
3476 }
3477
3478 fn remove_impl(&mut self, key: &Key) {
3479 let existing = self
3480 .ranges
3481 .iter()
3482 .enumerate()
3483 .rfind(|(_, existing)| &existing.key == key);
3484 if let Some((existing_ix, _)) = existing {
3485 self.ranges.remove(existing_ix);
3486 }
3487 }
3488
3489 pub fn remove_all_with_key_type<K: 'static>(&mut self) {
3490 self.ranges
3491 .retain(|item| item.key.type_id != TypeId::of::<K>());
3492 }
3493 }
3494
3495 impl Key {
3496 fn new<K: Hash + 'static>(key: &K) -> Self {
3497 let type_id = TypeId::of::<K>();
3498 let mut hasher = collections::FxHasher::default();
3499 key.hash(&mut hasher);
3500 Key {
3501 type_id,
3502 hash: hasher.finish(),
3503 }
3504 }
3505 }
3506
3507 pub trait ToDebugRanges {
3508 fn to_debug_ranges(&self, snapshot: &BufferSnapshot) -> Vec<Range<usize>>;
3509 }
3510
3511 impl<T: ToOffset> ToDebugRanges for T {
3512 fn to_debug_ranges(&self, snapshot: &BufferSnapshot) -> Vec<Range<usize>> {
3513 [self.to_offset(snapshot)].to_debug_ranges(snapshot)
3514 }
3515 }
3516
3517 impl<T: ToOffset + Clone> ToDebugRanges for Range<T> {
3518 fn to_debug_ranges(&self, snapshot: &BufferSnapshot) -> Vec<Range<usize>> {
3519 [self.clone()].to_debug_ranges(snapshot)
3520 }
3521 }
3522
3523 impl<T: ToOffset> ToDebugRanges for Vec<T> {
3524 fn to_debug_ranges(&self, snapshot: &BufferSnapshot) -> Vec<Range<usize>> {
3525 self.as_slice().to_debug_ranges(snapshot)
3526 }
3527 }
3528
3529 impl<T: ToOffset> ToDebugRanges for Vec<Range<T>> {
3530 fn to_debug_ranges(&self, snapshot: &BufferSnapshot) -> Vec<Range<usize>> {
3531 self.as_slice().to_debug_ranges(snapshot)
3532 }
3533 }
3534
3535 impl<T: ToOffset> ToDebugRanges for [T] {
3536 fn to_debug_ranges(&self, snapshot: &BufferSnapshot) -> Vec<Range<usize>> {
3537 self.iter()
3538 .map(|item| {
3539 let offset = item.to_offset(snapshot);
3540 offset..offset
3541 })
3542 .collect()
3543 }
3544 }
3545
3546 impl<T: ToOffset> ToDebugRanges for [Range<T>] {
3547 fn to_debug_ranges(&self, snapshot: &BufferSnapshot) -> Vec<Range<usize>> {
3548 self.iter()
3549 .map(|range| range.start.to_offset(snapshot)..range.end.to_offset(snapshot))
3550 .collect()
3551 }
3552 }
3553}