1mod anchor;
2pub mod locator;
3#[cfg(any(test, feature = "test-support"))]
4pub mod network;
5pub mod operation_queue;
6mod patch;
7mod selection;
8pub mod subscription;
9#[cfg(test)]
10mod tests;
11mod undo_map;
12
13pub use anchor::*;
14use anyhow::{Context as _, Result};
15use clock::Lamport;
16pub use clock::ReplicaId;
17use collections::{HashMap, HashSet};
18use locator::Locator;
19use operation_queue::OperationQueue;
20pub use patch::Patch;
21use postage::{oneshot, prelude::*};
22
23use regex::Regex;
24pub use rope::*;
25pub use selection::*;
26use std::{
27 borrow::Cow,
28 cmp::{self, Ordering, Reverse},
29 fmt::Display,
30 future::Future,
31 iter::Iterator,
32 num::NonZeroU64,
33 ops::{self, Deref, Range, Sub},
34 str,
35 sync::{Arc, LazyLock},
36 time::{Duration, Instant},
37};
38pub use subscription::*;
39pub use sum_tree::Bias;
40use sum_tree::{Dimensions, FilterCursor, SumTree, TreeMap, TreeSet};
41use undo_map::UndoMap;
42use util::debug_panic;
43
44#[cfg(any(test, feature = "test-support"))]
45use util::RandomCharIter;
46
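/// Matches Windows-style ("\r\n") and bare carriage-return ("\r") line
/// separators, i.e. the separators that need to be normalized to "\n".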
47static LINE_SEPARATORS_REGEX: LazyLock<Regex> =
48 LazyLock::new(|| Regex::new(r"\r\n|\r").expect("Failed to create LINE_SEPARATORS_REGEX"));
49
50pub type TransactionId = clock::Lamport;
51
52pub struct Buffer {
53 snapshot: BufferSnapshot,
54 history: History,
55 deferred_ops: OperationQueue<Operation>,
56 deferred_replicas: HashSet<ReplicaId>,
57 pub lamport_clock: clock::Lamport,
58 subscriptions: Topic<usize>,
59 edit_id_resolvers: HashMap<clock::Lamport, Vec<oneshot::Sender<()>>>,
60 wait_for_version_txs: Vec<(clock::Global, oneshot::Sender<()>)>,
61}
62
63#[repr(transparent)]
64#[derive(Clone, Copy, Debug, Hash, PartialEq, PartialOrd, Ord, Eq)]
65pub struct BufferId(NonZeroU64);
66
67impl Display for BufferId {
68 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
69 write!(f, "{}", self.0)
70 }
71}
72
73impl From<NonZeroU64> for BufferId {
74 fn from(id: NonZeroU64) -> Self {
75 BufferId(id)
76 }
77}
78
79impl BufferId {
    /// Returns an error if `id` is zero, since `BufferId` wraps a `NonZeroU64`.
81 pub fn new(id: u64) -> anyhow::Result<Self> {
82 let id = NonZeroU64::new(id).context("Buffer id cannot be 0.")?;
83 Ok(Self(id))
84 }
85
    /// Increments this buffer id and returns the value it had before the
    /// increment, making it behave like a post-increment operator.
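    ///
    /// # Example
    ///
    /// A minimal sketch (marked `ignore`, not compiled as a doctest):
    ///
    /// ```ignore
    /// let mut id = BufferId::new(1).unwrap();
    /// let previous = id.next();
    /// assert_eq!(u64::from(previous), 1);
    /// assert_eq!(u64::from(id), 2);
    /// ```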
88 pub fn next(&mut self) -> Self {
89 let old = *self;
90 self.0 = self.0.saturating_add(1);
91 old
92 }
93
94 pub fn to_proto(self) -> u64 {
95 self.into()
96 }
97}
98
99impl From<BufferId> for u64 {
100 fn from(id: BufferId) -> Self {
101 id.0.get()
102 }
103}
104
105#[derive(Clone)]
106pub struct BufferSnapshot {
107 replica_id: ReplicaId,
108 remote_id: BufferId,
109 visible_text: Rope,
110 deleted_text: Rope,
111 line_ending: LineEnding,
112 undo_map: UndoMap,
113 fragments: SumTree<Fragment>,
114 insertions: SumTree<InsertionFragment>,
115 insertion_slices: TreeSet<InsertionSlice>,
116 pub version: clock::Global,
117}
118
119#[derive(Clone, Debug)]
120pub struct HistoryEntry {
121 transaction: Transaction,
122 first_edit_at: Instant,
123 last_edit_at: Instant,
124 suppress_grouping: bool,
125}
126
127#[derive(Clone, Debug)]
128pub struct Transaction {
129 pub id: TransactionId,
130 pub edit_ids: Vec<clock::Lamport>,
131 pub start: clock::Global,
132}
133
134impl Transaction {
135 pub fn merge_in(&mut self, other: Transaction) {
136 self.edit_ids.extend(other.edit_ids);
137 }
138}
139
140impl HistoryEntry {
141 pub fn transaction_id(&self) -> TransactionId {
142 self.transaction.id
143 }
144}
145
146struct History {
147 base_text: Rope,
148 operations: TreeMap<clock::Lamport, Operation>,
149 undo_stack: Vec<HistoryEntry>,
150 redo_stack: Vec<HistoryEntry>,
151 transaction_depth: usize,
152 group_interval: Duration,
153}
154
155#[derive(Clone, Debug, Eq, PartialEq)]
156struct InsertionSlice {
157 edit_id: clock::Lamport,
158 insertion_id: clock::Lamport,
159 range: Range<usize>,
160}
161
162impl Ord for InsertionSlice {
163 fn cmp(&self, other: &Self) -> Ordering {
164 self.edit_id
165 .cmp(&other.edit_id)
166 .then_with(|| self.insertion_id.cmp(&other.insertion_id))
167 .then_with(|| self.range.start.cmp(&other.range.start))
168 .then_with(|| self.range.end.cmp(&other.range.end))
169 }
170}
171
172impl PartialOrd for InsertionSlice {
173 fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
174 Some(self.cmp(other))
175 }
176}
177
178impl InsertionSlice {
179 fn from_fragment(edit_id: clock::Lamport, fragment: &Fragment) -> Self {
180 Self {
181 edit_id,
182 insertion_id: fragment.timestamp,
183 range: fragment.insertion_offset..fragment.insertion_offset + fragment.len,
184 }
185 }
186}
187
188impl History {
189 pub fn new(base_text: Rope) -> Self {
190 Self {
191 base_text,
192 operations: Default::default(),
193 undo_stack: Vec::new(),
194 redo_stack: Vec::new(),
195 transaction_depth: 0,
196 // Don't group transactions in tests unless we opt in, because it's a footgun.
197 #[cfg(any(test, feature = "test-support"))]
198 group_interval: Duration::ZERO,
199 #[cfg(not(any(test, feature = "test-support")))]
200 group_interval: Duration::from_millis(300),
201 }
202 }
203
204 fn push(&mut self, op: Operation) {
205 self.operations.insert(op.timestamp(), op);
206 }
207
208 fn start_transaction(
209 &mut self,
210 start: clock::Global,
211 now: Instant,
212 clock: &mut clock::Lamport,
213 ) -> Option<TransactionId> {
214 self.transaction_depth += 1;
215 if self.transaction_depth == 1 {
216 let id = clock.tick();
217 self.undo_stack.push(HistoryEntry {
218 transaction: Transaction {
219 id,
220 start,
221 edit_ids: Default::default(),
222 },
223 first_edit_at: now,
224 last_edit_at: now,
225 suppress_grouping: false,
226 });
227 Some(id)
228 } else {
229 None
230 }
231 }
232
233 fn end_transaction(&mut self, now: Instant) -> Option<&HistoryEntry> {
234 assert_ne!(self.transaction_depth, 0);
235 self.transaction_depth -= 1;
236 if self.transaction_depth == 0 {
237 if self
238 .undo_stack
239 .last()
240 .unwrap()
241 .transaction
242 .edit_ids
243 .is_empty()
244 {
245 self.undo_stack.pop();
246 None
247 } else {
248 self.redo_stack.clear();
249 let entry = self.undo_stack.last_mut().unwrap();
250 entry.last_edit_at = now;
251 Some(entry)
252 }
253 } else {
254 None
255 }
256 }
257
258 fn group(&mut self) -> Option<TransactionId> {
259 let mut count = 0;
260 let mut entries = self.undo_stack.iter();
261 if let Some(mut entry) = entries.next_back() {
262 while let Some(prev_entry) = entries.next_back() {
263 if !prev_entry.suppress_grouping
264 && entry.first_edit_at - prev_entry.last_edit_at < self.group_interval
265 {
266 entry = prev_entry;
267 count += 1;
268 } else {
269 break;
270 }
271 }
272 }
273 self.group_trailing(count)
274 }
275
276 fn group_until(&mut self, transaction_id: TransactionId) {
277 let mut count = 0;
278 for entry in self.undo_stack.iter().rev() {
279 if entry.transaction_id() == transaction_id {
280 self.group_trailing(count);
281 break;
282 } else if entry.suppress_grouping {
283 break;
284 } else {
285 count += 1;
286 }
287 }
288 }
289
290 fn group_trailing(&mut self, n: usize) -> Option<TransactionId> {
291 let new_len = self.undo_stack.len() - n;
292 let (entries_to_keep, entries_to_merge) = self.undo_stack.split_at_mut(new_len);
293 if let Some(last_entry) = entries_to_keep.last_mut() {
294 for entry in &*entries_to_merge {
295 for edit_id in &entry.transaction.edit_ids {
296 last_entry.transaction.edit_ids.push(*edit_id);
297 }
298 }
299
300 if let Some(entry) = entries_to_merge.last_mut() {
301 last_entry.last_edit_at = entry.last_edit_at;
302 }
303 }
304
305 self.undo_stack.truncate(new_len);
306 self.undo_stack.last().map(|e| e.transaction.id)
307 }
308
309 fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
310 self.undo_stack.last_mut().map(|entry| {
311 entry.suppress_grouping = true;
312 &entry.transaction
313 })
314 }
315
316 fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
317 assert_eq!(self.transaction_depth, 0);
318 self.undo_stack.push(HistoryEntry {
319 transaction,
320 first_edit_at: now,
321 last_edit_at: now,
322 suppress_grouping: false,
323 });
324 }
325
326 /// Differs from `push_transaction` in that it does not clear the redo
327 /// stack. Intended to be used to create a parent transaction to merge
328 /// potential child transactions into.
329 ///
330 /// The caller is responsible for removing it from the undo history using
331 /// `forget_transaction` if no edits are merged into it. Otherwise, if edits
332 /// are merged into this transaction, the caller is responsible for ensuring
    /// the redo stack is cleared. The easiest way to ensure this is to create
    /// transactions with the usual `start_transaction` and `end_transaction`
    /// methods and to merge the resulting transactions into the transaction
    /// created by this method.
337 fn push_empty_transaction(
338 &mut self,
339 start: clock::Global,
340 now: Instant,
341 clock: &mut clock::Lamport,
342 ) -> TransactionId {
343 assert_eq!(self.transaction_depth, 0);
344 let id = clock.tick();
345 let transaction = Transaction {
346 id,
347 start,
348 edit_ids: Vec::new(),
349 };
350 self.undo_stack.push(HistoryEntry {
351 transaction,
352 first_edit_at: now,
353 last_edit_at: now,
354 suppress_grouping: false,
355 });
356 id
357 }
358
359 fn push_undo(&mut self, op_id: clock::Lamport) {
360 assert_ne!(self.transaction_depth, 0);
361 if let Some(Operation::Edit(_)) = self.operations.get(&op_id) {
362 let last_transaction = self.undo_stack.last_mut().unwrap();
363 last_transaction.transaction.edit_ids.push(op_id);
364 }
365 }
366
367 fn pop_undo(&mut self) -> Option<&HistoryEntry> {
368 assert_eq!(self.transaction_depth, 0);
369 if let Some(entry) = self.undo_stack.pop() {
370 self.redo_stack.push(entry);
371 self.redo_stack.last()
372 } else {
373 None
374 }
375 }
376
377 fn remove_from_undo(&mut self, transaction_id: TransactionId) -> Option<&HistoryEntry> {
378 assert_eq!(self.transaction_depth, 0);
379
380 let entry_ix = self
381 .undo_stack
382 .iter()
383 .rposition(|entry| entry.transaction.id == transaction_id)?;
384 let entry = self.undo_stack.remove(entry_ix);
385 self.redo_stack.push(entry);
386 self.redo_stack.last()
387 }
388
389 fn remove_from_undo_until(&mut self, transaction_id: TransactionId) -> &[HistoryEntry] {
390 assert_eq!(self.transaction_depth, 0);
391
392 let redo_stack_start_len = self.redo_stack.len();
393 if let Some(entry_ix) = self
394 .undo_stack
395 .iter()
396 .rposition(|entry| entry.transaction.id == transaction_id)
397 {
398 self.redo_stack
399 .extend(self.undo_stack.drain(entry_ix..).rev());
400 }
401 &self.redo_stack[redo_stack_start_len..]
402 }
403
404 fn forget(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
405 assert_eq!(self.transaction_depth, 0);
406 if let Some(entry_ix) = self
407 .undo_stack
408 .iter()
409 .rposition(|entry| entry.transaction.id == transaction_id)
410 {
411 Some(self.undo_stack.remove(entry_ix).transaction)
412 } else if let Some(entry_ix) = self
413 .redo_stack
414 .iter()
415 .rposition(|entry| entry.transaction.id == transaction_id)
416 {
417 Some(self.redo_stack.remove(entry_ix).transaction)
418 } else {
419 None
420 }
421 }
422
423 fn transaction(&self, transaction_id: TransactionId) -> Option<&Transaction> {
424 let entry = self
425 .undo_stack
426 .iter()
427 .rfind(|entry| entry.transaction.id == transaction_id)
428 .or_else(|| {
429 self.redo_stack
430 .iter()
431 .rfind(|entry| entry.transaction.id == transaction_id)
432 })?;
433 Some(&entry.transaction)
434 }
435
436 fn transaction_mut(&mut self, transaction_id: TransactionId) -> Option<&mut Transaction> {
437 let entry = self
438 .undo_stack
439 .iter_mut()
440 .rfind(|entry| entry.transaction.id == transaction_id)
441 .or_else(|| {
442 self.redo_stack
443 .iter_mut()
444 .rfind(|entry| entry.transaction.id == transaction_id)
445 })?;
446 Some(&mut entry.transaction)
447 }
448
449 fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
450 if let Some(transaction) = self.forget(transaction)
451 && let Some(destination) = self.transaction_mut(destination)
452 {
453 destination.edit_ids.extend(transaction.edit_ids);
454 }
455 }
456
457 fn pop_redo(&mut self) -> Option<&HistoryEntry> {
458 assert_eq!(self.transaction_depth, 0);
459 if let Some(entry) = self.redo_stack.pop() {
460 self.undo_stack.push(entry);
461 self.undo_stack.last()
462 } else {
463 None
464 }
465 }
466
467 fn remove_from_redo(&mut self, transaction_id: TransactionId) -> &[HistoryEntry] {
468 assert_eq!(self.transaction_depth, 0);
469
470 let undo_stack_start_len = self.undo_stack.len();
471 if let Some(entry_ix) = self
472 .redo_stack
473 .iter()
474 .rposition(|entry| entry.transaction.id == transaction_id)
475 {
476 self.undo_stack
477 .extend(self.redo_stack.drain(entry_ix..).rev());
478 }
479 &self.undo_stack[undo_stack_start_len..]
480 }
481}
482
483struct Edits<'a, D: TextDimension, F: FnMut(&FragmentSummary) -> bool> {
484 visible_cursor: rope::Cursor<'a>,
485 deleted_cursor: rope::Cursor<'a>,
486 fragments_cursor: Option<FilterCursor<'a, 'static, F, Fragment, FragmentTextSummary>>,
487 undos: &'a UndoMap,
488 since: &'a clock::Global,
489 old_end: D,
490 new_end: D,
491 range: Range<(&'a Locator, usize)>,
492 buffer_id: BufferId,
493}
494
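/// A single contiguous change, described by the range it occupied in the old
/// text (`old`) and the range it occupies in the new text (`new`), both
/// expressed in the dimension `D` (e.g. byte offsets or points).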
495#[derive(Clone, Debug, Default, Eq, PartialEq)]
496pub struct Edit<D> {
497 pub old: Range<D>,
498 pub new: Range<D>,
}

impl<D> Edit<D>
501where
502 D: PartialEq,
503{
504 pub fn is_empty(&self) -> bool {
505 self.old.start == self.old.end && self.new.start == self.new.end
506 }
507}
508
509impl<D, DDelta> Edit<D>
510where
511 D: Sub<D, Output = DDelta> + Copy,
512{
513 pub fn old_len(&self) -> DDelta {
514 self.old.end - self.old.start
515 }
516
517 pub fn new_len(&self) -> DDelta {
518 self.new.end - self.new.start
519 }
520}
521
522impl<D1, D2> Edit<(D1, D2)> {
523 pub fn flatten(self) -> (Edit<D1>, Edit<D2>) {
524 (
525 Edit {
526 old: self.old.start.0..self.old.end.0,
527 new: self.new.start.0..self.new.end.0,
528 },
529 Edit {
530 old: self.old.start.1..self.old.end.1,
531 new: self.new.start.1..self.new.end.1,
532 },
533 )
534 }
535}
536
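/// A run of text belonging to a single insertion, as stored in the fragment
/// tree.
///
/// Fragments are ordered by their `Locator` ids. Deleted text is not removed:
/// the fragment is kept as an invisible tombstone (`visible == false`) with
/// the deleting edits recorded in `deletions`, which lets concurrent edits and
/// undo be resolved deterministically.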
537#[derive(Eq, PartialEq, Clone, Debug)]
538pub struct Fragment {
539 pub id: Locator,
540 pub timestamp: clock::Lamport,
541 pub insertion_offset: usize,
542 pub len: usize,
543 pub visible: bool,
544 pub deletions: HashSet<clock::Lamport>,
545 pub max_undos: clock::Global,
546}
547
548#[derive(Eq, PartialEq, Clone, Debug)]
549pub struct FragmentSummary {
550 text: FragmentTextSummary,
551 max_id: Locator,
552 max_version: clock::Global,
553 min_insertion_version: clock::Global,
554 max_insertion_version: clock::Global,
555}
556
557#[derive(Copy, Default, Clone, Debug, PartialEq, Eq)]
558struct FragmentTextSummary {
559 visible: usize,
560 deleted: usize,
561}
562
563impl<'a> sum_tree::Dimension<'a, FragmentSummary> for FragmentTextSummary {
564 fn zero(_: &Option<clock::Global>) -> Self {
565 Default::default()
566 }
567
568 fn add_summary(&mut self, summary: &'a FragmentSummary, _: &Option<clock::Global>) {
569 self.visible += summary.text.visible;
570 self.deleted += summary.text.deleted;
571 }
572}
573
574#[derive(Eq, PartialEq, Clone, Debug)]
575struct InsertionFragment {
576 timestamp: clock::Lamport,
577 split_offset: usize,
578 fragment_id: Locator,
579}
580
581#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)]
582struct InsertionFragmentKey {
583 timestamp: clock::Lamport,
584 split_offset: usize,
585}
586
587#[derive(Clone, Debug, Eq, PartialEq)]
588pub enum Operation {
589 Edit(EditOperation),
590 Undo(UndoOperation),
591}
592
593#[derive(Clone, Debug, Eq, PartialEq)]
594pub struct EditOperation {
595 pub timestamp: clock::Lamport,
596 pub version: clock::Global,
597 pub ranges: Vec<Range<FullOffset>>,
598 pub new_text: Vec<Arc<str>>,
599}
600
601#[derive(Clone, Debug, Eq, PartialEq)]
602pub struct UndoOperation {
603 pub timestamp: clock::Lamport,
604 pub version: clock::Global,
605 pub counts: HashMap<clock::Lamport, u32>,
606}
607
608/// Stores information about the indentation of a line (tabs and spaces).
609#[derive(Clone, Copy, Debug, Eq, PartialEq)]
610pub struct LineIndent {
611 pub tabs: u32,
612 pub spaces: u32,
613 pub line_blank: bool,
614}
615
616impl LineIndent {
617 pub fn from_chunks(chunks: &mut Chunks) -> Self {
618 let mut tabs = 0;
619 let mut spaces = 0;
620 let mut line_blank = true;
621
622 'outer: while let Some(chunk) = chunks.peek() {
623 for ch in chunk.chars() {
624 if ch == '\t' {
625 tabs += 1;
626 } else if ch == ' ' {
627 spaces += 1;
628 } else {
629 if ch != '\n' {
630 line_blank = false;
631 }
632 break 'outer;
633 }
634 }
635
636 chunks.next();
637 }
638
639 Self {
640 tabs,
641 spaces,
642 line_blank,
643 }
644 }
645
646 /// Constructs a new `LineIndent` which only contains spaces.
647 pub fn spaces(spaces: u32) -> Self {
648 Self {
649 tabs: 0,
650 spaces,
651 line_blank: true,
652 }
653 }
654
655 /// Constructs a new `LineIndent` which only contains tabs.
656 pub fn tabs(tabs: u32) -> Self {
657 Self {
658 tabs,
659 spaces: 0,
660 line_blank: true,
661 }
662 }
663
664 /// Indicates whether the line is empty.
665 pub fn is_line_empty(&self) -> bool {
666 self.tabs == 0 && self.spaces == 0 && self.line_blank
667 }
668
669 /// Indicates whether the line is blank (contains only whitespace).
670 pub fn is_line_blank(&self) -> bool {
671 self.line_blank
672 }
673
674 /// Returns the number of indentation characters (tabs or spaces).
675 pub fn raw_len(&self) -> u32 {
676 self.tabs + self.spaces
677 }
678
679 /// Returns the number of indentation characters (tabs or spaces), taking tab size into account.
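    ///
    /// A small sketch (marked `ignore`, not compiled as a doctest):
    ///
    /// ```ignore
    /// let indent = LineIndent::from("\t  code");
    /// assert_eq!(indent.raw_len(), 3);
    /// assert_eq!(indent.len(4), 6); // one tab at width 4, plus two spaces
    /// ```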
680 pub fn len(&self, tab_size: u32) -> u32 {
681 self.tabs * tab_size + self.spaces
682 }
683}
684
685impl From<&str> for LineIndent {
686 fn from(value: &str) -> Self {
687 Self::from_iter(value.chars())
688 }
689}
690
691impl FromIterator<char> for LineIndent {
692 fn from_iter<T: IntoIterator<Item = char>>(chars: T) -> Self {
693 let mut tabs = 0;
694 let mut spaces = 0;
695 let mut line_blank = true;
696 for c in chars {
697 if c == '\t' {
698 tabs += 1;
699 } else if c == ' ' {
700 spaces += 1;
701 } else {
702 if c != '\n' {
703 line_blank = false;
704 }
705 break;
706 }
707 }
708 Self {
709 tabs,
710 spaces,
711 line_blank,
712 }
713 }
714}
715
716impl Buffer {
717 pub fn new(replica_id: ReplicaId, remote_id: BufferId, base_text: impl Into<String>) -> Buffer {
718 let mut base_text = base_text.into();
719 let line_ending = LineEnding::detect(&base_text);
720 LineEnding::normalize(&mut base_text);
721 Self::new_normalized(replica_id, remote_id, line_ending, Rope::from(&*base_text))
722 }
723
724 pub fn new_normalized(
725 replica_id: ReplicaId,
726 remote_id: BufferId,
727 line_ending: LineEnding,
728 normalized: Rope,
729 ) -> Buffer {
730 let history = History::new(normalized);
731 let mut fragments = SumTree::new(&None);
732 let mut insertions = SumTree::default();
733
734 let mut lamport_clock = clock::Lamport::new(replica_id);
735 let mut version = clock::Global::new();
736
737 let visible_text = history.base_text.clone();
738 if !visible_text.is_empty() {
739 let insertion_timestamp = clock::Lamport::new(ReplicaId::LOCAL);
740 lamport_clock.observe(insertion_timestamp);
741 version.observe(insertion_timestamp);
742 let fragment_id = Locator::between(&Locator::min(), &Locator::max());
743 let fragment = Fragment {
744 id: fragment_id,
745 timestamp: insertion_timestamp,
746 insertion_offset: 0,
747 len: visible_text.len(),
748 visible: true,
749 deletions: Default::default(),
750 max_undos: Default::default(),
751 };
752 insertions.push(InsertionFragment::new(&fragment), ());
753 fragments.push(fragment, &None);
754 }
755
756 Buffer {
757 snapshot: BufferSnapshot {
758 replica_id,
759 remote_id,
760 visible_text,
761 deleted_text: Rope::new(),
762 line_ending,
763 fragments,
764 insertions,
765 version,
766 undo_map: Default::default(),
767 insertion_slices: Default::default(),
768 },
769 history,
770 deferred_ops: OperationQueue::new(),
771 deferred_replicas: HashSet::default(),
772 lamport_clock,
773 subscriptions: Default::default(),
774 edit_id_resolvers: Default::default(),
775 wait_for_version_txs: Default::default(),
776 }
777 }
778
779 pub fn version(&self) -> clock::Global {
780 self.version.clone()
781 }
782
783 pub fn snapshot(&self) -> BufferSnapshot {
784 self.snapshot.clone()
785 }
786
787 pub fn branch(&self) -> Self {
788 Self {
789 snapshot: self.snapshot.clone(),
790 history: History::new(self.base_text().clone()),
791 deferred_ops: OperationQueue::new(),
792 deferred_replicas: HashSet::default(),
793 lamport_clock: clock::Lamport::new(ReplicaId::LOCAL_BRANCH),
794 subscriptions: Default::default(),
795 edit_id_resolvers: Default::default(),
796 wait_for_version_txs: Default::default(),
797 }
798 }
799
800 pub fn replica_id(&self) -> ReplicaId {
801 self.lamport_clock.replica_id
802 }
803
804 pub fn remote_id(&self) -> BufferId {
805 self.remote_id
806 }
807
808 pub fn deferred_ops_len(&self) -> usize {
809 self.deferred_ops.len()
810 }
811
812 pub fn transaction_group_interval(&self) -> Duration {
813 self.history.group_interval
814 }
815
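    /// Applies a batch of local edits in a single transaction and returns the
    /// resulting `Operation`, which can be sent to other replicas and applied
    /// there with `apply_ops`.
    ///
    /// A minimal sketch (marked `ignore`, not compiled as a doctest):
    ///
    /// ```ignore
    /// let mut buffer = Buffer::new(ReplicaId::LOCAL, BufferId::new(1).unwrap(), "hello");
    /// buffer.edit([(5..5, " world")]);
    /// assert_eq!(buffer.text(), "hello world");
    /// ```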
816 pub fn edit<R, I, S, T>(&mut self, edits: R) -> Operation
817 where
818 R: IntoIterator<IntoIter = I>,
819 I: ExactSizeIterator<Item = (Range<S>, T)>,
820 S: ToOffset,
821 T: Into<Arc<str>>,
822 {
823 let edits = edits
824 .into_iter()
825 .map(|(range, new_text)| (range, new_text.into()));
826
827 self.start_transaction();
828 let timestamp = self.lamport_clock.tick();
829 let operation = Operation::Edit(self.apply_local_edit(edits, timestamp));
830
831 self.history.push(operation.clone());
832 self.history.push_undo(operation.timestamp());
833 self.snapshot.version.observe(operation.timestamp());
834 self.end_transaction();
835 operation
836 }
837
838 fn apply_local_edit<S: ToOffset, T: Into<Arc<str>>>(
839 &mut self,
840 edits: impl ExactSizeIterator<Item = (Range<S>, T)>,
841 timestamp: clock::Lamport,
842 ) -> EditOperation {
843 let mut edits_patch = Patch::default();
844 let mut edit_op = EditOperation {
845 timestamp,
846 version: self.version(),
847 ranges: Vec::with_capacity(edits.len()),
848 new_text: Vec::with_capacity(edits.len()),
849 };
850 let mut new_insertions = Vec::new();
851 let mut insertion_offset = 0;
852 let mut insertion_slices = Vec::new();
853
854 let mut edits = edits
855 .map(|(range, new_text)| (range.to_offset(&*self), new_text))
856 .peekable();
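        // The loop below sweeps the old fragment tree from left to right:
        // fragments that precede an edit are copied over unchanged, the
        // fragment at each edit boundary is split, a new fragment is pushed
        // for the inserted text, and fragments intersecting the deleted range
        // are re-pushed as invisible tombstones rather than being removed.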
857
858 let mut new_ropes =
859 RopeBuilder::new(self.visible_text.cursor(0), self.deleted_text.cursor(0));
860 let mut old_fragments = self.fragments.cursor::<FragmentTextSummary>(&None);
861 let mut new_fragments = old_fragments.slice(&edits.peek().unwrap().0.start, Bias::Right);
862 new_ropes.append(new_fragments.summary().text);
863
864 let mut fragment_start = old_fragments.start().visible;
865 for (range, new_text) in edits {
866 let new_text = LineEnding::normalize_arc(new_text.into());
867 let fragment_end = old_fragments.end().visible;
868
869 // If the current fragment ends before this range, then jump ahead to the first fragment
870 // that extends past the start of this range, reusing any intervening fragments.
871 if fragment_end < range.start {
872 // If the current fragment has been partially consumed, then consume the rest of it
873 // and advance to the next fragment before slicing.
874 if fragment_start > old_fragments.start().visible {
875 if fragment_end > fragment_start {
876 let mut suffix = old_fragments.item().unwrap().clone();
877 suffix.len = fragment_end - fragment_start;
878 suffix.insertion_offset += fragment_start - old_fragments.start().visible;
879 new_insertions.push(InsertionFragment::insert_new(&suffix));
880 new_ropes.push_fragment(&suffix, suffix.visible);
881 new_fragments.push(suffix, &None);
882 }
883 old_fragments.next();
884 }
885
886 let slice = old_fragments.slice(&range.start, Bias::Right);
887 new_ropes.append(slice.summary().text);
888 new_fragments.append(slice, &None);
889 fragment_start = old_fragments.start().visible;
890 }
891
892 let full_range_start = FullOffset(range.start + old_fragments.start().deleted);
893
894 // Preserve any portion of the current fragment that precedes this range.
895 if fragment_start < range.start {
896 let mut prefix = old_fragments.item().unwrap().clone();
897 prefix.len = range.start - fragment_start;
898 prefix.insertion_offset += fragment_start - old_fragments.start().visible;
899 prefix.id = Locator::between(&new_fragments.summary().max_id, &prefix.id);
900 new_insertions.push(InsertionFragment::insert_new(&prefix));
901 new_ropes.push_fragment(&prefix, prefix.visible);
902 new_fragments.push(prefix, &None);
903 fragment_start = range.start;
904 }
905
906 // Insert the new text before any existing fragments within the range.
907 if !new_text.is_empty() {
908 let new_start = new_fragments.summary().text.visible;
909
910 let fragment = Fragment {
911 id: Locator::between(
912 &new_fragments.summary().max_id,
913 old_fragments
914 .item()
915 .map_or(&Locator::max(), |old_fragment| &old_fragment.id),
916 ),
917 timestamp,
918 insertion_offset,
919 len: new_text.len(),
920 deletions: Default::default(),
921 max_undos: Default::default(),
922 visible: true,
923 };
924 edits_patch.push(Edit {
925 old: fragment_start..fragment_start,
926 new: new_start..new_start + new_text.len(),
927 });
928 insertion_slices.push(InsertionSlice::from_fragment(timestamp, &fragment));
929 new_insertions.push(InsertionFragment::insert_new(&fragment));
930 new_ropes.push_str(new_text.as_ref());
931 new_fragments.push(fragment, &None);
932 insertion_offset += new_text.len();
933 }
934
935 // Advance through every fragment that intersects this range, marking the intersecting
936 // portions as deleted.
937 while fragment_start < range.end {
938 let fragment = old_fragments.item().unwrap();
939 let fragment_end = old_fragments.end().visible;
940 let mut intersection = fragment.clone();
941 let intersection_end = cmp::min(range.end, fragment_end);
942 if fragment.visible {
943 intersection.len = intersection_end - fragment_start;
944 intersection.insertion_offset += fragment_start - old_fragments.start().visible;
945 intersection.id =
946 Locator::between(&new_fragments.summary().max_id, &intersection.id);
947 intersection.deletions.insert(timestamp);
948 intersection.visible = false;
949 }
950 if intersection.len > 0 {
951 if fragment.visible && !intersection.visible {
952 let new_start = new_fragments.summary().text.visible;
953 edits_patch.push(Edit {
954 old: fragment_start..intersection_end,
955 new: new_start..new_start,
956 });
957 insertion_slices
958 .push(InsertionSlice::from_fragment(timestamp, &intersection));
959 }
960 new_insertions.push(InsertionFragment::insert_new(&intersection));
961 new_ropes.push_fragment(&intersection, fragment.visible);
962 new_fragments.push(intersection, &None);
963 fragment_start = intersection_end;
964 }
965 if fragment_end <= range.end {
966 old_fragments.next();
967 }
968 }
969
970 let full_range_end = FullOffset(range.end + old_fragments.start().deleted);
971 edit_op.ranges.push(full_range_start..full_range_end);
972 edit_op.new_text.push(new_text);
973 }
974
975 // If the current fragment has been partially consumed, then consume the rest of it
976 // and advance to the next fragment before slicing.
977 if fragment_start > old_fragments.start().visible {
978 let fragment_end = old_fragments.end().visible;
979 if fragment_end > fragment_start {
980 let mut suffix = old_fragments.item().unwrap().clone();
981 suffix.len = fragment_end - fragment_start;
982 suffix.insertion_offset += fragment_start - old_fragments.start().visible;
983 new_insertions.push(InsertionFragment::insert_new(&suffix));
984 new_ropes.push_fragment(&suffix, suffix.visible);
985 new_fragments.push(suffix, &None);
986 }
987 old_fragments.next();
988 }
989
990 let suffix = old_fragments.suffix();
991 new_ropes.append(suffix.summary().text);
992 new_fragments.append(suffix, &None);
993 let (visible_text, deleted_text) = new_ropes.finish();
994 drop(old_fragments);
995
996 self.snapshot.fragments = new_fragments;
997 self.snapshot.insertions.edit(new_insertions, ());
998 self.snapshot.visible_text = visible_text;
999 self.snapshot.deleted_text = deleted_text;
1000 self.subscriptions.publish_mut(&edits_patch);
1001 self.snapshot.insertion_slices.extend(insertion_slices);
1002 edit_op
1003 }
1004
1005 pub fn set_line_ending(&mut self, line_ending: LineEnding) {
1006 self.snapshot.line_ending = line_ending;
1007 }
1008
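    /// Applies operations received from other replicas. Operations whose
    /// causal dependencies (their `version`) have not been observed yet are
    /// deferred and retried automatically once the missing operations arrive.
    ///
    /// A replication sketch (marked `ignore`; it assumes `buffer_a` and
    /// `buffer_b` are replicas of the same buffer):
    ///
    /// ```ignore
    /// let op = buffer_a.edit([(0..0, "x")]);
    /// buffer_b.apply_ops([op]);
    /// assert_eq!(buffer_a.text(), buffer_b.text());
    /// ```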
1009 pub fn apply_ops<I: IntoIterator<Item = Operation>>(&mut self, ops: I) {
1010 let mut deferred_ops = Vec::new();
1011 for op in ops {
1012 self.history.push(op.clone());
1013 if self.can_apply_op(&op) {
1014 self.apply_op(op);
1015 } else {
1016 self.deferred_replicas.insert(op.replica_id());
1017 deferred_ops.push(op);
1018 }
1019 }
1020 self.deferred_ops.insert(deferred_ops);
1021 self.flush_deferred_ops();
1022 }
1023
1024 fn apply_op(&mut self, op: Operation) {
1025 match op {
1026 Operation::Edit(edit) => {
1027 if !self.version.observed(edit.timestamp) {
1028 self.apply_remote_edit(
1029 &edit.version,
1030 &edit.ranges,
1031 &edit.new_text,
1032 edit.timestamp,
1033 );
1034 self.snapshot.version.observe(edit.timestamp);
1035 self.lamport_clock.observe(edit.timestamp);
1036 self.resolve_edit(edit.timestamp);
1037 }
1038 }
1039 Operation::Undo(undo) => {
1040 if !self.version.observed(undo.timestamp) {
1041 self.apply_undo(&undo);
1042 self.snapshot.version.observe(undo.timestamp);
1043 self.lamport_clock.observe(undo.timestamp);
1044 }
1045 }
1046 }
1047 self.wait_for_version_txs.retain_mut(|(version, tx)| {
1048 if self.snapshot.version().observed_all(version) {
1049 tx.try_send(()).ok();
1050 false
1051 } else {
1052 true
1053 }
1054 });
1055 }
1056
1057 fn apply_remote_edit(
1058 &mut self,
1059 version: &clock::Global,
1060 ranges: &[Range<FullOffset>],
1061 new_text: &[Arc<str>],
1062 timestamp: clock::Lamport,
1063 ) {
1064 if ranges.is_empty() {
1065 return;
1066 }
1067
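        // Offsets in a remote edit are `FullOffset`s (visible plus deleted
        // text) relative to the `version` the sending replica had observed, so
        // the fragment cursor below is seeded with that version. Concurrent
        // insertions at the same position are ordered by Lamport timestamp.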
1068 let edits = ranges.iter().zip(new_text.iter());
1069 let mut edits_patch = Patch::default();
1070 let mut insertion_slices = Vec::new();
1071 let cx = Some(version.clone());
1072 let mut new_insertions = Vec::new();
1073 let mut insertion_offset = 0;
1074 let mut new_ropes =
1075 RopeBuilder::new(self.visible_text.cursor(0), self.deleted_text.cursor(0));
1076 let mut old_fragments = self
1077 .fragments
1078 .cursor::<Dimensions<VersionedFullOffset, usize>>(&cx);
1079 let mut new_fragments =
1080 old_fragments.slice(&VersionedFullOffset::Offset(ranges[0].start), Bias::Left);
1081 new_ropes.append(new_fragments.summary().text);
1082
1083 let mut fragment_start = old_fragments.start().0.full_offset();
1084 for (range, new_text) in edits {
1085 let fragment_end = old_fragments.end().0.full_offset();
1086
1087 // If the current fragment ends before this range, then jump ahead to the first fragment
1088 // that extends past the start of this range, reusing any intervening fragments.
1089 if fragment_end < range.start {
1090 // If the current fragment has been partially consumed, then consume the rest of it
1091 // and advance to the next fragment before slicing.
1092 if fragment_start > old_fragments.start().0.full_offset() {
1093 if fragment_end > fragment_start {
1094 let mut suffix = old_fragments.item().unwrap().clone();
1095 suffix.len = fragment_end.0 - fragment_start.0;
1096 suffix.insertion_offset +=
1097 fragment_start - old_fragments.start().0.full_offset();
1098 new_insertions.push(InsertionFragment::insert_new(&suffix));
1099 new_ropes.push_fragment(&suffix, suffix.visible);
1100 new_fragments.push(suffix, &None);
1101 }
1102 old_fragments.next();
1103 }
1104
1105 let slice =
1106 old_fragments.slice(&VersionedFullOffset::Offset(range.start), Bias::Left);
1107 new_ropes.append(slice.summary().text);
1108 new_fragments.append(slice, &None);
1109 fragment_start = old_fragments.start().0.full_offset();
1110 }
1111
1112 // If we are at the end of a non-concurrent fragment, advance to the next one.
1113 let fragment_end = old_fragments.end().0.full_offset();
1114 if fragment_end == range.start && fragment_end > fragment_start {
1115 let mut fragment = old_fragments.item().unwrap().clone();
1116 fragment.len = fragment_end.0 - fragment_start.0;
1117 fragment.insertion_offset += fragment_start - old_fragments.start().0.full_offset();
1118 new_insertions.push(InsertionFragment::insert_new(&fragment));
1119 new_ropes.push_fragment(&fragment, fragment.visible);
1120 new_fragments.push(fragment, &None);
1121 old_fragments.next();
1122 fragment_start = old_fragments.start().0.full_offset();
1123 }
1124
        // Skip over insertions that are concurrent to this edit, but have a lower Lamport
        // timestamp.
1127 while let Some(fragment) = old_fragments.item() {
1128 if fragment_start == range.start && fragment.timestamp > timestamp {
1129 new_ropes.push_fragment(fragment, fragment.visible);
1130 new_fragments.push(fragment.clone(), &None);
1131 old_fragments.next();
1132 debug_assert_eq!(fragment_start, range.start);
1133 } else {
1134 break;
1135 }
1136 }
1137 debug_assert!(fragment_start <= range.start);
1138
1139 // Preserve any portion of the current fragment that precedes this range.
1140 if fragment_start < range.start {
1141 let mut prefix = old_fragments.item().unwrap().clone();
1142 prefix.len = range.start.0 - fragment_start.0;
1143 prefix.insertion_offset += fragment_start - old_fragments.start().0.full_offset();
1144 prefix.id = Locator::between(&new_fragments.summary().max_id, &prefix.id);
1145 new_insertions.push(InsertionFragment::insert_new(&prefix));
1146 fragment_start = range.start;
1147 new_ropes.push_fragment(&prefix, prefix.visible);
1148 new_fragments.push(prefix, &None);
1149 }
1150
1151 // Insert the new text before any existing fragments within the range.
1152 if !new_text.is_empty() {
1153 let mut old_start = old_fragments.start().1;
1154 if old_fragments.item().is_some_and(|f| f.visible) {
1155 old_start += fragment_start.0 - old_fragments.start().0.full_offset().0;
1156 }
1157 let new_start = new_fragments.summary().text.visible;
1158 let fragment = Fragment {
1159 id: Locator::between(
1160 &new_fragments.summary().max_id,
1161 old_fragments
1162 .item()
1163 .map_or(&Locator::max(), |old_fragment| &old_fragment.id),
1164 ),
1165 timestamp,
1166 insertion_offset,
1167 len: new_text.len(),
1168 deletions: Default::default(),
1169 max_undos: Default::default(),
1170 visible: true,
1171 };
1172 edits_patch.push(Edit {
1173 old: old_start..old_start,
1174 new: new_start..new_start + new_text.len(),
1175 });
1176 insertion_slices.push(InsertionSlice::from_fragment(timestamp, &fragment));
1177 new_insertions.push(InsertionFragment::insert_new(&fragment));
1178 new_ropes.push_str(new_text);
1179 new_fragments.push(fragment, &None);
1180 insertion_offset += new_text.len();
1181 }
1182
1183 // Advance through every fragment that intersects this range, marking the intersecting
1184 // portions as deleted.
1185 while fragment_start < range.end {
1186 let fragment = old_fragments.item().unwrap();
1187 let fragment_end = old_fragments.end().0.full_offset();
1188 let mut intersection = fragment.clone();
1189 let intersection_end = cmp::min(range.end, fragment_end);
1190 if fragment.was_visible(version, &self.undo_map) {
1191 intersection.len = intersection_end.0 - fragment_start.0;
1192 intersection.insertion_offset +=
1193 fragment_start - old_fragments.start().0.full_offset();
1194 intersection.id =
1195 Locator::between(&new_fragments.summary().max_id, &intersection.id);
1196 intersection.deletions.insert(timestamp);
1197 intersection.visible = false;
1198 insertion_slices.push(InsertionSlice::from_fragment(timestamp, &intersection));
1199 }
1200 if intersection.len > 0 {
1201 if fragment.visible && !intersection.visible {
1202 let old_start = old_fragments.start().1
1203 + (fragment_start.0 - old_fragments.start().0.full_offset().0);
1204 let new_start = new_fragments.summary().text.visible;
1205 edits_patch.push(Edit {
1206 old: old_start..old_start + intersection.len,
1207 new: new_start..new_start,
1208 });
1209 }
1210 new_insertions.push(InsertionFragment::insert_new(&intersection));
1211 new_ropes.push_fragment(&intersection, fragment.visible);
1212 new_fragments.push(intersection, &None);
1213 fragment_start = intersection_end;
1214 }
1215 if fragment_end <= range.end {
1216 old_fragments.next();
1217 }
1218 }
1219 }
1220
1221 // If the current fragment has been partially consumed, then consume the rest of it
1222 // and advance to the next fragment before slicing.
1223 if fragment_start > old_fragments.start().0.full_offset() {
1224 let fragment_end = old_fragments.end().0.full_offset();
1225 if fragment_end > fragment_start {
1226 let mut suffix = old_fragments.item().unwrap().clone();
1227 suffix.len = fragment_end.0 - fragment_start.0;
1228 suffix.insertion_offset += fragment_start - old_fragments.start().0.full_offset();
1229 new_insertions.push(InsertionFragment::insert_new(&suffix));
1230 new_ropes.push_fragment(&suffix, suffix.visible);
1231 new_fragments.push(suffix, &None);
1232 }
1233 old_fragments.next();
1234 }
1235
1236 let suffix = old_fragments.suffix();
1237 new_ropes.append(suffix.summary().text);
1238 new_fragments.append(suffix, &None);
1239 let (visible_text, deleted_text) = new_ropes.finish();
1240 drop(old_fragments);
1241
1242 self.snapshot.fragments = new_fragments;
1243 self.snapshot.visible_text = visible_text;
1244 self.snapshot.deleted_text = deleted_text;
1245 self.snapshot.insertions.edit(new_insertions, ());
1246 self.snapshot.insertion_slices.extend(insertion_slices);
1247 self.subscriptions.publish_mut(&edits_patch)
1248 }
1249
1250 fn fragment_ids_for_edits<'a>(
1251 &'a self,
1252 edit_ids: impl Iterator<Item = &'a clock::Lamport>,
1253 ) -> Vec<&'a Locator> {
1254 // Get all of the insertion slices changed by the given edits.
1255 let mut insertion_slices = Vec::new();
1256 for edit_id in edit_ids {
1257 let insertion_slice = InsertionSlice {
1258 edit_id: *edit_id,
1259 insertion_id: clock::Lamport::MIN,
1260 range: 0..0,
1261 };
1262 let slices = self
1263 .snapshot
1264 .insertion_slices
1265 .iter_from(&insertion_slice)
1266 .take_while(|slice| slice.edit_id == *edit_id);
1267 insertion_slices.extend(slices)
1268 }
1269 insertion_slices
1270 .sort_unstable_by_key(|s| (s.insertion_id, s.range.start, Reverse(s.range.end)));
1271
1272 // Get all of the fragments corresponding to these insertion slices.
1273 let mut fragment_ids = Vec::new();
1274 let mut insertions_cursor = self.insertions.cursor::<InsertionFragmentKey>(());
1275 for insertion_slice in &insertion_slices {
1276 if insertion_slice.insertion_id != insertions_cursor.start().timestamp
1277 || insertion_slice.range.start > insertions_cursor.start().split_offset
1278 {
1279 insertions_cursor.seek_forward(
1280 &InsertionFragmentKey {
1281 timestamp: insertion_slice.insertion_id,
1282 split_offset: insertion_slice.range.start,
1283 },
1284 Bias::Left,
1285 );
1286 }
1287 while let Some(item) = insertions_cursor.item() {
1288 if item.timestamp != insertion_slice.insertion_id
1289 || item.split_offset >= insertion_slice.range.end
1290 {
1291 break;
1292 }
1293 fragment_ids.push(&item.fragment_id);
1294 insertions_cursor.next();
1295 }
1296 }
1297 fragment_ids.sort_unstable();
1298 fragment_ids
1299 }
1300
1301 fn apply_undo(&mut self, undo: &UndoOperation) {
1302 self.snapshot.undo_map.insert(undo);
1303
1304 let mut edits = Patch::default();
1305 let mut old_fragments = self
1306 .fragments
1307 .cursor::<Dimensions<Option<&Locator>, usize>>(&None);
1308 let mut new_fragments = SumTree::new(&None);
1309 let mut new_ropes =
1310 RopeBuilder::new(self.visible_text.cursor(0), self.deleted_text.cursor(0));
1311
1312 for fragment_id in self.fragment_ids_for_edits(undo.counts.keys()) {
1313 let preceding_fragments = old_fragments.slice(&Some(fragment_id), Bias::Left);
1314 new_ropes.append(preceding_fragments.summary().text);
1315 new_fragments.append(preceding_fragments, &None);
1316
1317 if let Some(fragment) = old_fragments.item() {
1318 let mut fragment = fragment.clone();
1319 let fragment_was_visible = fragment.visible;
1320
1321 fragment.visible = fragment.is_visible(&self.undo_map);
1322 fragment.max_undos.observe(undo.timestamp);
1323
1324 let old_start = old_fragments.start().1;
1325 let new_start = new_fragments.summary().text.visible;
1326 if fragment_was_visible && !fragment.visible {
1327 edits.push(Edit {
1328 old: old_start..old_start + fragment.len,
1329 new: new_start..new_start,
1330 });
1331 } else if !fragment_was_visible && fragment.visible {
1332 edits.push(Edit {
1333 old: old_start..old_start,
1334 new: new_start..new_start + fragment.len,
1335 });
1336 }
1337 new_ropes.push_fragment(&fragment, fragment_was_visible);
1338 new_fragments.push(fragment, &None);
1339
1340 old_fragments.next();
1341 }
1342 }
1343
1344 let suffix = old_fragments.suffix();
1345 new_ropes.append(suffix.summary().text);
1346 new_fragments.append(suffix, &None);
1347
1348 drop(old_fragments);
1349 let (visible_text, deleted_text) = new_ropes.finish();
1350 self.snapshot.fragments = new_fragments;
1351 self.snapshot.visible_text = visible_text;
1352 self.snapshot.deleted_text = deleted_text;
1353 self.subscriptions.publish_mut(&edits);
1354 }
1355
1356 fn flush_deferred_ops(&mut self) {
1357 self.deferred_replicas.clear();
1358 let mut deferred_ops = Vec::new();
1359 for op in self.deferred_ops.drain().iter().cloned() {
1360 if self.can_apply_op(&op) {
1361 self.apply_op(op);
1362 } else {
1363 self.deferred_replicas.insert(op.replica_id());
1364 deferred_ops.push(op);
1365 }
1366 }
1367 self.deferred_ops.insert(deferred_ops);
1368 }
1369
1370 fn can_apply_op(&self, op: &Operation) -> bool {
1371 if self.deferred_replicas.contains(&op.replica_id()) {
1372 false
1373 } else {
1374 self.version.observed_all(match op {
1375 Operation::Edit(edit) => &edit.version,
1376 Operation::Undo(undo) => &undo.version,
1377 })
1378 }
1379 }
1380
1381 pub fn has_deferred_ops(&self) -> bool {
1382 !self.deferred_ops.is_empty()
1383 }
1384
1385 pub fn peek_undo_stack(&self) -> Option<&HistoryEntry> {
1386 self.history.undo_stack.last()
1387 }
1388
1389 pub fn peek_redo_stack(&self) -> Option<&HistoryEntry> {
1390 self.history.redo_stack.last()
1391 }
1392
1393 pub fn start_transaction(&mut self) -> Option<TransactionId> {
1394 self.start_transaction_at(Instant::now())
1395 }
1396
1397 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
1398 self.history
1399 .start_transaction(self.version.clone(), now, &mut self.lamport_clock)
1400 }
1401
1402 pub fn end_transaction(&mut self) -> Option<(TransactionId, clock::Global)> {
1403 self.end_transaction_at(Instant::now())
1404 }
1405
1406 pub fn end_transaction_at(&mut self, now: Instant) -> Option<(TransactionId, clock::Global)> {
1407 if let Some(entry) = self.history.end_transaction(now) {
1408 let since = entry.transaction.start.clone();
1409 let id = self.history.group().unwrap();
1410 Some((id, since))
1411 } else {
1412 None
1413 }
1414 }
1415
1416 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
1417 self.history.finalize_last_transaction()
1418 }
1419
1420 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
1421 self.history.group_until(transaction_id);
1422 }
1423
1424 pub fn base_text(&self) -> &Rope {
1425 &self.history.base_text
1426 }
1427
1428 pub fn operations(&self) -> &TreeMap<clock::Lamport, Operation> {
1429 &self.history.operations
1430 }
1431
1432 pub fn undo(&mut self) -> Option<(TransactionId, Operation)> {
1433 if let Some(entry) = self.history.pop_undo() {
1434 let transaction = entry.transaction.clone();
1435 let transaction_id = transaction.id;
1436 let op = self.undo_or_redo(transaction);
1437 Some((transaction_id, op))
1438 } else {
1439 None
1440 }
1441 }
1442
1443 pub fn undo_transaction(&mut self, transaction_id: TransactionId) -> Option<Operation> {
1444 let transaction = self
1445 .history
1446 .remove_from_undo(transaction_id)?
1447 .transaction
1448 .clone();
1449 Some(self.undo_or_redo(transaction))
1450 }
1451
1452 pub fn undo_to_transaction(&mut self, transaction_id: TransactionId) -> Vec<Operation> {
1453 let transactions = self
1454 .history
1455 .remove_from_undo_until(transaction_id)
1456 .iter()
1457 .map(|entry| entry.transaction.clone())
1458 .collect::<Vec<_>>();
1459
1460 transactions
1461 .into_iter()
1462 .map(|transaction| self.undo_or_redo(transaction))
1463 .collect()
1464 }
1465
1466 pub fn forget_transaction(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
1467 self.history.forget(transaction_id)
1468 }
1469
1470 pub fn get_transaction(&self, transaction_id: TransactionId) -> Option<&Transaction> {
1471 self.history.transaction(transaction_id)
1472 }
1473
1474 pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
1475 self.history.merge_transactions(transaction, destination);
1476 }
1477
1478 pub fn redo(&mut self) -> Option<(TransactionId, Operation)> {
1479 if let Some(entry) = self.history.pop_redo() {
1480 let transaction = entry.transaction.clone();
1481 let transaction_id = transaction.id;
1482 let op = self.undo_or_redo(transaction);
1483 Some((transaction_id, op))
1484 } else {
1485 None
1486 }
1487 }
1488
1489 pub fn redo_to_transaction(&mut self, transaction_id: TransactionId) -> Vec<Operation> {
1490 let transactions = self
1491 .history
1492 .remove_from_redo(transaction_id)
1493 .iter()
1494 .map(|entry| entry.transaction.clone())
1495 .collect::<Vec<_>>();
1496
1497 transactions
1498 .into_iter()
1499 .map(|transaction| self.undo_or_redo(transaction))
1500 .collect()
1501 }
1502
1503 fn undo_or_redo(&mut self, transaction: Transaction) -> Operation {
1504 let mut counts = HashMap::default();
1505 for edit_id in transaction.edit_ids {
1506 counts.insert(edit_id, self.undo_map.undo_count(edit_id).saturating_add(1));
1507 }
1508
1509 let operation = self.undo_operations(counts);
1510 self.history.push(operation.clone());
1511 operation
1512 }
1513
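    /// Builds an `UndoOperation` from a map of edit ids to undo counts,
    /// applies it locally, and returns it for replication. Each call to
    /// `undo_or_redo` bumps an edit's count by one; the undo map uses these
    /// counts to decide whether the affected fragments are currently visible.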
1514 pub fn undo_operations(&mut self, counts: HashMap<clock::Lamport, u32>) -> Operation {
1515 let timestamp = self.lamport_clock.tick();
1516 let version = self.version();
1517 self.snapshot.version.observe(timestamp);
1518 let undo = UndoOperation {
1519 timestamp,
1520 version,
1521 counts,
1522 };
1523 self.apply_undo(&undo);
1524 Operation::Undo(undo)
1525 }
1526
1527 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
1528 self.history.push_transaction(transaction, now);
1529 }
1530
    /// Differs from `push_transaction` in that it does not clear the redo
    /// stack. Intended to be used to create a parent transaction to merge
    /// potential child transactions into.
    ///
    /// The caller is responsible for removing it from the undo history using
    /// `forget_transaction` if no edits are merged into it. Otherwise, if edits
    /// are merged into this transaction, the caller is responsible for ensuring
    /// the redo stack is cleared. The easiest way to ensure this is to create
    /// transactions with the usual `start_transaction` and `end_transaction`
    /// methods and to merge the resulting transactions into the transaction
    /// created by this method.
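    ///
    /// # Example
    ///
    /// A usage sketch of the pattern described above (marked `ignore`; the
    /// buffer setup is assumed):
    ///
    /// ```ignore
    /// let parent = buffer.push_empty_transaction(Instant::now());
    /// // Prevent the child from being time-grouped into the parent automatically.
    /// buffer.finalize_last_transaction();
    /// buffer.start_transaction();
    /// buffer.edit([(0..0, "child edit")]);
    /// if let Some((child, _)) = buffer.end_transaction() {
    ///     buffer.merge_transactions(child, parent);
    /// } else {
    ///     buffer.forget_transaction(parent);
    /// }
    /// ```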
1544 pub fn push_empty_transaction(&mut self, now: Instant) -> TransactionId {
1545 self.history
1546 .push_empty_transaction(self.version.clone(), now, &mut self.lamport_clock)
1547 }
1548
1549 pub fn edited_ranges_for_transaction_id<D>(
1550 &self,
1551 transaction_id: TransactionId,
1552 ) -> impl '_ + Iterator<Item = Range<D>>
1553 where
1554 D: TextDimension,
1555 {
1556 self.history
1557 .transaction(transaction_id)
1558 .into_iter()
1559 .flat_map(|transaction| self.edited_ranges_for_transaction(transaction))
1560 }
1561
1562 pub fn edited_ranges_for_edit_ids<'a, D>(
1563 &'a self,
1564 edit_ids: impl IntoIterator<Item = &'a clock::Lamport>,
1565 ) -> impl 'a + Iterator<Item = Range<D>>
1566 where
1567 D: TextDimension,
1568 {
1569 // get fragment ranges
1570 let mut cursor = self
1571 .fragments
1572 .cursor::<Dimensions<Option<&Locator>, usize>>(&None);
1573 let offset_ranges = self
1574 .fragment_ids_for_edits(edit_ids.into_iter())
1575 .into_iter()
1576 .filter_map(move |fragment_id| {
1577 cursor.seek_forward(&Some(fragment_id), Bias::Left);
1578 let fragment = cursor.item()?;
1579 let start_offset = cursor.start().1;
1580 let end_offset = start_offset + if fragment.visible { fragment.len } else { 0 };
1581 Some(start_offset..end_offset)
1582 });
1583
1584 // combine adjacent ranges
1585 let mut prev_range: Option<Range<usize>> = None;
1586 let disjoint_ranges = offset_ranges
1587 .map(Some)
1588 .chain([None])
1589 .filter_map(move |range| {
1590 if let Some((range, prev_range)) = range.as_ref().zip(prev_range.as_mut())
1591 && prev_range.end == range.start
1592 {
1593 prev_range.end = range.end;
1594 return None;
1595 }
1596 let result = prev_range.clone();
1597 prev_range = range;
1598 result
1599 });
1600
1601 // convert to the desired text dimension.
1602 let mut position = D::zero(());
1603 let mut rope_cursor = self.visible_text.cursor(0);
1604 disjoint_ranges.map(move |range| {
1605 position.add_assign(&rope_cursor.summary(range.start));
1606 let start = position;
1607 position.add_assign(&rope_cursor.summary(range.end));
1608 let end = position;
1609 start..end
1610 })
1611 }
1612
1613 pub fn edited_ranges_for_transaction<'a, D>(
1614 &'a self,
1615 transaction: &'a Transaction,
1616 ) -> impl 'a + Iterator<Item = Range<D>>
1617 where
1618 D: TextDimension,
1619 {
1620 self.edited_ranges_for_edit_ids(&transaction.edit_ids)
1621 }
1622
1623 pub fn subscribe(&mut self) -> Subscription<usize> {
1624 self.subscriptions.subscribe()
1625 }
1626
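    /// Returns a future that resolves once every edit in `edit_ids` has been
    /// observed by this buffer, or fails if the buffer gives up waiting (see
    /// `give_up_waiting`).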
1627 pub fn wait_for_edits<It: IntoIterator<Item = clock::Lamport>>(
1628 &mut self,
1629 edit_ids: It,
1630 ) -> impl 'static + Future<Output = Result<()>> + use<It> {
1631 let mut futures = Vec::new();
1632 for edit_id in edit_ids {
1633 if !self.version.observed(edit_id) {
1634 let (tx, rx) = oneshot::channel();
1635 self.edit_id_resolvers.entry(edit_id).or_default().push(tx);
1636 futures.push(rx);
1637 }
1638 }
1639
1640 async move {
1641 for mut future in futures {
1642 if future.recv().await.is_none() {
1643 anyhow::bail!("gave up waiting for edits");
1644 }
1645 }
1646 Ok(())
1647 }
1648 }
1649
1650 pub fn wait_for_anchors<It: IntoIterator<Item = Anchor>>(
1651 &mut self,
1652 anchors: It,
1653 ) -> impl 'static + Future<Output = Result<()>> + use<It> {
1654 let mut futures = Vec::new();
1655 for anchor in anchors {
1656 if !self.version.observed(anchor.timestamp) && !anchor.is_max() && !anchor.is_min() {
1657 let (tx, rx) = oneshot::channel();
1658 self.edit_id_resolvers
1659 .entry(anchor.timestamp)
1660 .or_default()
1661 .push(tx);
1662 futures.push(rx);
1663 }
1664 }
1665
1666 async move {
1667 for mut future in futures {
1668 if future.recv().await.is_none() {
1669 anyhow::bail!("gave up waiting for anchors");
1670 }
1671 }
1672 Ok(())
1673 }
1674 }
1675
1676 pub fn wait_for_version(
1677 &mut self,
1678 version: clock::Global,
1679 ) -> impl Future<Output = Result<()>> + use<> {
1680 let mut rx = None;
1681 if !self.snapshot.version.observed_all(&version) {
1682 let channel = oneshot::channel();
1683 self.wait_for_version_txs.push((version, channel.0));
1684 rx = Some(channel.1);
1685 }
1686 async move {
1687 if let Some(mut rx) = rx
1688 && rx.recv().await.is_none()
1689 {
1690 anyhow::bail!("gave up waiting for version");
1691 }
1692 Ok(())
1693 }
1694 }
1695
1696 pub fn give_up_waiting(&mut self) {
1697 self.edit_id_resolvers.clear();
1698 self.wait_for_version_txs.clear();
1699 }
1700
1701 fn resolve_edit(&mut self, edit_id: clock::Lamport) {
1702 for mut tx in self
1703 .edit_id_resolvers
1704 .remove(&edit_id)
1705 .into_iter()
1706 .flatten()
1707 {
1708 tx.try_send(()).ok();
1709 }
1710 }
1711}
1712
1713#[cfg(any(test, feature = "test-support"))]
1714impl Buffer {
1715 #[track_caller]
1716 pub fn edit_via_marked_text(&mut self, marked_string: &str) {
1717 let edits = self.edits_for_marked_text(marked_string);
1718 self.edit(edits);
1719 }
1720
1721 #[track_caller]
1722 pub fn edits_for_marked_text(&self, marked_string: &str) -> Vec<(Range<usize>, String)> {
1723 let old_text = self.text();
1724 let (new_text, mut ranges) = util::test::marked_text_ranges(marked_string, false);
1725 if ranges.is_empty() {
1726 ranges.push(0..new_text.len());
1727 }
1728
1729 assert_eq!(
1730 old_text[..ranges[0].start],
1731 new_text[..ranges[0].start],
1732 "invalid edit"
1733 );
1734
1735 let mut delta = 0;
1736 let mut edits = Vec::new();
1737 let mut ranges = ranges.into_iter().peekable();
1738
1739 while let Some(inserted_range) = ranges.next() {
1740 let new_start = inserted_range.start;
1741 let old_start = (new_start as isize - delta) as usize;
1742
1743 let following_text = if let Some(next_range) = ranges.peek() {
1744 &new_text[inserted_range.end..next_range.start]
1745 } else {
1746 &new_text[inserted_range.end..]
1747 };
1748
1749 let inserted_len = inserted_range.len();
1750 let deleted_len = old_text[old_start..]
1751 .find(following_text)
1752 .expect("invalid edit");
1753
1754 let old_range = old_start..old_start + deleted_len;
1755 edits.push((old_range, new_text[inserted_range].to_string()));
1756 delta += inserted_len as isize - deleted_len as isize;
1757 }
1758
1759 assert_eq!(
1760 old_text.len() as isize + delta,
1761 new_text.len() as isize,
1762 "invalid edit"
1763 );
1764
1765 edits
1766 }
1767
1768 pub fn check_invariants(&self) {
1769 // Ensure every fragment is ordered by locator in the fragment tree and corresponds
1770 // to an insertion fragment in the insertions tree.
1771 let mut prev_fragment_id = Locator::min();
1772 for fragment in self.snapshot.fragments.items(&None) {
1773 assert!(fragment.id > prev_fragment_id);
1774 prev_fragment_id = fragment.id.clone();
1775
1776 let insertion_fragment = self
1777 .snapshot
1778 .insertions
1779 .get(
1780 &InsertionFragmentKey {
1781 timestamp: fragment.timestamp,
1782 split_offset: fragment.insertion_offset,
1783 },
1784 (),
1785 )
1786 .unwrap();
1787 assert_eq!(
1788 insertion_fragment.fragment_id, fragment.id,
1789 "fragment: {:?}\ninsertion: {:?}",
1790 fragment, insertion_fragment
1791 );
1792 }
1793
1794 let mut cursor = self.snapshot.fragments.cursor::<Option<&Locator>>(&None);
1795 for insertion_fragment in self.snapshot.insertions.cursor::<()>(()) {
1796 cursor.seek(&Some(&insertion_fragment.fragment_id), Bias::Left);
1797 let fragment = cursor.item().unwrap();
1798 assert_eq!(insertion_fragment.fragment_id, fragment.id);
1799 assert_eq!(insertion_fragment.split_offset, fragment.insertion_offset);
1800 }
1801
1802 let fragment_summary = self.snapshot.fragments.summary();
1803 assert_eq!(
1804 fragment_summary.text.visible,
1805 self.snapshot.visible_text.len()
1806 );
1807 assert_eq!(
1808 fragment_summary.text.deleted,
1809 self.snapshot.deleted_text.len()
1810 );
1811
1812 assert!(!self.text().contains("\r\n"));
1813 }
1814
1815 pub fn set_group_interval(&mut self, group_interval: Duration) {
1816 self.history.group_interval = group_interval;
1817 }
1818
1819 pub fn random_byte_range(&self, start_offset: usize, rng: &mut impl rand::Rng) -> Range<usize> {
1820 let end = self.clip_offset(rng.random_range(start_offset..=self.len()), Bias::Right);
1821 let start = self.clip_offset(rng.random_range(start_offset..=end), Bias::Right);
1822 start..end
1823 }
1824
1825 pub fn get_random_edits<T>(
1826 &self,
1827 rng: &mut T,
1828 edit_count: usize,
1829 ) -> Vec<(Range<usize>, Arc<str>)>
1830 where
1831 T: rand::Rng,
1832 {
1833 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
1834 let mut last_end = None;
1835 for _ in 0..edit_count {
1836 if last_end.is_some_and(|last_end| last_end >= self.len()) {
1837 break;
1838 }
1839 let new_start = last_end.map_or(0, |last_end| last_end + 1);
1840 let range = self.random_byte_range(new_start, rng);
1841 last_end = Some(range.end);
1842
1843 let new_text_len = rng.random_range(0..10);
1844 let new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
1845
1846 edits.push((range, new_text.into()));
1847 }
1848 edits
1849 }
1850
1851 pub fn randomly_edit<T>(
1852 &mut self,
1853 rng: &mut T,
1854 edit_count: usize,
1855 ) -> (Vec<(Range<usize>, Arc<str>)>, Operation)
1856 where
1857 T: rand::Rng,
1858 {
1859 let mut edits = self.get_random_edits(rng, edit_count);
1860 log::info!("mutating buffer {:?} with {:?}", self.replica_id, edits);
1861
1862 let op = self.edit(edits.iter().cloned());
1863 if let Operation::Edit(edit) = &op {
1864 assert_eq!(edits.len(), edit.new_text.len());
1865 for (edit, new_text) in edits.iter_mut().zip(&edit.new_text) {
1866 edit.1 = new_text.clone();
1867 }
1868 } else {
1869 unreachable!()
1870 }
1871
1872 (edits, op)
1873 }
1874
1875 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng) -> Vec<Operation> {
1876 use rand::prelude::*;
1877
1878 let mut ops = Vec::new();
1879 for _ in 0..rng.random_range(1..=5) {
1880 if let Some(entry) = self.history.undo_stack.choose(rng) {
1881 let transaction = entry.transaction.clone();
1882 log::info!(
1883 "undoing buffer {:?} transaction {:?}",
1884 self.replica_id,
1885 transaction
1886 );
1887 ops.push(self.undo_or_redo(transaction));
1888 }
1889 }
1890 ops
1891 }
1892}
1893
1894impl Deref for Buffer {
1895 type Target = BufferSnapshot;
1896
1897 fn deref(&self) -> &Self::Target {
1898 &self.snapshot
1899 }
1900}
1901
1902impl BufferSnapshot {
1903 pub fn as_rope(&self) -> &Rope {
1904 &self.visible_text
1905 }
1906
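    /// Reconstructs the buffer's text as it was at `version`, copying unchanged spans
    /// from the current visible text and restoring spans that have since been deleted
    /// from the deleted text.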
1907 pub fn rope_for_version(&self, version: &clock::Global) -> Rope {
1908 let mut rope = Rope::new();
1909
1910 let mut cursor = self
1911 .fragments
1912 .filter::<_, FragmentTextSummary>(&None, move |summary| {
1913 !version.observed_all(&summary.max_version)
1914 });
1915 cursor.next();
1916
1917 let mut visible_cursor = self.visible_text.cursor(0);
1918 let mut deleted_cursor = self.deleted_text.cursor(0);
1919
1920 while let Some(fragment) = cursor.item() {
1921 if cursor.start().visible > visible_cursor.offset() {
1922 let text = visible_cursor.slice(cursor.start().visible);
1923 rope.append(text);
1924 }
1925
1926 if fragment.was_visible(version, &self.undo_map) {
1927 if fragment.visible {
1928 let text = visible_cursor.slice(cursor.end().visible);
1929 rope.append(text);
1930 } else {
1931 deleted_cursor.seek_forward(cursor.start().deleted);
1932 let text = deleted_cursor.slice(cursor.end().deleted);
1933 rope.append(text);
1934 }
1935 } else if fragment.visible {
1936 visible_cursor.seek_forward(cursor.end().visible);
1937 }
1938
1939 cursor.next();
1940 }
1941
1942 if cursor.start().visible > visible_cursor.offset() {
1943 let text = visible_cursor.slice(cursor.start().visible);
1944 rope.append(text);
1945 }
1946
1947 rope
1948 }
1949
1950 pub fn remote_id(&self) -> BufferId {
1951 self.remote_id
1952 }
1953
1954 pub fn replica_id(&self) -> ReplicaId {
1955 self.replica_id
1956 }
1957
1958 pub fn row_count(&self) -> u32 {
1959 self.max_point().row + 1
1960 }
1961
1962 pub fn len(&self) -> usize {
1963 self.visible_text.len()
1964 }
1965
1966 pub fn is_empty(&self) -> bool {
1967 self.len() == 0
1968 }
1969
1970 pub fn chars(&self) -> impl Iterator<Item = char> + '_ {
1971 self.chars_at(0)
1972 }
1973
1974 pub fn chars_for_range<T: ToOffset>(&self, range: Range<T>) -> impl Iterator<Item = char> + '_ {
1975 self.text_for_range(range).flat_map(str::chars)
1976 }
1977
1978 pub fn reversed_chars_for_range<T: ToOffset>(
1979 &self,
1980 range: Range<T>,
1981 ) -> impl Iterator<Item = char> + '_ {
1982 self.reversed_chunks_in_range(range)
1983 .flat_map(|chunk| chunk.chars().rev())
1984 }
1985
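    /// Returns whether the text at `position` starts with `needle`, comparing bytes.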
1986 pub fn contains_str_at<T>(&self, position: T, needle: &str) -> bool
1987 where
1988 T: ToOffset,
1989 {
1990 let position = position.to_offset(self);
1991 position == self.clip_offset(position, Bias::Left)
1992 && self
1993 .bytes_in_range(position..self.len())
1994 .flatten()
1995 .copied()
1996 .take(needle.len())
1997 .eq(needle.bytes())
1998 }
1999
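    /// Returns the range of text ending at `position` that case-insensitively matches
    /// the longest possible prefix of `needle`.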
2000 pub fn common_prefix_at<T>(&self, position: T, needle: &str) -> Range<T>
2001 where
2002 T: ToOffset + TextDimension,
2003 {
2004 let offset = position.to_offset(self);
2005 let common_prefix_len = needle
2006 .char_indices()
2007 .map(|(index, _)| index)
2008 .chain([needle.len()])
2009 .take_while(|&len| len <= offset)
2010 .filter(|&len| {
2011 let left = self
2012 .chars_for_range(offset - len..offset)
2013 .flat_map(char::to_lowercase);
2014 let right = needle[..len].chars().flat_map(char::to_lowercase);
2015 left.eq(right)
2016 })
2017 .last()
2018 .unwrap_or(0);
2019 let start_offset = offset - common_prefix_len;
2020 let start = self.text_summary_for_range(0..start_offset);
2021 start..position
2022 }
2023
2024 pub fn text(&self) -> String {
2025 self.visible_text.to_string()
2026 }
2027
2028 pub fn line_ending(&self) -> LineEnding {
2029 self.line_ending
2030 }
2031
2032 pub fn deleted_text(&self) -> String {
2033 self.deleted_text.to_string()
2034 }
2035
2036 pub fn fragments(&self) -> impl Iterator<Item = &Fragment> {
2037 self.fragments.iter()
2038 }
2039
2040 pub fn text_summary(&self) -> TextSummary {
2041 self.visible_text.summary()
2042 }
2043
2044 pub fn max_point(&self) -> Point {
2045 self.visible_text.max_point()
2046 }
2047
2048 pub fn max_point_utf16(&self) -> PointUtf16 {
2049 self.visible_text.max_point_utf16()
2050 }
2051
2052 pub fn point_to_offset(&self, point: Point) -> usize {
2053 self.visible_text.point_to_offset(point)
2054 }
2055
2056 pub fn point_to_offset_utf16(&self, point: Point) -> OffsetUtf16 {
2057 self.visible_text.point_to_offset_utf16(point)
2058 }
2059
2060 pub fn point_utf16_to_offset_utf16(&self, point: PointUtf16) -> OffsetUtf16 {
2061 self.visible_text.point_utf16_to_offset_utf16(point)
2062 }
2063
2064 pub fn point_utf16_to_offset(&self, point: PointUtf16) -> usize {
2065 self.visible_text.point_utf16_to_offset(point)
2066 }
2067
2068 pub fn unclipped_point_utf16_to_offset(&self, point: Unclipped<PointUtf16>) -> usize {
2069 self.visible_text.unclipped_point_utf16_to_offset(point)
2070 }
2071
2072 pub fn unclipped_point_utf16_to_point(&self, point: Unclipped<PointUtf16>) -> Point {
2073 self.visible_text.unclipped_point_utf16_to_point(point)
2074 }
2075
2076 pub fn offset_utf16_to_offset(&self, offset: OffsetUtf16) -> usize {
2077 self.visible_text.offset_utf16_to_offset(offset)
2078 }
2079
2080 pub fn offset_to_offset_utf16(&self, offset: usize) -> OffsetUtf16 {
2081 self.visible_text.offset_to_offset_utf16(offset)
2082 }
2083
2084 pub fn offset_to_point(&self, offset: usize) -> Point {
2085 self.visible_text.offset_to_point(offset)
2086 }
2087
2088 pub fn offset_to_point_utf16(&self, offset: usize) -> PointUtf16 {
2089 self.visible_text.offset_to_point_utf16(offset)
2090 }
2091
2092 pub fn point_to_point_utf16(&self, point: Point) -> PointUtf16 {
2093 self.visible_text.point_to_point_utf16(point)
2094 }
2095
2096 pub fn point_utf16_to_point(&self, point: PointUtf16) -> Point {
2097 self.visible_text.point_utf16_to_point(point)
2098 }
2099
2100 pub fn version(&self) -> &clock::Global {
2101 &self.version
2102 }
2103
2104 pub fn chars_at<T: ToOffset>(&self, position: T) -> impl Iterator<Item = char> + '_ {
2105 let offset = position.to_offset(self);
2106 self.visible_text.chars_at(offset)
2107 }
2108
2109 pub fn reversed_chars_at<T: ToOffset>(&self, position: T) -> impl Iterator<Item = char> + '_ {
2110 let offset = position.to_offset(self);
2111 self.visible_text.reversed_chars_at(offset)
2112 }
2113
2114 pub fn reversed_chunks_in_range<T: ToOffset>(&self, range: Range<T>) -> rope::Chunks<'_> {
2115 let range = range.start.to_offset(self)..range.end.to_offset(self);
2116 self.visible_text.reversed_chunks_in_range(range)
2117 }
2118
2119 pub fn bytes_in_range<T: ToOffset>(&self, range: Range<T>) -> rope::Bytes<'_> {
2120 let start = range.start.to_offset(self);
2121 let end = range.end.to_offset(self);
2122 self.visible_text.bytes_in_range(start..end)
2123 }
2124
2125 pub fn reversed_bytes_in_range<T: ToOffset>(&self, range: Range<T>) -> rope::Bytes<'_> {
2126 let start = range.start.to_offset(self);
2127 let end = range.end.to_offset(self);
2128 self.visible_text.reversed_bytes_in_range(start..end)
2129 }
2130
2131 pub fn text_for_range<T: ToOffset>(&self, range: Range<T>) -> Chunks<'_> {
2132 let start = range.start.to_offset(self);
2133 let end = range.end.to_offset(self);
2134 self.visible_text.chunks_in_range(start..end)
2135 }
2136
2137 pub fn line_len(&self, row: u32) -> u32 {
2138 let row_start_offset = Point::new(row, 0).to_offset(self);
2139 let row_end_offset = if row >= self.max_point().row {
2140 self.len()
2141 } else {
2142 Point::new(row + 1, 0).to_previous_offset(self)
2143 };
2144 (row_end_offset - row_start_offset) as u32
2145 }
2146
2147 pub fn line_indents_in_row_range(
2148 &self,
2149 row_range: Range<u32>,
2150 ) -> impl Iterator<Item = (u32, LineIndent)> + '_ {
2151 let start = Point::new(row_range.start, 0).to_offset(self);
2152 let end = Point::new(row_range.end, self.line_len(row_range.end)).to_offset(self);
2153
2154 let mut chunks = self.as_rope().chunks_in_range(start..end);
2155 let mut row = row_range.start;
2156 let mut done = false;
2157 std::iter::from_fn(move || {
2158 if done {
2159 None
2160 } else {
2161 let indent = (row, LineIndent::from_chunks(&mut chunks));
2162 done = !chunks.next_line();
2163 row += 1;
2164 Some(indent)
2165 }
2166 })
2167 }
2168
    /// Returns the line indents in the given row range, exclusive of the end row, in reverse order.
2170 pub fn reversed_line_indents_in_row_range(
2171 &self,
2172 row_range: Range<u32>,
2173 ) -> impl Iterator<Item = (u32, LineIndent)> + '_ {
2174 let start = Point::new(row_range.start, 0).to_offset(self);
2175
        let (end_point, end) = if row_range.end > row_range.start {
            let end_point = Point::new(row_range.end - 1, self.line_len(row_range.end - 1));
            (end_point, end_point.to_offset(self))
        } else {
            (Point::new(row_range.start, 0), start)
        };
2185
2186 let mut chunks = self.as_rope().chunks_in_range(start..end);
2187 // Move the cursor to the start of the last line if it's not empty.
2188 chunks.seek(end);
2189 if end_point.column > 0 {
2190 chunks.prev_line();
2191 }
2192
2193 let mut row = end_point.row;
2194 let mut done = false;
2195 std::iter::from_fn(move || {
2196 if done {
2197 None
2198 } else {
2199 let initial_offset = chunks.offset();
2200 let indent = (row, LineIndent::from_chunks(&mut chunks));
2201 if chunks.offset() > initial_offset {
2202 chunks.prev_line();
2203 }
2204 done = !chunks.prev_line();
2205 if !done {
2206 row -= 1;
2207 }
2208
2209 Some(indent)
2210 }
2211 })
2212 }
2213
2214 pub fn line_indent_for_row(&self, row: u32) -> LineIndent {
2215 LineIndent::from_iter(self.chars_at(Point::new(row, 0)))
2216 }
2217
2218 pub fn is_line_blank(&self, row: u32) -> bool {
2219 self.text_for_range(Point::new(row, 0)..Point::new(row, self.line_len(row)))
2220 .all(|chunk| chunk.matches(|c: char| !c.is_whitespace()).next().is_none())
2221 }
2222
2223 pub fn text_summary_for_range<D, O: ToOffset>(&self, range: Range<O>) -> D
2224 where
2225 D: TextDimension,
2226 {
2227 self.visible_text
2228 .cursor(range.start.to_offset(self))
2229 .summary(range.end.to_offset(self))
2230 }
2231
2232 pub fn summaries_for_anchors<'a, D, A>(&'a self, anchors: A) -> impl 'a + Iterator<Item = D>
2233 where
2234 D: 'a + TextDimension,
2235 A: 'a + IntoIterator<Item = &'a Anchor>,
2236 {
2237 let anchors = anchors.into_iter();
2238 self.summaries_for_anchors_with_payload::<D, _, ()>(anchors.map(|a| (a, ())))
2239 .map(|d| d.0)
2240 }
2241
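    /// Resolves each anchor to a text dimension `D`, passing the associated payload
    /// through unchanged. Anchors are expected in non-decreasing buffer order, since
    /// the internal cursors only advance forward.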
2242 pub fn summaries_for_anchors_with_payload<'a, D, A, T>(
2243 &'a self,
2244 anchors: A,
2245 ) -> impl 'a + Iterator<Item = (D, T)>
2246 where
2247 D: 'a + TextDimension,
2248 A: 'a + IntoIterator<Item = (&'a Anchor, T)>,
2249 {
2250 let anchors = anchors.into_iter();
2251 let mut insertion_cursor = self.insertions.cursor::<InsertionFragmentKey>(());
2252 let mut fragment_cursor = self
2253 .fragments
2254 .cursor::<Dimensions<Option<&Locator>, usize>>(&None);
2255 let mut text_cursor = self.visible_text.cursor(0);
2256 let mut position = D::zero(());
2257
2258 anchors.map(move |(anchor, payload)| {
2259 if anchor.is_min() {
2260 return (D::zero(()), payload);
2261 } else if anchor.is_max() {
2262 return (D::from_text_summary(&self.visible_text.summary()), payload);
2263 }
2264
2265 let anchor_key = InsertionFragmentKey {
2266 timestamp: anchor.timestamp,
2267 split_offset: anchor.offset,
2268 };
2269 insertion_cursor.seek(&anchor_key, anchor.bias);
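            // If the cursor landed on an insertion fragment that starts after the anchor's
            // offset (or exactly at it, for a left-biased anchor with a nonzero offset),
            // the anchor belongs to the previous insertion fragment.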
2270 if let Some(insertion) = insertion_cursor.item() {
2271 let comparison = sum_tree::KeyedItem::key(insertion).cmp(&anchor_key);
2272 if comparison == Ordering::Greater
2273 || (anchor.bias == Bias::Left
2274 && comparison == Ordering::Equal
2275 && anchor.offset > 0)
2276 {
2277 insertion_cursor.prev();
2278 }
2279 } else {
2280 insertion_cursor.prev();
2281 }
2282 let Some(insertion) = insertion_cursor.item() else {
2283 panic!(
2284 "invalid insertion for buffer {}@{:?} with anchor {:?}",
2285 self.remote_id(),
2286 self.version,
2287 anchor
2288 );
2289 };
2290 assert_eq!(
2291 insertion.timestamp,
2292 anchor.timestamp,
2293 "invalid insertion for buffer {}@{:?} and anchor {:?}",
2294 self.remote_id(),
2295 self.version,
2296 anchor
2297 );
2298
2299 fragment_cursor.seek_forward(&Some(&insertion.fragment_id), Bias::Left);
2300 let fragment = fragment_cursor.item().unwrap();
2301 let mut fragment_offset = fragment_cursor.start().1;
2302 if fragment.visible {
2303 fragment_offset += anchor.offset - insertion.split_offset;
2304 }
2305
2306 position.add_assign(&text_cursor.summary(fragment_offset));
2307 (position, payload)
2308 })
2309 }
2310
2311 pub fn summary_for_anchor<D>(&self, anchor: &Anchor) -> D
2312 where
2313 D: TextDimension,
2314 {
2315 self.text_summary_for_range(0..self.offset_for_anchor(anchor))
2316 }
2317
2318 pub fn offset_for_anchor(&self, anchor: &Anchor) -> usize {
2319 if anchor.is_min() {
2320 0
2321 } else if anchor.is_max() {
2322 self.visible_text.len()
2323 } else {
2324 debug_assert_eq!(anchor.buffer_id, Some(self.remote_id));
2325 debug_assert!(
2326 self.version.observed(anchor.timestamp),
2327 "Anchor timestamp {:?} not observed by buffer {:?}",
2328 anchor.timestamp,
2329 self.version
2330 );
2331 let anchor_key = InsertionFragmentKey {
2332 timestamp: anchor.timestamp,
2333 split_offset: anchor.offset,
2334 };
2335 let mut insertion_cursor = self.insertions.cursor::<InsertionFragmentKey>(());
2336 insertion_cursor.seek(&anchor_key, anchor.bias);
2337 if let Some(insertion) = insertion_cursor.item() {
2338 let comparison = sum_tree::KeyedItem::key(insertion).cmp(&anchor_key);
2339 if comparison == Ordering::Greater
2340 || (anchor.bias == Bias::Left
2341 && comparison == Ordering::Equal
2342 && anchor.offset > 0)
2343 {
2344 insertion_cursor.prev();
2345 }
2346 } else {
2347 insertion_cursor.prev();
2348 }
2349
2350 let Some(insertion) = insertion_cursor
2351 .item()
2352 .filter(|insertion| insertion.timestamp == anchor.timestamp)
2353 else {
2354 self.panic_bad_anchor(anchor);
2355 };
2356
2357 let (start, _, item) = self
2358 .fragments
2359 .find::<Dimensions<Option<&Locator>, usize>, _>(
2360 &None,
2361 &Some(&insertion.fragment_id),
2362 Bias::Left,
2363 );
2364 let fragment = item.unwrap();
2365 let mut fragment_offset = start.1;
2366 if fragment.visible {
2367 fragment_offset += anchor.offset - insertion.split_offset;
2368 }
2369 fragment_offset
2370 }
2371 }
2372
2373 #[cold]
2374 fn panic_bad_anchor(&self, anchor: &Anchor) -> ! {
2375 if anchor.buffer_id.is_some_and(|id| id != self.remote_id) {
2376 panic!(
2377 "invalid anchor - buffer id does not match: anchor {anchor:?}; buffer id: {}, version: {:?}",
2378 self.remote_id, self.version
2379 );
2380 } else if !self.version.observed(anchor.timestamp) {
2381 panic!(
2382 "invalid anchor - snapshot has not observed lamport: {:?}; version: {:?}",
2383 anchor, self.version
2384 );
2385 } else {
2386 panic!(
2387 "invalid anchor {:?}. buffer id: {}, version: {:?}",
2388 anchor, self.remote_id, self.version
2389 );
2390 }
2391 }
2392
2393 fn fragment_id_for_anchor(&self, anchor: &Anchor) -> &Locator {
2394 self.try_fragment_id_for_anchor(anchor)
2395 .unwrap_or_else(|| self.panic_bad_anchor(anchor))
2396 }
2397
2398 fn try_fragment_id_for_anchor(&self, anchor: &Anchor) -> Option<&Locator> {
2399 if anchor.is_min() {
2400 Some(Locator::min_ref())
2401 } else if anchor.is_max() {
2402 Some(Locator::max_ref())
2403 } else {
2404 let anchor_key = InsertionFragmentKey {
2405 timestamp: anchor.timestamp,
2406 split_offset: anchor.offset,
2407 };
2408 let mut insertion_cursor = self.insertions.cursor::<InsertionFragmentKey>(());
2409 insertion_cursor.seek(&anchor_key, anchor.bias);
2410 if let Some(insertion) = insertion_cursor.item() {
2411 let comparison = sum_tree::KeyedItem::key(insertion).cmp(&anchor_key);
2412 if comparison == Ordering::Greater
2413 || (anchor.bias == Bias::Left
2414 && comparison == Ordering::Equal
2415 && anchor.offset > 0)
2416 {
2417 insertion_cursor.prev();
2418 }
2419 } else {
2420 insertion_cursor.prev();
2421 }
2422
2423 insertion_cursor
2424 .item()
2425 .filter(|insertion| {
2426 !cfg!(debug_assertions) || insertion.timestamp == anchor.timestamp
2427 })
2428 .map(|insertion| &insertion.fragment_id)
2429 }
2430 }
2431
2432 pub fn anchor_before<T: ToOffset>(&self, position: T) -> Anchor {
2433 self.anchor_at(position, Bias::Left)
2434 }
2435
2436 pub fn anchor_after<T: ToOffset>(&self, position: T) -> Anchor {
2437 self.anchor_at(position, Bias::Right)
2438 }
2439
2440 pub fn anchor_at<T: ToOffset>(&self, position: T, bias: Bias) -> Anchor {
2441 self.anchor_at_offset(position.to_offset(self), bias)
2442 }
2443
2444 fn anchor_at_offset(&self, mut offset: usize, bias: Bias) -> Anchor {
2445 if bias == Bias::Left && offset == 0 {
2446 Anchor::min_for_buffer(self.remote_id)
2447 } else if bias == Bias::Right
2448 && ((!cfg!(debug_assertions) && offset >= self.len()) || offset == self.len())
2449 {
2450 Anchor::max_for_buffer(self.remote_id)
2451 } else {
2452 if self
2453 .visible_text
2454 .assert_char_boundary::<{ cfg!(debug_assertions) }>(offset)
2455 {
2456 offset = match bias {
2457 Bias::Left => self.visible_text.floor_char_boundary(offset),
2458 Bias::Right => self.visible_text.ceil_char_boundary(offset),
2459 };
2460 }
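            // Locate the fragment containing `offset`; the anchor records the fragment's
            // insertion timestamp plus the position's offset within that original insertion.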
2461 let (start, _, item) = self.fragments.find::<usize, _>(&None, &offset, bias);
2462 let Some(fragment) = item else {
2463 // We got a bad offset, likely out of bounds
2464 debug_panic!(
2465 "Failed to find fragment at offset {} (len: {})",
2466 offset,
2467 self.len()
2468 );
2469 return Anchor::max_for_buffer(self.remote_id);
2470 };
2471 let overshoot = offset - start;
2472 Anchor {
2473 timestamp: fragment.timestamp,
2474 offset: fragment.insertion_offset + overshoot,
2475 bias,
2476 buffer_id: Some(self.remote_id),
2477 }
2478 }
2479 }
2480
2481 pub fn can_resolve(&self, anchor: &Anchor) -> bool {
2482 anchor.is_min()
2483 || anchor.is_max()
2484 || (Some(self.remote_id) == anchor.buffer_id && self.version.observed(anchor.timestamp))
2485 }
2486
2487 pub fn clip_offset(&self, offset: usize, bias: Bias) -> usize {
2488 self.visible_text.clip_offset(offset, bias)
2489 }
2490
2491 pub fn clip_point(&self, point: Point, bias: Bias) -> Point {
2492 self.visible_text.clip_point(point, bias)
2493 }
2494
2495 pub fn clip_offset_utf16(&self, offset: OffsetUtf16, bias: Bias) -> OffsetUtf16 {
2496 self.visible_text.clip_offset_utf16(offset, bias)
2497 }
2498
2499 pub fn clip_point_utf16(&self, point: Unclipped<PointUtf16>, bias: Bias) -> PointUtf16 {
2500 self.visible_text.clip_point_utf16(point, bias)
2501 }
2502
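    /// Returns the edits that transform the text as it was at `since` into the current
    /// text, with old ranges in the old text's coordinates and new ranges in the current
    /// text's coordinates, both expressed in dimension `D`.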
2503 pub fn edits_since<'a, D>(
2504 &'a self,
2505 since: &'a clock::Global,
2506 ) -> impl 'a + Iterator<Item = Edit<D>>
2507 where
2508 D: TextDimension + Ord,
2509 {
2510 self.edits_since_in_range(since, Anchor::MIN..Anchor::MAX)
2511 }
2512
2513 pub fn anchored_edits_since<'a, D>(
2514 &'a self,
2515 since: &'a clock::Global,
2516 ) -> impl 'a + Iterator<Item = (Edit<D>, Range<Anchor>)>
2517 where
2518 D: TextDimension + Ord,
2519 {
2520 self.anchored_edits_since_in_range(since, Anchor::MIN..Anchor::MAX)
2521 }
2522
2523 pub fn edits_since_in_range<'a, D>(
2524 &'a self,
2525 since: &'a clock::Global,
2526 range: Range<Anchor>,
2527 ) -> impl 'a + Iterator<Item = Edit<D>>
2528 where
2529 D: TextDimension + Ord,
2530 {
2531 self.anchored_edits_since_in_range(since, range)
2532 .map(|item| item.0)
2533 }
2534
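    /// Like `edits_since_in_range`, but each edit is paired with an anchor range covering
    /// the fragments that produced it.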
2535 pub fn anchored_edits_since_in_range<'a, D>(
2536 &'a self,
2537 since: &'a clock::Global,
2538 range: Range<Anchor>,
2539 ) -> impl 'a + Iterator<Item = (Edit<D>, Range<Anchor>)>
2540 where
2541 D: TextDimension + Ord,
2542 {
2543 let fragments_cursor = if *since == self.version {
2544 None
2545 } else {
2546 let mut cursor = self.fragments.filter(&None, move |summary| {
2547 !since.observed_all(&summary.max_version)
2548 });
2549 cursor.next();
2550 Some(cursor)
2551 };
2552 let start_fragment_id = self.fragment_id_for_anchor(&range.start);
2553 let (start, _, item) = self
2554 .fragments
2555 .find::<Dimensions<Option<&Locator>, FragmentTextSummary>, _>(
2556 &None,
2557 &Some(start_fragment_id),
2558 Bias::Left,
2559 );
2560 let mut visible_start = start.1.visible;
2561 let mut deleted_start = start.1.deleted;
2562 if let Some(fragment) = item {
2563 let overshoot = range.start.offset - fragment.insertion_offset;
2564 if fragment.visible {
2565 visible_start += overshoot;
2566 } else {
2567 deleted_start += overshoot;
2568 }
2569 }
2570 let end_fragment_id = self.fragment_id_for_anchor(&range.end);
2571
2572 Edits {
2573 visible_cursor: self.visible_text.cursor(visible_start),
2574 deleted_cursor: self.deleted_text.cursor(deleted_start),
2575 fragments_cursor,
2576 undos: &self.undo_map,
2577 since,
2578 old_end: D::zero(()),
2579 new_end: D::zero(()),
2580 range: (start_fragment_id, range.start.offset)..(end_fragment_id, range.end.offset),
2581 buffer_id: self.remote_id,
2582 }
2583 }
2584
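    /// Returns whether any fragment within `range` has changed visibility since `since`.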
2585 pub fn has_edits_since_in_range(&self, since: &clock::Global, range: Range<Anchor>) -> bool {
2586 if *since != self.version {
2587 let start_fragment_id = self.fragment_id_for_anchor(&range.start);
2588 let end_fragment_id = self.fragment_id_for_anchor(&range.end);
2589 let mut cursor = self.fragments.filter::<_, usize>(&None, move |summary| {
2590 !since.observed_all(&summary.max_version)
2591 });
2592 cursor.next();
2593 while let Some(fragment) = cursor.item() {
2594 if fragment.id > *end_fragment_id {
2595 break;
2596 }
2597 if fragment.id > *start_fragment_id {
2598 let was_visible = fragment.was_visible(since, &self.undo_map);
2599 let is_visible = fragment.visible;
2600 if was_visible != is_visible {
2601 return true;
2602 }
2603 }
2604 cursor.next();
2605 }
2606 }
2607 false
2608 }
2609
2610 pub fn has_edits_since(&self, since: &clock::Global) -> bool {
2611 if *since != self.version {
2612 let mut cursor = self.fragments.filter::<_, usize>(&None, move |summary| {
2613 !since.observed_all(&summary.max_version)
2614 });
2615 cursor.next();
2616 while let Some(fragment) = cursor.item() {
2617 let was_visible = fragment.was_visible(since, &self.undo_map);
2618 let is_visible = fragment.visible;
2619 if was_visible != is_visible {
2620 return true;
2621 }
2622 cursor.next();
2623 }
2624 }
2625 false
2626 }
2627
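    /// Converts a range of offsets in the current text into the corresponding range at
    /// `version`.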
2628 pub fn range_to_version(&self, range: Range<usize>, version: &clock::Global) -> Range<usize> {
2629 let mut offsets = self.offsets_to_version([range.start, range.end], version);
2630 offsets.next().unwrap()..offsets.next().unwrap()
2631 }
2632
    /// Converts the given sequence of offsets into their corresponding offsets
    /// at a prior version of this buffer. Offsets must be provided in
    /// non-decreasing order.
2635 pub fn offsets_to_version<'a>(
2636 &'a self,
2637 offsets: impl 'a + IntoIterator<Item = usize>,
2638 version: &'a clock::Global,
2639 ) -> impl 'a + Iterator<Item = usize> {
2640 let mut edits = self.edits_since(version).peekable();
2641 let mut last_old_end = 0;
2642 let mut last_new_end = 0;
2643 offsets.into_iter().map(move |new_offset| {
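            // Walk the edits made since `version` in order. Edits that end at or before
            // `new_offset` shift the mapping; an edit that contains `new_offset` clamps it
            // to that edit's old range.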
2644 while let Some(edit) = edits.peek() {
2645 if edit.new.start > new_offset {
2646 break;
2647 }
2648
2649 if edit.new.end <= new_offset {
2650 last_new_end = edit.new.end;
2651 last_old_end = edit.old.end;
2652 edits.next();
2653 continue;
2654 }
2655
2656 let overshoot = new_offset - edit.new.start;
2657 return (edit.old.start + overshoot).min(edit.old.end);
2658 }
2659
2660 last_old_end + new_offset.saturating_sub(last_new_end)
2661 })
2662 }
2663
    /// Visually annotates a position or range with the `Debug` representation of a value.
    /// The callsite of this function is used as the key; previous annotations with the
    /// same key will be removed.
2666 #[cfg(debug_assertions)]
2667 #[track_caller]
2668 pub fn debug<R, V>(&self, ranges: &R, value: V)
2669 where
2670 R: debug::ToDebugRanges,
2671 V: std::fmt::Debug,
2672 {
2673 self.debug_with_key(std::panic::Location::caller(), ranges, value);
2674 }
2675
2676 /// Visually annotates a position or range with the `Debug` representation of a value. Previous
2677 /// debug annotations with the same key will be removed. The key is also used to determine the
2678 /// annotation's color.
2679 #[cfg(debug_assertions)]
2680 pub fn debug_with_key<K, R, V>(&self, key: &K, ranges: &R, value: V)
2681 where
2682 K: std::hash::Hash + 'static,
2683 R: debug::ToDebugRanges,
2684 V: std::fmt::Debug,
2685 {
2686 let ranges = ranges
2687 .to_debug_ranges(self)
2688 .into_iter()
2689 .map(|range| self.anchor_after(range.start)..self.anchor_before(range.end))
2690 .collect();
2691 debug::GlobalDebugRanges::with_locked(|debug_ranges| {
2692 debug_ranges.insert(key, ranges, format!("{value:?}").into());
2693 });
2694 }
2695}
2696
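/// Incrementally builds updated visible and deleted ropes by copying spans from cursors
/// over the previous ropes and appending newly inserted text.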
2697struct RopeBuilder<'a> {
2698 old_visible_cursor: rope::Cursor<'a>,
2699 old_deleted_cursor: rope::Cursor<'a>,
2700 new_visible: Rope,
2701 new_deleted: Rope,
2702}
2703
2704impl<'a> RopeBuilder<'a> {
2705 fn new(old_visible_cursor: rope::Cursor<'a>, old_deleted_cursor: rope::Cursor<'a>) -> Self {
2706 Self {
2707 old_visible_cursor,
2708 old_deleted_cursor,
2709 new_visible: Rope::new(),
2710 new_deleted: Rope::new(),
2711 }
2712 }
2713
2714 fn append(&mut self, len: FragmentTextSummary) {
2715 self.push(len.visible, true, true);
2716 self.push(len.deleted, false, false);
2717 }
2718
2719 fn push_fragment(&mut self, fragment: &Fragment, was_visible: bool) {
2720 debug_assert!(fragment.len > 0);
2721 self.push(fragment.len, was_visible, fragment.visible)
2722 }
2723
2724 fn push(&mut self, len: usize, was_visible: bool, is_visible: bool) {
2725 let text = if was_visible {
2726 self.old_visible_cursor
2727 .slice(self.old_visible_cursor.offset() + len)
2728 } else {
2729 self.old_deleted_cursor
2730 .slice(self.old_deleted_cursor.offset() + len)
2731 };
2732 if is_visible {
2733 self.new_visible.append(text);
2734 } else {
2735 self.new_deleted.append(text);
2736 }
2737 }
2738
2739 fn push_str(&mut self, text: &str) {
2740 self.new_visible.push(text);
2741 }
2742
2743 fn finish(mut self) -> (Rope, Rope) {
2744 self.new_visible.append(self.old_visible_cursor.suffix());
2745 self.new_deleted.append(self.old_deleted_cursor.suffix());
2746 (self.new_visible, self.new_deleted)
2747 }
2748}
2749
2750impl<D: TextDimension + Ord, F: FnMut(&FragmentSummary) -> bool> Iterator for Edits<'_, D, F> {
2751 type Item = (Edit<D>, Range<Anchor>);
2752
2753 fn next(&mut self) -> Option<Self::Item> {
2754 let mut pending_edit: Option<Self::Item> = None;
2755 let cursor = self.fragments_cursor.as_mut()?;
2756
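        // Walk fragments whose visibility changed since `self.since`, coalescing adjacent
        // insertions and deletions into a single `Edit`. A run of unchanged visible text
        // between changed fragments ends the pending edit.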
2757 while let Some(fragment) = cursor.item() {
2758 if fragment.id < *self.range.start.0 {
2759 cursor.next();
2760 continue;
2761 } else if fragment.id > *self.range.end.0 {
2762 break;
2763 }
2764
2765 if cursor.start().visible > self.visible_cursor.offset() {
2766 let summary = self.visible_cursor.summary(cursor.start().visible);
2767 self.old_end.add_assign(&summary);
2768 self.new_end.add_assign(&summary);
2769 }
2770
2771 if pending_edit
2772 .as_ref()
2773 .is_some_and(|(change, _)| change.new.end < self.new_end)
2774 {
2775 break;
2776 }
2777
2778 let start_anchor = Anchor {
2779 timestamp: fragment.timestamp,
2780 offset: fragment.insertion_offset,
2781 bias: Bias::Right,
2782 buffer_id: Some(self.buffer_id),
2783 };
2784 let end_anchor = Anchor {
2785 timestamp: fragment.timestamp,
2786 offset: fragment.insertion_offset + fragment.len,
2787 bias: Bias::Left,
2788 buffer_id: Some(self.buffer_id),
2789 };
2790
2791 if !fragment.was_visible(self.since, self.undos) && fragment.visible {
2792 let mut visible_end = cursor.end().visible;
2793 if fragment.id == *self.range.end.0 {
2794 visible_end = cmp::min(
2795 visible_end,
2796 cursor.start().visible + (self.range.end.1 - fragment.insertion_offset),
2797 );
2798 }
2799
2800 let fragment_summary = self.visible_cursor.summary(visible_end);
2801 let mut new_end = self.new_end;
2802 new_end.add_assign(&fragment_summary);
2803 if let Some((edit, range)) = pending_edit.as_mut() {
2804 edit.new.end = new_end;
2805 range.end = end_anchor;
2806 } else {
2807 pending_edit = Some((
2808 Edit {
2809 old: self.old_end..self.old_end,
2810 new: self.new_end..new_end,
2811 },
2812 start_anchor..end_anchor,
2813 ));
2814 }
2815
2816 self.new_end = new_end;
2817 } else if fragment.was_visible(self.since, self.undos) && !fragment.visible {
2818 let mut deleted_end = cursor.end().deleted;
2819 if fragment.id == *self.range.end.0 {
2820 deleted_end = cmp::min(
2821 deleted_end,
2822 cursor.start().deleted + (self.range.end.1 - fragment.insertion_offset),
2823 );
2824 }
2825
2826 if cursor.start().deleted > self.deleted_cursor.offset() {
2827 self.deleted_cursor.seek_forward(cursor.start().deleted);
2828 }
2829 let fragment_summary = self.deleted_cursor.summary(deleted_end);
2830 let mut old_end = self.old_end;
2831 old_end.add_assign(&fragment_summary);
2832 if let Some((edit, range)) = pending_edit.as_mut() {
2833 edit.old.end = old_end;
2834 range.end = end_anchor;
2835 } else {
2836 pending_edit = Some((
2837 Edit {
2838 old: self.old_end..old_end,
2839 new: self.new_end..self.new_end,
2840 },
2841 start_anchor..end_anchor,
2842 ));
2843 }
2844
2845 self.old_end = old_end;
2846 }
2847
2848 cursor.next();
2849 }
2850
2851 pending_edit
2852 }
2853}
2854
2855impl Fragment {
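    /// Whether the fragment is currently visible: its insertion has not been undone and
    /// every deletion that touched it has been undone.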
2856 fn is_visible(&self, undos: &UndoMap) -> bool {
2857 !undos.is_undone(self.timestamp) && self.deletions.iter().all(|d| undos.is_undone(*d))
2858 }
2859
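    /// Whether the fragment was visible at `version`: its insertion had been observed and
    /// not undone, and every observed deletion had been undone as of that version.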
2860 fn was_visible(&self, version: &clock::Global, undos: &UndoMap) -> bool {
2861 (version.observed(self.timestamp) && !undos.was_undone(self.timestamp, version))
2862 && self
2863 .deletions
2864 .iter()
2865 .all(|d| !version.observed(*d) || undos.was_undone(*d, version))
2866 }
2867}
2868
2869impl sum_tree::Item for Fragment {
2870 type Summary = FragmentSummary;
2871
2872 fn summary(&self, _cx: &Option<clock::Global>) -> Self::Summary {
2873 let mut max_version = clock::Global::new();
2874 max_version.observe(self.timestamp);
2875 for deletion in &self.deletions {
2876 max_version.observe(*deletion);
2877 }
2878 max_version.join(&self.max_undos);
2879
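        // For a single fragment, the min and max insertion versions both contain only its
        // insertion timestamp; they diverge only when summaries are aggregated.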
2880 let mut min_insertion_version = clock::Global::new();
2881 min_insertion_version.observe(self.timestamp);
2882 let max_insertion_version = min_insertion_version.clone();
2883 if self.visible {
2884 FragmentSummary {
2885 max_id: self.id.clone(),
2886 text: FragmentTextSummary {
2887 visible: self.len,
2888 deleted: 0,
2889 },
2890 max_version,
2891 min_insertion_version,
2892 max_insertion_version,
2893 }
2894 } else {
2895 FragmentSummary {
2896 max_id: self.id.clone(),
2897 text: FragmentTextSummary {
2898 visible: 0,
2899 deleted: self.len,
2900 },
2901 max_version,
2902 min_insertion_version,
2903 max_insertion_version,
2904 }
2905 }
2906 }
2907}
2908
2909impl sum_tree::Summary for FragmentSummary {
2910 type Context<'a> = &'a Option<clock::Global>;
2911
2912 fn zero(_cx: Self::Context<'_>) -> Self {
2913 Default::default()
2914 }
2915
2916 fn add_summary(&mut self, other: &Self, _: Self::Context<'_>) {
2917 self.max_id.assign(&other.max_id);
2918 self.text.visible += &other.text.visible;
2919 self.text.deleted += &other.text.deleted;
2920 self.max_version.join(&other.max_version);
2921 self.min_insertion_version
2922 .meet(&other.min_insertion_version);
2923 self.max_insertion_version
2924 .join(&other.max_insertion_version);
2925 }
2926}
2927
2928impl Default for FragmentSummary {
2929 fn default() -> Self {
2930 FragmentSummary {
2931 max_id: Locator::min(),
2932 text: FragmentTextSummary::default(),
2933 max_version: clock::Global::new(),
2934 min_insertion_version: clock::Global::new(),
2935 max_insertion_version: clock::Global::new(),
2936 }
2937 }
2938}
2939
2940impl sum_tree::Item for InsertionFragment {
2941 type Summary = InsertionFragmentKey;
2942
2943 fn summary(&self, _cx: ()) -> Self::Summary {
2944 InsertionFragmentKey {
2945 timestamp: self.timestamp,
2946 split_offset: self.split_offset,
2947 }
2948 }
2949}
2950
2951impl sum_tree::KeyedItem for InsertionFragment {
2952 type Key = InsertionFragmentKey;
2953
2954 fn key(&self) -> Self::Key {
2955 sum_tree::Item::summary(self, ())
2956 }
2957}
2958
2959impl InsertionFragment {
2960 fn new(fragment: &Fragment) -> Self {
2961 Self {
2962 timestamp: fragment.timestamp,
2963 split_offset: fragment.insertion_offset,
2964 fragment_id: fragment.id.clone(),
2965 }
2966 }
2967
2968 fn insert_new(fragment: &Fragment) -> sum_tree::Edit<Self> {
2969 sum_tree::Edit::Insert(Self::new(fragment))
2970 }
2971}
2972
2973impl sum_tree::ContextLessSummary for InsertionFragmentKey {
2974 fn zero() -> Self {
2975 InsertionFragmentKey {
2976 timestamp: Lamport::MIN,
2977 split_offset: 0,
2978 }
2979 }
2980
2981 fn add_summary(&mut self, summary: &Self) {
2982 *self = *summary;
2983 }
2984}
2985
2986#[derive(Copy, Clone, Debug, Default, PartialEq, Eq, PartialOrd, Ord, Hash)]
2987pub struct FullOffset(pub usize);
2988
2989impl ops::AddAssign<usize> for FullOffset {
2990 fn add_assign(&mut self, rhs: usize) {
2991 self.0 += rhs;
2992 }
2993}
2994
2995impl ops::Add<usize> for FullOffset {
2996 type Output = Self;
2997
2998 fn add(mut self, rhs: usize) -> Self::Output {
2999 self += rhs;
3000 self
3001 }
3002}
3003
3004impl ops::Sub for FullOffset {
3005 type Output = usize;
3006
3007 fn sub(self, rhs: Self) -> Self::Output {
3008 self.0 - rhs.0
3009 }
3010}
3011
3012impl sum_tree::Dimension<'_, FragmentSummary> for usize {
3013 fn zero(_: &Option<clock::Global>) -> Self {
3014 Default::default()
3015 }
3016
3017 fn add_summary(&mut self, summary: &FragmentSummary, _: &Option<clock::Global>) {
3018 *self += summary.text.visible;
3019 }
3020}
3021
3022impl sum_tree::Dimension<'_, FragmentSummary> for FullOffset {
3023 fn zero(_: &Option<clock::Global>) -> Self {
3024 Default::default()
3025 }
3026
3027 fn add_summary(&mut self, summary: &FragmentSummary, _: &Option<clock::Global>) {
3028 self.0 += summary.text.visible + summary.text.deleted;
3029 }
3030}
3031
3032impl<'a> sum_tree::Dimension<'a, FragmentSummary> for Option<&'a Locator> {
3033 fn zero(_: &Option<clock::Global>) -> Self {
3034 Default::default()
3035 }
3036
3037 fn add_summary(&mut self, summary: &'a FragmentSummary, _: &Option<clock::Global>) {
3038 *self = Some(&summary.max_id);
3039 }
3040}
3041
3042impl sum_tree::SeekTarget<'_, FragmentSummary, FragmentTextSummary> for usize {
3043 fn cmp(
3044 &self,
3045 cursor_location: &FragmentTextSummary,
3046 _: &Option<clock::Global>,
3047 ) -> cmp::Ordering {
3048 Ord::cmp(self, &cursor_location.visible)
3049 }
3050}
3051
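/// A `FullOffset` (visible plus deleted text) interpreted relative to a context version.
/// The offset accumulates only across spans whose insertions were fully observed by that
/// version, and becomes `Invalid` when a partially observed span is crossed.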
3052#[derive(Copy, Clone, Debug, Eq, PartialEq)]
3053enum VersionedFullOffset {
3054 Offset(FullOffset),
3055 Invalid,
3056}
3057
3058impl VersionedFullOffset {
3059 fn full_offset(&self) -> FullOffset {
3060 if let Self::Offset(position) = self {
3061 *position
3062 } else {
3063 panic!("invalid version")
3064 }
3065 }
3066}
3067
3068impl Default for VersionedFullOffset {
3069 fn default() -> Self {
3070 Self::Offset(Default::default())
3071 }
3072}
3073
3074impl<'a> sum_tree::Dimension<'a, FragmentSummary> for VersionedFullOffset {
3075 fn zero(_cx: &Option<clock::Global>) -> Self {
3076 Default::default()
3077 }
3078
3079 fn add_summary(&mut self, summary: &'a FragmentSummary, cx: &Option<clock::Global>) {
3080 if let Self::Offset(offset) = self {
3081 let version = cx.as_ref().unwrap();
3082 if version.observed_all(&summary.max_insertion_version) {
3083 *offset += summary.text.visible + summary.text.deleted;
3084 } else if version.observed_any(&summary.min_insertion_version) {
3085 *self = Self::Invalid;
3086 }
3087 }
3088 }
3089}
3090
3091impl sum_tree::SeekTarget<'_, FragmentSummary, Self> for VersionedFullOffset {
3092 fn cmp(&self, cursor_position: &Self, _: &Option<clock::Global>) -> cmp::Ordering {
3093 match (self, cursor_position) {
3094 (Self::Offset(a), Self::Offset(b)) => Ord::cmp(a, b),
3095 (Self::Offset(_), Self::Invalid) => cmp::Ordering::Less,
3096 (Self::Invalid, _) => unreachable!(),
3097 }
3098 }
3099}
3100
3101impl Operation {
3102 fn replica_id(&self) -> ReplicaId {
3103 operation_queue::Operation::lamport_timestamp(self).replica_id
3104 }
3105
3106 pub fn timestamp(&self) -> clock::Lamport {
3107 match self {
3108 Operation::Edit(edit) => edit.timestamp,
3109 Operation::Undo(undo) => undo.timestamp,
3110 }
3111 }
3112
3113 pub fn as_edit(&self) -> Option<&EditOperation> {
3114 match self {
3115 Operation::Edit(edit) => Some(edit),
3116 _ => None,
3117 }
3118 }
3119
3120 pub fn is_edit(&self) -> bool {
3121 matches!(self, Operation::Edit { .. })
3122 }
3123}
3124
3125impl operation_queue::Operation for Operation {
3126 fn lamport_timestamp(&self) -> clock::Lamport {
3127 match self {
3128 Operation::Edit(edit) => edit.timestamp,
3129 Operation::Undo(undo) => undo.timestamp,
3130 }
3131 }
3132}
3133
3134pub trait ToOffset {
3135 fn to_offset(&self, snapshot: &BufferSnapshot) -> usize;
    /// Returns the next offset in the buffer after this position, respecting UTF-8 character boundaries.
3137 fn to_next_offset(&self, snapshot: &BufferSnapshot) -> usize {
3138 snapshot
3139 .visible_text
3140 .ceil_char_boundary(self.to_offset(snapshot) + 1)
3141 }
    /// Returns the previous offset in the buffer before this position, respecting UTF-8 character boundaries.
3143 fn to_previous_offset(&self, snapshot: &BufferSnapshot) -> usize {
3144 snapshot
3145 .visible_text
3146 .floor_char_boundary(self.to_offset(snapshot).saturating_sub(1))
3147 }
3148}
3149
3150impl ToOffset for Point {
3151 #[inline]
3152 fn to_offset(&self, snapshot: &BufferSnapshot) -> usize {
3153 snapshot.point_to_offset(*self)
3154 }
3155}
3156
3157impl ToOffset for usize {
3158 fn to_offset(&self, snapshot: &BufferSnapshot) -> usize {
3159 if snapshot
3160 .as_rope()
3161 .assert_char_boundary::<{ cfg!(debug_assertions) }>(*self)
3162 {
3163 snapshot.as_rope().floor_char_boundary(*self)
3164 } else {
3165 *self
3166 }
3167 }
3168}
3169
3170impl ToOffset for Anchor {
3171 #[inline]
3172 fn to_offset(&self, snapshot: &BufferSnapshot) -> usize {
3173 snapshot.summary_for_anchor(self)
3174 }
3175}
3176
3177impl<T: ToOffset> ToOffset for &T {
3178 #[inline]
3179 fn to_offset(&self, content: &BufferSnapshot) -> usize {
3180 (*self).to_offset(content)
3181 }
3182}
3183
3184impl ToOffset for PointUtf16 {
3185 #[inline]
3186 fn to_offset(&self, snapshot: &BufferSnapshot) -> usize {
3187 snapshot.point_utf16_to_offset(*self)
3188 }
3189}
3190
3191impl ToOffset for Unclipped<PointUtf16> {
3192 #[inline]
3193 fn to_offset(&self, snapshot: &BufferSnapshot) -> usize {
3194 snapshot.unclipped_point_utf16_to_offset(*self)
3195 }
3196}
3197
3198pub trait ToPoint {
3199 fn to_point(&self, snapshot: &BufferSnapshot) -> Point;
3200}
3201
3202impl ToPoint for Anchor {
3203 #[inline]
3204 fn to_point(&self, snapshot: &BufferSnapshot) -> Point {
3205 snapshot.summary_for_anchor(self)
3206 }
3207}
3208
3209impl ToPoint for usize {
3210 #[inline]
3211 fn to_point(&self, snapshot: &BufferSnapshot) -> Point {
3212 snapshot.offset_to_point(*self)
3213 }
3214}
3215
3216impl ToPoint for Point {
3217 #[inline]
3218 fn to_point(&self, _: &BufferSnapshot) -> Point {
3219 *self
3220 }
3221}
3222
3223impl ToPoint for Unclipped<PointUtf16> {
3224 #[inline]
3225 fn to_point(&self, snapshot: &BufferSnapshot) -> Point {
3226 snapshot.unclipped_point_utf16_to_point(*self)
3227 }
3228}
3229
3230pub trait ToPointUtf16 {
3231 fn to_point_utf16(&self, snapshot: &BufferSnapshot) -> PointUtf16;
3232}
3233
3234impl ToPointUtf16 for Anchor {
3235 #[inline]
3236 fn to_point_utf16(&self, snapshot: &BufferSnapshot) -> PointUtf16 {
3237 snapshot.summary_for_anchor(self)
3238 }
3239}
3240
3241impl ToPointUtf16 for usize {
3242 #[inline]
3243 fn to_point_utf16(&self, snapshot: &BufferSnapshot) -> PointUtf16 {
3244 snapshot.offset_to_point_utf16(*self)
3245 }
3246}
3247
3248impl ToPointUtf16 for PointUtf16 {
3249 #[inline]
3250 fn to_point_utf16(&self, _: &BufferSnapshot) -> PointUtf16 {
3251 *self
3252 }
3253}
3254
3255impl ToPointUtf16 for Point {
3256 #[inline]
3257 fn to_point_utf16(&self, snapshot: &BufferSnapshot) -> PointUtf16 {
3258 snapshot.point_to_point_utf16(*self)
3259 }
3260}
3261
3262pub trait ToOffsetUtf16 {
3263 fn to_offset_utf16(&self, snapshot: &BufferSnapshot) -> OffsetUtf16;
3264}
3265
3266impl ToOffsetUtf16 for Anchor {
3267 #[inline]
3268 fn to_offset_utf16(&self, snapshot: &BufferSnapshot) -> OffsetUtf16 {
3269 snapshot.summary_for_anchor(self)
3270 }
3271}
3272
3273impl ToOffsetUtf16 for usize {
3274 #[inline]
3275 fn to_offset_utf16(&self, snapshot: &BufferSnapshot) -> OffsetUtf16 {
3276 snapshot.offset_to_offset_utf16(*self)
3277 }
3278}
3279
3280impl ToOffsetUtf16 for OffsetUtf16 {
3281 #[inline]
3282 fn to_offset_utf16(&self, _snapshot: &BufferSnapshot) -> OffsetUtf16 {
3283 *self
3284 }
3285}
3286
3287pub trait FromAnchor {
3288 fn from_anchor(anchor: &Anchor, snapshot: &BufferSnapshot) -> Self;
3289}
3290
3291impl FromAnchor for Anchor {
3292 #[inline]
3293 fn from_anchor(anchor: &Anchor, _snapshot: &BufferSnapshot) -> Self {
3294 *anchor
3295 }
3296}
3297
3298impl FromAnchor for Point {
3299 #[inline]
3300 fn from_anchor(anchor: &Anchor, snapshot: &BufferSnapshot) -> Self {
3301 snapshot.summary_for_anchor(anchor)
3302 }
3303}
3304
3305impl FromAnchor for PointUtf16 {
3306 #[inline]
3307 fn from_anchor(anchor: &Anchor, snapshot: &BufferSnapshot) -> Self {
3308 snapshot.summary_for_anchor(anchor)
3309 }
3310}
3311
3312impl FromAnchor for usize {
3313 #[inline]
3314 fn from_anchor(anchor: &Anchor, snapshot: &BufferSnapshot) -> Self {
3315 snapshot.summary_for_anchor(anchor)
3316 }
3317}
3318
3319#[derive(Clone, Copy, Debug, PartialEq)]
3320pub enum LineEnding {
3321 Unix,
3322 Windows,
3323}
3324
3325impl Default for LineEnding {
3326 fn default() -> Self {
3327 #[cfg(unix)]
3328 return Self::Unix;
3329
3330 #[cfg(not(unix))]
3331 return Self::Windows;
3332 }
3333}
3334
3335impl LineEnding {
3336 pub fn as_str(&self) -> &'static str {
3337 match self {
3338 LineEnding::Unix => "\n",
3339 LineEnding::Windows => "\r\n",
3340 }
3341 }
3342
3343 pub fn label(&self) -> &'static str {
3344 match self {
3345 LineEnding::Unix => "LF",
3346 LineEnding::Windows => "CRLF",
3347 }
3348 }
3349
3350 pub fn detect(text: &str) -> Self {
3351 let mut max_ix = cmp::min(text.len(), 1000);
3352 while !text.is_char_boundary(max_ix) {
3353 max_ix -= 1;
3354 }
3355
3356 if let Some(ix) = text[..max_ix].find(['\n']) {
3357 if ix > 0 && text.as_bytes()[ix - 1] == b'\r' {
3358 Self::Windows
3359 } else {
3360 Self::Unix
3361 }
3362 } else {
3363 Self::default()
3364 }
3365 }
3366
3367 pub fn normalize(text: &mut String) {
3368 if let Cow::Owned(replaced) = LINE_SEPARATORS_REGEX.replace_all(text, "\n") {
3369 *text = replaced;
3370 }
3371 }
3372
3373 pub fn normalize_arc(text: Arc<str>) -> Arc<str> {
3374 if let Cow::Owned(replaced) = LINE_SEPARATORS_REGEX.replace_all(&text, "\n") {
3375 replaced.into()
3376 } else {
3377 text
3378 }
3379 }
3380
3381 pub fn normalize_cow(text: Cow<str>) -> Cow<str> {
3382 if let Cow::Owned(replaced) = LINE_SEPARATORS_REGEX.replace_all(&text, "\n") {
3383 replaced.into()
3384 } else {
3385 text
3386 }
3387 }
3388}
3389
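/// Yields the rope's text in chunks with every `\n` replaced by the given line ending.
/// Assumes the rope's line separators have already been normalized to `\n`.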
3390pub fn chunks_with_line_ending(rope: &Rope, line_ending: LineEnding) -> impl Iterator<Item = &str> {
3391 rope.chunks().flat_map(move |chunk| {
3392 let mut newline = false;
3393 let end_with_newline = chunk.ends_with('\n').then_some(line_ending.as_str());
3394 chunk
3395 .lines()
3396 .flat_map(move |line| {
3397 let ending = if newline {
3398 Some(line_ending.as_str())
3399 } else {
3400 None
3401 };
3402 newline = true;
3403 ending.into_iter().chain([line])
3404 })
3405 .chain(end_with_newline)
3406 })
3407}
3408
3409#[cfg(debug_assertions)]
3410pub mod debug {
3411 use super::*;
3412 use parking_lot::Mutex;
3413 use std::any::TypeId;
3414 use std::hash::{Hash, Hasher};
3415
3416 static GLOBAL_DEBUG_RANGES: Mutex<Option<GlobalDebugRanges>> = Mutex::new(None);
3417
3418 pub struct GlobalDebugRanges {
3419 pub ranges: Vec<DebugRange>,
3420 key_to_occurrence_index: HashMap<Key, usize>,
3421 next_occurrence_index: usize,
3422 }
3423
3424 pub struct DebugRange {
3425 key: Key,
3426 pub ranges: Vec<Range<Anchor>>,
3427 pub value: Arc<str>,
3428 pub occurrence_index: usize,
3429 }
3430
3431 #[derive(Debug, Clone, PartialEq, Eq, Hash)]
3432 struct Key {
3433 type_id: TypeId,
3434 hash: u64,
3435 }
3436
3437 impl GlobalDebugRanges {
3438 pub fn with_locked<R>(f: impl FnOnce(&mut Self) -> R) -> R {
3439 let mut state = GLOBAL_DEBUG_RANGES.lock();
3440 if state.is_none() {
3441 *state = Some(GlobalDebugRanges {
3442 ranges: Vec::new(),
3443 key_to_occurrence_index: HashMap::default(),
3444 next_occurrence_index: 0,
3445 });
3446 }
3447 if let Some(global_debug_ranges) = state.as_mut() {
3448 f(global_debug_ranges)
3449 } else {
3450 unreachable!()
3451 }
3452 }
3453
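        /// Inserts or replaces the debug annotation for `key`. Each distinct key keeps a
        /// stable occurrence index that is reused on subsequent inserts.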
3454 pub fn insert<K: Hash + 'static>(
3455 &mut self,
3456 key: &K,
3457 ranges: Vec<Range<Anchor>>,
3458 value: Arc<str>,
3459 ) {
3460 let occurrence_index = *self
3461 .key_to_occurrence_index
3462 .entry(Key::new(key))
3463 .or_insert_with(|| {
3464 let occurrence_index = self.next_occurrence_index;
3465 self.next_occurrence_index += 1;
3466 occurrence_index
3467 });
3468 let key = Key::new(key);
3469 let existing = self
3470 .ranges
3471 .iter()
3472 .enumerate()
3473 .rfind(|(_, existing)| existing.key == key);
3474 if let Some((existing_ix, _)) = existing {
3475 self.ranges.remove(existing_ix);
3476 }
3477 self.ranges.push(DebugRange {
3478 ranges,
3479 key,
3480 value,
3481 occurrence_index,
3482 });
3483 }
3484
3485 pub fn remove<K: Hash + 'static>(&mut self, key: &K) {
3486 self.remove_impl(&Key::new(key));
3487 }
3488
3489 fn remove_impl(&mut self, key: &Key) {
3490 let existing = self
3491 .ranges
3492 .iter()
3493 .enumerate()
3494 .rfind(|(_, existing)| &existing.key == key);
3495 if let Some((existing_ix, _)) = existing {
3496 self.ranges.remove(existing_ix);
3497 }
3498 }
3499
3500 pub fn remove_all_with_key_type<K: 'static>(&mut self) {
3501 self.ranges
3502 .retain(|item| item.key.type_id != TypeId::of::<K>());
3503 }
3504 }
3505
3506 impl Key {
3507 fn new<K: Hash + 'static>(key: &K) -> Self {
3508 let type_id = TypeId::of::<K>();
3509 let mut hasher = collections::FxHasher::default();
3510 key.hash(&mut hasher);
3511 Key {
3512 type_id,
3513 hash: hasher.finish(),
3514 }
3515 }
3516 }
3517
3518 pub trait ToDebugRanges {
3519 fn to_debug_ranges(&self, snapshot: &BufferSnapshot) -> Vec<Range<usize>>;
3520 }
3521
3522 impl<T: ToOffset> ToDebugRanges for T {
3523 fn to_debug_ranges(&self, snapshot: &BufferSnapshot) -> Vec<Range<usize>> {
3524 [self.to_offset(snapshot)].to_debug_ranges(snapshot)
3525 }
3526 }
3527
3528 impl<T: ToOffset + Clone> ToDebugRanges for Range<T> {
3529 fn to_debug_ranges(&self, snapshot: &BufferSnapshot) -> Vec<Range<usize>> {
3530 [self.clone()].to_debug_ranges(snapshot)
3531 }
3532 }
3533
3534 impl<T: ToOffset> ToDebugRanges for Vec<T> {
3535 fn to_debug_ranges(&self, snapshot: &BufferSnapshot) -> Vec<Range<usize>> {
3536 self.as_slice().to_debug_ranges(snapshot)
3537 }
3538 }
3539
3540 impl<T: ToOffset> ToDebugRanges for Vec<Range<T>> {
3541 fn to_debug_ranges(&self, snapshot: &BufferSnapshot) -> Vec<Range<usize>> {
3542 self.as_slice().to_debug_ranges(snapshot)
3543 }
3544 }
3545
3546 impl<T: ToOffset> ToDebugRanges for [T] {
3547 fn to_debug_ranges(&self, snapshot: &BufferSnapshot) -> Vec<Range<usize>> {
3548 self.iter()
3549 .map(|item| {
3550 let offset = item.to_offset(snapshot);
3551 offset..offset
3552 })
3553 .collect()
3554 }
3555 }
3556
3557 impl<T: ToOffset> ToDebugRanges for [Range<T>] {
3558 fn to_debug_ranges(&self, snapshot: &BufferSnapshot) -> Vec<Range<usize>> {
3559 self.iter()
3560 .map(|range| range.start.to_offset(snapshot)..range.end.to_offset(snapshot))
3561 .collect()
3562 }
3563 }
3564}