1mod anchor;
2pub mod locator;
3#[cfg(any(test, feature = "test-support"))]
4pub mod network;
5pub mod operation_queue;
6mod patch;
7mod selection;
8pub mod subscription;
9#[cfg(test)]
10mod tests;
11mod undo_map;
12
13pub use anchor::*;
14use anyhow::{Context as _, Result};
15use clock::Lamport;
16pub use clock::ReplicaId;
17use collections::{HashMap, HashSet};
18use gpui::BackgroundExecutor;
19use locator::Locator;
20use operation_queue::OperationQueue;
21pub use patch::Patch;
22use postage::{oneshot, prelude::*};
23
24use regex::Regex;
25pub use rope::*;
26pub use selection::*;
27use std::{
28 borrow::Cow,
29 cmp::{self, Ordering, Reverse},
30 fmt::Display,
31 future::Future,
32 iter::Iterator,
33 num::NonZeroU64,
34 ops::{self, Deref, Range, Sub},
35 str,
36 sync::{Arc, LazyLock},
37 time::{Duration, Instant},
38};
39pub use subscription::*;
40pub use sum_tree::Bias;
41use sum_tree::{Dimensions, FilterCursor, SumTree, TreeMap, TreeSet};
42use undo_map::UndoMap;
43
44#[cfg(any(test, feature = "test-support"))]
45use util::RandomCharIter;
46
47static LINE_SEPARATORS_REGEX: LazyLock<Regex> =
48 LazyLock::new(|| Regex::new(r"\r\n|\r").expect("Failed to create LINE_SEPARATORS_REGEX"));
49
50pub type TransactionId = clock::Lamport;
51
52pub struct Buffer {
53 snapshot: BufferSnapshot,
54 history: History,
55 deferred_ops: OperationQueue<Operation>,
56 deferred_replicas: HashSet<ReplicaId>,
57 pub lamport_clock: clock::Lamport,
58 subscriptions: Topic,
59 edit_id_resolvers: HashMap<clock::Lamport, Vec<oneshot::Sender<()>>>,
60 wait_for_version_txs: Vec<(clock::Global, oneshot::Sender<()>)>,
61}
62
63#[repr(transparent)]
64#[derive(Clone, Copy, Debug, Hash, PartialEq, PartialOrd, Ord, Eq)]
65pub struct BufferId(NonZeroU64);
66
67impl Display for BufferId {
68 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
69 write!(f, "{}", self.0)
70 }
71}
72
73impl From<NonZeroU64> for BufferId {
74 fn from(id: NonZeroU64) -> Self {
75 BufferId(id)
76 }
77}
78
79impl BufferId {
    /// Returns `Err` if `id` is outside the `BufferId` domain (i.e. zero).
81 pub fn new(id: u64) -> anyhow::Result<Self> {
82 let id = NonZeroU64::new(id).context("Buffer id cannot be 0.")?;
83 Ok(Self(id))
84 }
85
    /// Increments this buffer id, returning the old value.
    /// In other words, this acts as a post-increment operator.
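    ///
    /// A minimal illustration of the post-increment behavior (not from the
    /// original docs; the `text` crate path is an assumption):
    ///
    /// ```ignore
    /// use text::BufferId;
    ///
    /// let mut id = BufferId::new(1).unwrap();
    /// let old = id.next(); // returns the previous value, then increments
    /// assert_eq!(old.to_proto(), 1);
    /// assert_eq!(id.to_proto(), 2);
    /// ```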
88 pub fn next(&mut self) -> Self {
89 let old = *self;
90 self.0 = self.0.saturating_add(1);
91 old
92 }
93
94 pub fn to_proto(self) -> u64 {
95 self.into()
96 }
97}
98
99impl From<BufferId> for u64 {
100 fn from(id: BufferId) -> Self {
101 id.0.get()
102 }
103}
104
105#[derive(Clone)]
106pub struct BufferSnapshot {
107 replica_id: ReplicaId,
108 remote_id: BufferId,
109 visible_text: Rope,
110 deleted_text: Rope,
111 line_ending: LineEnding,
112 undo_map: UndoMap,
113 fragments: SumTree<Fragment>,
114 insertions: SumTree<InsertionFragment>,
115 insertion_slices: TreeSet<InsertionSlice>,
116 pub version: clock::Global,
117}
118
119#[derive(Clone, Debug)]
120pub struct HistoryEntry {
121 transaction: Transaction,
122 first_edit_at: Instant,
123 last_edit_at: Instant,
124 suppress_grouping: bool,
125}
126
127#[derive(Clone, Debug)]
128pub struct Transaction {
129 pub id: TransactionId,
130 pub edit_ids: Vec<clock::Lamport>,
131 pub start: clock::Global,
132}
133
134impl Transaction {
135 pub fn merge_in(&mut self, other: Transaction) {
136 self.edit_ids.extend(other.edit_ids);
137 }
138}
139
140impl HistoryEntry {
141 pub fn transaction_id(&self) -> TransactionId {
142 self.transaction.id
143 }
144}
145
146struct History {
147 base_text: Rope,
148 operations: TreeMap<clock::Lamport, Operation>,
149 undo_stack: Vec<HistoryEntry>,
150 redo_stack: Vec<HistoryEntry>,
151 transaction_depth: usize,
152 group_interval: Duration,
153}
154
155#[derive(Clone, Debug, Eq, PartialEq)]
156struct InsertionSlice {
157 edit_id: clock::Lamport,
158 insertion_id: clock::Lamport,
159 range: Range<usize>,
160}
161
162impl Ord for InsertionSlice {
163 fn cmp(&self, other: &Self) -> Ordering {
164 self.edit_id
165 .cmp(&other.edit_id)
166 .then_with(|| self.insertion_id.cmp(&other.insertion_id))
167 .then_with(|| self.range.start.cmp(&other.range.start))
168 .then_with(|| self.range.end.cmp(&other.range.end))
169 }
170}
171
172impl PartialOrd for InsertionSlice {
173 fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
174 Some(self.cmp(other))
175 }
176}
177
178impl InsertionSlice {
179 fn from_fragment(edit_id: clock::Lamport, fragment: &Fragment) -> Self {
180 Self {
181 edit_id,
182 insertion_id: fragment.timestamp,
183 range: fragment.insertion_offset..fragment.insertion_offset + fragment.len,
184 }
185 }
186}
187
188impl History {
189 pub fn new(base_text: Rope) -> Self {
190 Self {
191 base_text,
192 operations: Default::default(),
193 undo_stack: Vec::new(),
194 redo_stack: Vec::new(),
195 transaction_depth: 0,
196 // Don't group transactions in tests unless we opt in, because it's a footgun.
197 #[cfg(any(test, feature = "test-support"))]
198 group_interval: Duration::ZERO,
199 #[cfg(not(any(test, feature = "test-support")))]
200 group_interval: Duration::from_millis(300),
201 }
202 }
203
204 fn push(&mut self, op: Operation) {
205 self.operations.insert(op.timestamp(), op);
206 }
207
208 fn start_transaction(
209 &mut self,
210 start: clock::Global,
211 now: Instant,
212 clock: &mut clock::Lamport,
213 ) -> Option<TransactionId> {
214 self.transaction_depth += 1;
215 if self.transaction_depth == 1 {
216 let id = clock.tick();
217 self.undo_stack.push(HistoryEntry {
218 transaction: Transaction {
219 id,
220 start,
221 edit_ids: Default::default(),
222 },
223 first_edit_at: now,
224 last_edit_at: now,
225 suppress_grouping: false,
226 });
227 Some(id)
228 } else {
229 None
230 }
231 }
232
233 fn end_transaction(&mut self, now: Instant) -> Option<&HistoryEntry> {
234 assert_ne!(self.transaction_depth, 0);
235 self.transaction_depth -= 1;
236 if self.transaction_depth == 0 {
237 if self
238 .undo_stack
239 .last()
240 .unwrap()
241 .transaction
242 .edit_ids
243 .is_empty()
244 {
245 self.undo_stack.pop();
246 None
247 } else {
248 self.redo_stack.clear();
249 let entry = self.undo_stack.last_mut().unwrap();
250 entry.last_edit_at = now;
251 Some(entry)
252 }
253 } else {
254 None
255 }
256 }
257
258 fn group(&mut self) -> Option<TransactionId> {
259 let mut count = 0;
260 let mut entries = self.undo_stack.iter();
261 if let Some(mut entry) = entries.next_back() {
262 while let Some(prev_entry) = entries.next_back() {
263 if !prev_entry.suppress_grouping
264 && entry.first_edit_at - prev_entry.last_edit_at < self.group_interval
265 {
266 entry = prev_entry;
267 count += 1;
268 } else {
269 break;
270 }
271 }
272 }
273 self.group_trailing(count)
274 }
275
276 fn group_until(&mut self, transaction_id: TransactionId) {
277 let mut count = 0;
278 for entry in self.undo_stack.iter().rev() {
279 if entry.transaction_id() == transaction_id {
280 self.group_trailing(count);
281 break;
282 } else if entry.suppress_grouping {
283 break;
284 } else {
285 count += 1;
286 }
287 }
288 }
289
290 fn group_trailing(&mut self, n: usize) -> Option<TransactionId> {
291 let new_len = self.undo_stack.len() - n;
292 let (entries_to_keep, entries_to_merge) = self.undo_stack.split_at_mut(new_len);
293 if let Some(last_entry) = entries_to_keep.last_mut() {
294 for entry in &*entries_to_merge {
295 for edit_id in &entry.transaction.edit_ids {
296 last_entry.transaction.edit_ids.push(*edit_id);
297 }
298 }
299
300 if let Some(entry) = entries_to_merge.last_mut() {
301 last_entry.last_edit_at = entry.last_edit_at;
302 }
303 }
304
305 self.undo_stack.truncate(new_len);
306 self.undo_stack.last().map(|e| e.transaction.id)
307 }
308
309 fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
310 self.undo_stack.last_mut().map(|entry| {
311 entry.suppress_grouping = true;
312 &entry.transaction
313 })
314 }
315
316 fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
317 assert_eq!(self.transaction_depth, 0);
318 self.undo_stack.push(HistoryEntry {
319 transaction,
320 first_edit_at: now,
321 last_edit_at: now,
322 suppress_grouping: false,
323 });
324 }
325
326 /// Differs from `push_transaction` in that it does not clear the redo
327 /// stack. Intended to be used to create a parent transaction to merge
328 /// potential child transactions into.
329 ///
330 /// The caller is responsible for removing it from the undo history using
331 /// `forget_transaction` if no edits are merged into it. Otherwise, if edits
332 /// are merged into this transaction, the caller is responsible for ensuring
    /// the redo stack is cleared. The easiest way to ensure this is to create
    /// transactions with the usual `start_transaction` and `end_transaction`
    /// methods and merge the resulting transactions into the transaction
    /// created by this method.
337 fn push_empty_transaction(
338 &mut self,
339 start: clock::Global,
340 now: Instant,
341 clock: &mut clock::Lamport,
342 ) -> TransactionId {
343 assert_eq!(self.transaction_depth, 0);
344 let id = clock.tick();
345 let transaction = Transaction {
346 id,
347 start,
348 edit_ids: Vec::new(),
349 };
350 self.undo_stack.push(HistoryEntry {
351 transaction,
352 first_edit_at: now,
353 last_edit_at: now,
354 suppress_grouping: false,
355 });
356 id
357 }
358
359 fn push_undo(&mut self, op_id: clock::Lamport) {
360 assert_ne!(self.transaction_depth, 0);
361 if let Some(Operation::Edit(_)) = self.operations.get(&op_id) {
362 let last_transaction = self.undo_stack.last_mut().unwrap();
363 last_transaction.transaction.edit_ids.push(op_id);
364 }
365 }
366
367 fn pop_undo(&mut self) -> Option<&HistoryEntry> {
368 assert_eq!(self.transaction_depth, 0);
369 if let Some(entry) = self.undo_stack.pop() {
370 self.redo_stack.push(entry);
371 self.redo_stack.last()
372 } else {
373 None
374 }
375 }
376
377 fn remove_from_undo(&mut self, transaction_id: TransactionId) -> Option<&HistoryEntry> {
378 assert_eq!(self.transaction_depth, 0);
379
380 let entry_ix = self
381 .undo_stack
382 .iter()
383 .rposition(|entry| entry.transaction.id == transaction_id)?;
384 let entry = self.undo_stack.remove(entry_ix);
385 self.redo_stack.push(entry);
386 self.redo_stack.last()
387 }
388
389 fn remove_from_undo_until(&mut self, transaction_id: TransactionId) -> &[HistoryEntry] {
390 assert_eq!(self.transaction_depth, 0);
391
392 let redo_stack_start_len = self.redo_stack.len();
393 if let Some(entry_ix) = self
394 .undo_stack
395 .iter()
396 .rposition(|entry| entry.transaction.id == transaction_id)
397 {
398 self.redo_stack
399 .extend(self.undo_stack.drain(entry_ix..).rev());
400 }
401 &self.redo_stack[redo_stack_start_len..]
402 }
403
404 fn forget(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
405 assert_eq!(self.transaction_depth, 0);
406 if let Some(entry_ix) = self
407 .undo_stack
408 .iter()
409 .rposition(|entry| entry.transaction.id == transaction_id)
410 {
411 Some(self.undo_stack.remove(entry_ix).transaction)
412 } else if let Some(entry_ix) = self
413 .redo_stack
414 .iter()
415 .rposition(|entry| entry.transaction.id == transaction_id)
416 {
417 Some(self.redo_stack.remove(entry_ix).transaction)
418 } else {
419 None
420 }
421 }
422
423 fn transaction(&self, transaction_id: TransactionId) -> Option<&Transaction> {
424 let entry = self
425 .undo_stack
426 .iter()
427 .rfind(|entry| entry.transaction.id == transaction_id)
428 .or_else(|| {
429 self.redo_stack
430 .iter()
431 .rfind(|entry| entry.transaction.id == transaction_id)
432 })?;
433 Some(&entry.transaction)
434 }
435
436 fn transaction_mut(&mut self, transaction_id: TransactionId) -> Option<&mut Transaction> {
437 let entry = self
438 .undo_stack
439 .iter_mut()
440 .rfind(|entry| entry.transaction.id == transaction_id)
441 .or_else(|| {
442 self.redo_stack
443 .iter_mut()
444 .rfind(|entry| entry.transaction.id == transaction_id)
445 })?;
446 Some(&mut entry.transaction)
447 }
448
449 fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
450 if let Some(transaction) = self.forget(transaction)
451 && let Some(destination) = self.transaction_mut(destination)
452 {
453 destination.edit_ids.extend(transaction.edit_ids);
454 }
455 }
456
457 fn pop_redo(&mut self) -> Option<&HistoryEntry> {
458 assert_eq!(self.transaction_depth, 0);
459 if let Some(entry) = self.redo_stack.pop() {
460 self.undo_stack.push(entry);
461 self.undo_stack.last()
462 } else {
463 None
464 }
465 }
466
467 fn remove_from_redo(&mut self, transaction_id: TransactionId) -> &[HistoryEntry] {
468 assert_eq!(self.transaction_depth, 0);
469
470 let undo_stack_start_len = self.undo_stack.len();
471 if let Some(entry_ix) = self
472 .redo_stack
473 .iter()
474 .rposition(|entry| entry.transaction.id == transaction_id)
475 {
476 self.undo_stack
477 .extend(self.redo_stack.drain(entry_ix..).rev());
478 }
479 &self.undo_stack[undo_stack_start_len..]
480 }
481}
482
483struct Edits<'a, D: TextDimension, F: FnMut(&FragmentSummary) -> bool> {
484 visible_cursor: rope::Cursor<'a>,
485 deleted_cursor: rope::Cursor<'a>,
486 fragments_cursor: Option<FilterCursor<'a, 'static, F, Fragment, FragmentTextSummary>>,
487 undos: &'a UndoMap,
488 since: &'a clock::Global,
489 old_end: D,
490 new_end: D,
491 range: Range<(&'a Locator, usize)>,
492 buffer_id: BufferId,
493}
494
495#[derive(Clone, Debug, Default, Eq, PartialEq)]
496pub struct Edit<D> {
497 pub old: Range<D>,
498 pub new: Range<D>,
499}
500
501impl<D> Edit<D>
502where
503 D: Sub<D, Output = D> + PartialEq + Copy,
504{
505 pub fn old_len(&self) -> D {
506 self.old.end - self.old.start
507 }
508
509 pub fn new_len(&self) -> D {
510 self.new.end - self.new.start
511 }
512
513 pub fn is_empty(&self) -> bool {
514 self.old.start == self.old.end && self.new.start == self.new.end
515 }
516}
517
518impl<D1, D2> Edit<(D1, D2)> {
519 pub fn flatten(self) -> (Edit<D1>, Edit<D2>) {
520 (
521 Edit {
522 old: self.old.start.0..self.old.end.0,
523 new: self.new.start.0..self.new.end.0,
524 },
525 Edit {
526 old: self.old.start.1..self.old.end.1,
527 new: self.new.start.1..self.new.end.1,
528 },
529 )
530 }
531}
532
533#[derive(Eq, PartialEq, Clone, Debug)]
534pub struct Fragment {
535 pub id: Locator,
536 pub timestamp: clock::Lamport,
537 pub insertion_offset: usize,
538 pub len: usize,
539 pub visible: bool,
540 pub deletions: HashSet<clock::Lamport>,
541 pub max_undos: clock::Global,
542}
543
544#[derive(Eq, PartialEq, Clone, Debug)]
545pub struct FragmentSummary {
546 text: FragmentTextSummary,
547 max_id: Locator,
548 max_version: clock::Global,
549 min_insertion_version: clock::Global,
550 max_insertion_version: clock::Global,
551}
552
553#[derive(Copy, Default, Clone, Debug, PartialEq, Eq)]
554struct FragmentTextSummary {
555 visible: usize,
556 deleted: usize,
557}
558
559impl<'a> sum_tree::Dimension<'a, FragmentSummary> for FragmentTextSummary {
560 fn zero(_: &Option<clock::Global>) -> Self {
561 Default::default()
562 }
563
564 fn add_summary(&mut self, summary: &'a FragmentSummary, _: &Option<clock::Global>) {
565 self.visible += summary.text.visible;
566 self.deleted += summary.text.deleted;
567 }
568}
569
570#[derive(Eq, PartialEq, Clone, Debug)]
571struct InsertionFragment {
572 timestamp: clock::Lamport,
573 split_offset: usize,
574 fragment_id: Locator,
575}
576
577#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)]
578struct InsertionFragmentKey {
579 timestamp: clock::Lamport,
580 split_offset: usize,
581}
582
583#[derive(Clone, Debug, Eq, PartialEq)]
584pub enum Operation {
585 Edit(EditOperation),
586 Undo(UndoOperation),
587}
588
589#[derive(Clone, Debug, Eq, PartialEq)]
590pub struct EditOperation {
591 pub timestamp: clock::Lamport,
592 pub version: clock::Global,
593 pub ranges: Vec<Range<FullOffset>>,
594 pub new_text: Vec<Arc<str>>,
595}
596
597#[derive(Clone, Debug, Eq, PartialEq)]
598pub struct UndoOperation {
599 pub timestamp: clock::Lamport,
600 pub version: clock::Global,
601 pub counts: HashMap<clock::Lamport, u32>,
602}
603
604/// Stores information about the indentation of a line (tabs and spaces).
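///
/// A small illustrative example (hedged; not from the original docs, and the
/// `text` crate path is an assumption):
///
/// ```ignore
/// use text::LineIndent;
///
/// // One tab followed by two spaces, then code.
/// let indent = LineIndent::from("\t  fn");
/// assert_eq!(indent.tabs, 1);
/// assert_eq!(indent.spaces, 2);
/// assert!(!indent.is_line_blank());
/// assert_eq!(indent.raw_len(), 3);
/// assert_eq!(indent.len(4), 6); // each tab counts as `tab_size` columns
/// ```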
605#[derive(Clone, Copy, Debug, Eq, PartialEq)]
606pub struct LineIndent {
607 pub tabs: u32,
608 pub spaces: u32,
609 pub line_blank: bool,
610}
611
612impl LineIndent {
613 pub fn from_chunks(chunks: &mut Chunks) -> Self {
614 let mut tabs = 0;
615 let mut spaces = 0;
616 let mut line_blank = true;
617
618 'outer: while let Some(chunk) = chunks.peek() {
619 for ch in chunk.chars() {
620 if ch == '\t' {
621 tabs += 1;
622 } else if ch == ' ' {
623 spaces += 1;
624 } else {
625 if ch != '\n' {
626 line_blank = false;
627 }
628 break 'outer;
629 }
630 }
631
632 chunks.next();
633 }
634
635 Self {
636 tabs,
637 spaces,
638 line_blank,
639 }
640 }
641
642 /// Constructs a new `LineIndent` which only contains spaces.
643 pub fn spaces(spaces: u32) -> Self {
644 Self {
645 tabs: 0,
646 spaces,
647 line_blank: true,
648 }
649 }
650
651 /// Constructs a new `LineIndent` which only contains tabs.
652 pub fn tabs(tabs: u32) -> Self {
653 Self {
654 tabs,
655 spaces: 0,
656 line_blank: true,
657 }
658 }
659
660 /// Indicates whether the line is empty.
661 pub fn is_line_empty(&self) -> bool {
662 self.tabs == 0 && self.spaces == 0 && self.line_blank
663 }
664
665 /// Indicates whether the line is blank (contains only whitespace).
666 pub fn is_line_blank(&self) -> bool {
667 self.line_blank
668 }
669
670 /// Returns the number of indentation characters (tabs or spaces).
671 pub fn raw_len(&self) -> u32 {
672 self.tabs + self.spaces
673 }
674
675 /// Returns the number of indentation characters (tabs or spaces), taking tab size into account.
676 pub fn len(&self, tab_size: u32) -> u32 {
677 self.tabs * tab_size + self.spaces
678 }
679}
680
681impl From<&str> for LineIndent {
682 fn from(value: &str) -> Self {
683 Self::from_iter(value.chars())
684 }
685}
686
687impl FromIterator<char> for LineIndent {
688 fn from_iter<T: IntoIterator<Item = char>>(chars: T) -> Self {
689 let mut tabs = 0;
690 let mut spaces = 0;
691 let mut line_blank = true;
692 for c in chars {
693 if c == '\t' {
694 tabs += 1;
695 } else if c == ' ' {
696 spaces += 1;
697 } else {
698 if c != '\n' {
699 line_blank = false;
700 }
701 break;
702 }
703 }
704 Self {
705 tabs,
706 spaces,
707 line_blank,
708 }
709 }
710}
711
712impl Buffer {
713 /// Create a new buffer from a string.
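    ///
    /// A hedged usage sketch (assumes a gpui `BackgroundExecutor` is in scope
    /// as `executor`; the `text` crate path is an assumption):
    ///
    /// ```ignore
    /// use text::{Buffer, BufferId, ReplicaId};
    ///
    /// let buffer = Buffer::new(
    ///     ReplicaId::LOCAL,
    ///     BufferId::new(1).unwrap(),
    ///     "hello world",
    ///     &executor,
    /// );
    /// assert_eq!(buffer.text(), "hello world");
    /// ```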
714 pub fn new(
715 replica_id: ReplicaId,
716 remote_id: BufferId,
717 base_text: impl Into<String>,
718 executor: &BackgroundExecutor,
719 ) -> Buffer {
720 let mut base_text = base_text.into();
721 let line_ending = LineEnding::detect(&base_text);
722 LineEnding::normalize(&mut base_text);
723 Self::new_normalized(
724 replica_id,
725 remote_id,
726 line_ending,
727 Rope::from_str(&base_text, executor),
728 )
729 }
730
731 /// Create a new buffer from a string.
732 ///
    /// Unlike [`Buffer::new`], this does not construct the backing rope in parallel, even when the text is large.
734 pub fn new_slow(
735 replica_id: ReplicaId,
736 remote_id: BufferId,
737 base_text: impl Into<String>,
738 ) -> Buffer {
739 let mut base_text = base_text.into();
740 let line_ending = LineEnding::detect(&base_text);
741 LineEnding::normalize(&mut base_text);
742 Self::new_normalized(
743 replica_id,
744 remote_id,
745 line_ending,
746 Rope::from_str_small(&base_text),
747 )
748 }
749
750 pub fn new_normalized(
751 replica_id: ReplicaId,
752 remote_id: BufferId,
753 line_ending: LineEnding,
754 normalized: Rope,
755 ) -> Buffer {
756 let history = History::new(normalized);
757 let mut fragments = SumTree::new(&None);
758 let mut insertions = SumTree::default();
759
760 let mut lamport_clock = clock::Lamport::new(replica_id);
761 let mut version = clock::Global::new();
762
763 let visible_text = history.base_text.clone();
764 if !visible_text.is_empty() {
765 let insertion_timestamp = clock::Lamport::new(ReplicaId::LOCAL);
766 lamport_clock.observe(insertion_timestamp);
767 version.observe(insertion_timestamp);
768 let fragment_id = Locator::between(&Locator::min(), &Locator::max());
769 let fragment = Fragment {
770 id: fragment_id,
771 timestamp: insertion_timestamp,
772 insertion_offset: 0,
773 len: visible_text.len(),
774 visible: true,
775 deletions: Default::default(),
776 max_undos: Default::default(),
777 };
778 insertions.push(InsertionFragment::new(&fragment), ());
779 fragments.push(fragment, &None);
780 }
781
782 Buffer {
783 snapshot: BufferSnapshot {
784 replica_id,
785 remote_id,
786 visible_text,
787 deleted_text: Rope::new(),
788 line_ending,
789 fragments,
790 insertions,
791 version,
792 undo_map: Default::default(),
793 insertion_slices: Default::default(),
794 },
795 history,
796 deferred_ops: OperationQueue::new(),
797 deferred_replicas: HashSet::default(),
798 lamport_clock,
799 subscriptions: Default::default(),
800 edit_id_resolvers: Default::default(),
801 wait_for_version_txs: Default::default(),
802 }
803 }
804
805 pub fn version(&self) -> clock::Global {
806 self.version.clone()
807 }
808
809 pub fn snapshot(&self) -> BufferSnapshot {
810 self.snapshot.clone()
811 }
812
813 pub fn branch(&self) -> Self {
814 Self {
815 snapshot: self.snapshot.clone(),
816 history: History::new(self.base_text().clone()),
817 deferred_ops: OperationQueue::new(),
818 deferred_replicas: HashSet::default(),
819 lamport_clock: clock::Lamport::new(ReplicaId::LOCAL_BRANCH),
820 subscriptions: Default::default(),
821 edit_id_resolvers: Default::default(),
822 wait_for_version_txs: Default::default(),
823 }
824 }
825
826 pub fn replica_id(&self) -> ReplicaId {
827 self.lamport_clock.replica_id
828 }
829
830 pub fn remote_id(&self) -> BufferId {
831 self.remote_id
832 }
833
834 pub fn deferred_ops_len(&self) -> usize {
835 self.deferred_ops.len()
836 }
837
838 pub fn transaction_group_interval(&self) -> Duration {
839 self.history.group_interval
840 }
841
842 pub fn edit<R, I, S, T>(&mut self, edits: R, cx: &BackgroundExecutor) -> Operation
843 where
844 R: IntoIterator<IntoIter = I>,
845 I: ExactSizeIterator<Item = (Range<S>, T)>,
846 S: ToOffset,
847 T: Into<Arc<str>>,
848 {
849 let edits = edits
850 .into_iter()
851 .map(|(range, new_text)| (range, new_text.into()));
852
853 self.start_transaction();
854 let timestamp = self.lamport_clock.tick();
855 let operation = Operation::Edit(self.apply_local_edit(edits, timestamp, cx));
856
857 self.history.push(operation.clone());
858 self.history.push_undo(operation.timestamp());
859 self.snapshot.version.observe(operation.timestamp());
860 self.end_transaction();
861 operation
862 }
863
864 fn apply_local_edit<S: ToOffset, T: Into<Arc<str>>>(
865 &mut self,
866 edits: impl ExactSizeIterator<Item = (Range<S>, T)>,
867 timestamp: clock::Lamport,
868 executor: &BackgroundExecutor,
869 ) -> EditOperation {
870 let mut edits_patch = Patch::default();
871 let mut edit_op = EditOperation {
872 timestamp,
873 version: self.version(),
874 ranges: Vec::with_capacity(edits.len()),
875 new_text: Vec::with_capacity(edits.len()),
876 };
877 let mut new_insertions = Vec::new();
878 let mut insertion_offset = 0;
879 let mut insertion_slices = Vec::new();
880
881 let mut edits = edits
882 .map(|(range, new_text)| (range.to_offset(&*self), new_text))
883 .peekable();
884
885 let mut new_ropes =
886 RopeBuilder::new(self.visible_text.cursor(0), self.deleted_text.cursor(0));
887 let mut old_fragments = self.fragments.cursor::<FragmentTextSummary>(&None);
888 let mut new_fragments = old_fragments.slice(&edits.peek().unwrap().0.start, Bias::Right);
889 new_ropes.append(new_fragments.summary().text);
890
891 let mut fragment_start = old_fragments.start().visible;
892 for (range, new_text) in edits {
893 let new_text = LineEnding::normalize_arc(new_text.into());
894 let fragment_end = old_fragments.end().visible;
895
896 // If the current fragment ends before this range, then jump ahead to the first fragment
897 // that extends past the start of this range, reusing any intervening fragments.
898 if fragment_end < range.start {
899 // If the current fragment has been partially consumed, then consume the rest of it
900 // and advance to the next fragment before slicing.
901 if fragment_start > old_fragments.start().visible {
902 if fragment_end > fragment_start {
903 let mut suffix = old_fragments.item().unwrap().clone();
904 suffix.len = fragment_end - fragment_start;
905 suffix.insertion_offset += fragment_start - old_fragments.start().visible;
906 new_insertions.push(InsertionFragment::insert_new(&suffix));
907 new_ropes.push_fragment(&suffix, suffix.visible);
908 new_fragments.push(suffix, &None);
909 }
910 old_fragments.next();
911 }
912
913 let slice = old_fragments.slice(&range.start, Bias::Right);
914 new_ropes.append(slice.summary().text);
915 new_fragments.append(slice, &None);
916 fragment_start = old_fragments.start().visible;
917 }
918
919 let full_range_start = FullOffset(range.start + old_fragments.start().deleted);
920
921 // Preserve any portion of the current fragment that precedes this range.
922 if fragment_start < range.start {
923 let mut prefix = old_fragments.item().unwrap().clone();
924 prefix.len = range.start - fragment_start;
925 prefix.insertion_offset += fragment_start - old_fragments.start().visible;
926 prefix.id = Locator::between(&new_fragments.summary().max_id, &prefix.id);
927 new_insertions.push(InsertionFragment::insert_new(&prefix));
928 new_ropes.push_fragment(&prefix, prefix.visible);
929 new_fragments.push(prefix, &None);
930 fragment_start = range.start;
931 }
932
933 // Insert the new text before any existing fragments within the range.
934 if !new_text.is_empty() {
935 let new_start = new_fragments.summary().text.visible;
936
937 let fragment = Fragment {
938 id: Locator::between(
939 &new_fragments.summary().max_id,
940 old_fragments
941 .item()
942 .map_or(&Locator::max(), |old_fragment| &old_fragment.id),
943 ),
944 timestamp,
945 insertion_offset,
946 len: new_text.len(),
947 deletions: Default::default(),
948 max_undos: Default::default(),
949 visible: true,
950 };
951 edits_patch.push(Edit {
952 old: fragment_start..fragment_start,
953 new: new_start..new_start + new_text.len(),
954 });
955 insertion_slices.push(InsertionSlice::from_fragment(timestamp, &fragment));
956 new_insertions.push(InsertionFragment::insert_new(&fragment));
957 new_ropes.push_str(new_text.as_ref(), executor);
958 new_fragments.push(fragment, &None);
959 insertion_offset += new_text.len();
960 }
961
962 // Advance through every fragment that intersects this range, marking the intersecting
963 // portions as deleted.
964 while fragment_start < range.end {
965 let fragment = old_fragments.item().unwrap();
966 let fragment_end = old_fragments.end().visible;
967 let mut intersection = fragment.clone();
968 let intersection_end = cmp::min(range.end, fragment_end);
969 if fragment.visible {
970 intersection.len = intersection_end - fragment_start;
971 intersection.insertion_offset += fragment_start - old_fragments.start().visible;
972 intersection.id =
973 Locator::between(&new_fragments.summary().max_id, &intersection.id);
974 intersection.deletions.insert(timestamp);
975 intersection.visible = false;
976 }
977 if intersection.len > 0 {
978 if fragment.visible && !intersection.visible {
979 let new_start = new_fragments.summary().text.visible;
980 edits_patch.push(Edit {
981 old: fragment_start..intersection_end,
982 new: new_start..new_start,
983 });
984 insertion_slices
985 .push(InsertionSlice::from_fragment(timestamp, &intersection));
986 }
987 new_insertions.push(InsertionFragment::insert_new(&intersection));
988 new_ropes.push_fragment(&intersection, fragment.visible);
989 new_fragments.push(intersection, &None);
990 fragment_start = intersection_end;
991 }
992 if fragment_end <= range.end {
993 old_fragments.next();
994 }
995 }
996
997 let full_range_end = FullOffset(range.end + old_fragments.start().deleted);
998 edit_op.ranges.push(full_range_start..full_range_end);
999 edit_op.new_text.push(new_text);
1000 }
1001
1002 // If the current fragment has been partially consumed, then consume the rest of it
1003 // and advance to the next fragment before slicing.
1004 if fragment_start > old_fragments.start().visible {
1005 let fragment_end = old_fragments.end().visible;
1006 if fragment_end > fragment_start {
1007 let mut suffix = old_fragments.item().unwrap().clone();
1008 suffix.len = fragment_end - fragment_start;
1009 suffix.insertion_offset += fragment_start - old_fragments.start().visible;
1010 new_insertions.push(InsertionFragment::insert_new(&suffix));
1011 new_ropes.push_fragment(&suffix, suffix.visible);
1012 new_fragments.push(suffix, &None);
1013 }
1014 old_fragments.next();
1015 }
1016
1017 let suffix = old_fragments.suffix();
1018 new_ropes.append(suffix.summary().text);
1019 new_fragments.append(suffix, &None);
1020 let (visible_text, deleted_text) = new_ropes.finish();
1021 drop(old_fragments);
1022
1023 self.snapshot.fragments = new_fragments;
1024 self.snapshot.insertions.edit(new_insertions, ());
1025 self.snapshot.visible_text = visible_text;
1026 self.snapshot.deleted_text = deleted_text;
1027 self.subscriptions.publish_mut(&edits_patch);
1028 self.snapshot.insertion_slices.extend(insertion_slices);
1029 edit_op
1030 }
1031
1032 pub fn set_line_ending(&mut self, line_ending: LineEnding) {
1033 self.snapshot.line_ending = line_ending;
1034 }
1035
1036 pub fn apply_ops<I: IntoIterator<Item = Operation>>(
1037 &mut self,
1038 ops: I,
1039 executor: Option<&BackgroundExecutor>,
1040 ) {
1041 let mut deferred_ops = Vec::new();
1042 for op in ops {
1043 self.history.push(op.clone());
1044 if self.can_apply_op(&op) {
1045 self.apply_op(op, executor);
1046 } else {
1047 self.deferred_replicas.insert(op.replica_id());
1048 deferred_ops.push(op);
1049 }
1050 }
1051 self.deferred_ops.insert(deferred_ops);
1052 self.flush_deferred_ops(executor);
1053 }
1054
1055 fn apply_op(&mut self, op: Operation, executor: Option<&BackgroundExecutor>) {
1056 match op {
1057 Operation::Edit(edit) => {
1058 if !self.version.observed(edit.timestamp) {
1059 self.apply_remote_edit(
1060 &edit.version,
1061 &edit.ranges,
1062 &edit.new_text,
1063 edit.timestamp,
1064 executor,
1065 );
1066 self.snapshot.version.observe(edit.timestamp);
1067 self.lamport_clock.observe(edit.timestamp);
1068 self.resolve_edit(edit.timestamp);
1069 }
1070 }
1071 Operation::Undo(undo) => {
1072 if !self.version.observed(undo.timestamp) {
1073 self.apply_undo(&undo);
1074 self.snapshot.version.observe(undo.timestamp);
1075 self.lamport_clock.observe(undo.timestamp);
1076 }
1077 }
1078 }
1079 self.wait_for_version_txs.retain_mut(|(version, tx)| {
1080 if self.snapshot.version().observed_all(version) {
1081 tx.try_send(()).ok();
1082 false
1083 } else {
1084 true
1085 }
1086 });
1087 }
1088
1089 fn apply_remote_edit(
1090 &mut self,
1091 version: &clock::Global,
1092 ranges: &[Range<FullOffset>],
1093 new_text: &[Arc<str>],
1094 timestamp: clock::Lamport,
1095 executor: Option<&BackgroundExecutor>,
1096 ) {
1097 if ranges.is_empty() {
1098 return;
1099 }
1100
1101 let edits = ranges.iter().zip(new_text.iter());
1102 let mut edits_patch = Patch::default();
1103 let mut insertion_slices = Vec::new();
1104 let cx = Some(version.clone());
1105 let mut new_insertions = Vec::new();
1106 let mut insertion_offset = 0;
1107 let mut new_ropes =
1108 RopeBuilder::new(self.visible_text.cursor(0), self.deleted_text.cursor(0));
1109 let mut old_fragments = self
1110 .fragments
1111 .cursor::<Dimensions<VersionedFullOffset, usize>>(&cx);
1112 let mut new_fragments =
1113 old_fragments.slice(&VersionedFullOffset::Offset(ranges[0].start), Bias::Left);
1114 new_ropes.append(new_fragments.summary().text);
1115
1116 let mut fragment_start = old_fragments.start().0.full_offset();
1117 for (range, new_text) in edits {
1118 let fragment_end = old_fragments.end().0.full_offset();
1119
1120 // If the current fragment ends before this range, then jump ahead to the first fragment
1121 // that extends past the start of this range, reusing any intervening fragments.
1122 if fragment_end < range.start {
1123 // If the current fragment has been partially consumed, then consume the rest of it
1124 // and advance to the next fragment before slicing.
1125 if fragment_start > old_fragments.start().0.full_offset() {
1126 if fragment_end > fragment_start {
1127 let mut suffix = old_fragments.item().unwrap().clone();
1128 suffix.len = fragment_end.0 - fragment_start.0;
1129 suffix.insertion_offset +=
1130 fragment_start - old_fragments.start().0.full_offset();
1131 new_insertions.push(InsertionFragment::insert_new(&suffix));
1132 new_ropes.push_fragment(&suffix, suffix.visible);
1133 new_fragments.push(suffix, &None);
1134 }
1135 old_fragments.next();
1136 }
1137
1138 let slice =
1139 old_fragments.slice(&VersionedFullOffset::Offset(range.start), Bias::Left);
1140 new_ropes.append(slice.summary().text);
1141 new_fragments.append(slice, &None);
1142 fragment_start = old_fragments.start().0.full_offset();
1143 }
1144
1145 // If we are at the end of a non-concurrent fragment, advance to the next one.
1146 let fragment_end = old_fragments.end().0.full_offset();
1147 if fragment_end == range.start && fragment_end > fragment_start {
1148 let mut fragment = old_fragments.item().unwrap().clone();
1149 fragment.len = fragment_end.0 - fragment_start.0;
1150 fragment.insertion_offset += fragment_start - old_fragments.start().0.full_offset();
1151 new_insertions.push(InsertionFragment::insert_new(&fragment));
1152 new_ropes.push_fragment(&fragment, fragment.visible);
1153 new_fragments.push(fragment, &None);
1154 old_fragments.next();
1155 fragment_start = old_fragments.start().0.full_offset();
1156 }
1157
1158 // Skip over insertions that are concurrent to this edit, but have a lower lamport
1159 // timestamp.
1160 while let Some(fragment) = old_fragments.item() {
1161 if fragment_start == range.start && fragment.timestamp > timestamp {
1162 new_ropes.push_fragment(fragment, fragment.visible);
1163 new_fragments.push(fragment.clone(), &None);
1164 old_fragments.next();
1165 debug_assert_eq!(fragment_start, range.start);
1166 } else {
1167 break;
1168 }
1169 }
1170 debug_assert!(fragment_start <= range.start);
1171
1172 // Preserve any portion of the current fragment that precedes this range.
1173 if fragment_start < range.start {
1174 let mut prefix = old_fragments.item().unwrap().clone();
1175 prefix.len = range.start.0 - fragment_start.0;
1176 prefix.insertion_offset += fragment_start - old_fragments.start().0.full_offset();
1177 prefix.id = Locator::between(&new_fragments.summary().max_id, &prefix.id);
1178 new_insertions.push(InsertionFragment::insert_new(&prefix));
1179 fragment_start = range.start;
1180 new_ropes.push_fragment(&prefix, prefix.visible);
1181 new_fragments.push(prefix, &None);
1182 }
1183
1184 // Insert the new text before any existing fragments within the range.
1185 if !new_text.is_empty() {
1186 let mut old_start = old_fragments.start().1;
1187 if old_fragments.item().is_some_and(|f| f.visible) {
1188 old_start += fragment_start.0 - old_fragments.start().0.full_offset().0;
1189 }
1190 let new_start = new_fragments.summary().text.visible;
1191 let fragment = Fragment {
1192 id: Locator::between(
1193 &new_fragments.summary().max_id,
1194 old_fragments
1195 .item()
1196 .map_or(&Locator::max(), |old_fragment| &old_fragment.id),
1197 ),
1198 timestamp,
1199 insertion_offset,
1200 len: new_text.len(),
1201 deletions: Default::default(),
1202 max_undos: Default::default(),
1203 visible: true,
1204 };
1205 edits_patch.push(Edit {
1206 old: old_start..old_start,
1207 new: new_start..new_start + new_text.len(),
1208 });
1209 insertion_slices.push(InsertionSlice::from_fragment(timestamp, &fragment));
1210 new_insertions.push(InsertionFragment::insert_new(&fragment));
1211 match executor {
1212 Some(executor) => new_ropes.push_str(new_text, executor),
1213 None => new_ropes.push_str_small(new_text),
1214 }
1215 new_fragments.push(fragment, &None);
1216 insertion_offset += new_text.len();
1217 }
1218
1219 // Advance through every fragment that intersects this range, marking the intersecting
1220 // portions as deleted.
1221 while fragment_start < range.end {
1222 let fragment = old_fragments.item().unwrap();
1223 let fragment_end = old_fragments.end().0.full_offset();
1224 let mut intersection = fragment.clone();
1225 let intersection_end = cmp::min(range.end, fragment_end);
1226 if fragment.was_visible(version, &self.undo_map) {
1227 intersection.len = intersection_end.0 - fragment_start.0;
1228 intersection.insertion_offset +=
1229 fragment_start - old_fragments.start().0.full_offset();
1230 intersection.id =
1231 Locator::between(&new_fragments.summary().max_id, &intersection.id);
1232 intersection.deletions.insert(timestamp);
1233 intersection.visible = false;
1234 insertion_slices.push(InsertionSlice::from_fragment(timestamp, &intersection));
1235 }
1236 if intersection.len > 0 {
1237 if fragment.visible && !intersection.visible {
1238 let old_start = old_fragments.start().1
1239 + (fragment_start.0 - old_fragments.start().0.full_offset().0);
1240 let new_start = new_fragments.summary().text.visible;
1241 edits_patch.push(Edit {
1242 old: old_start..old_start + intersection.len,
1243 new: new_start..new_start,
1244 });
1245 }
1246 new_insertions.push(InsertionFragment::insert_new(&intersection));
1247 new_ropes.push_fragment(&intersection, fragment.visible);
1248 new_fragments.push(intersection, &None);
1249 fragment_start = intersection_end;
1250 }
1251 if fragment_end <= range.end {
1252 old_fragments.next();
1253 }
1254 }
1255 }
1256
1257 // If the current fragment has been partially consumed, then consume the rest of it
1258 // and advance to the next fragment before slicing.
1259 if fragment_start > old_fragments.start().0.full_offset() {
1260 let fragment_end = old_fragments.end().0.full_offset();
1261 if fragment_end > fragment_start {
1262 let mut suffix = old_fragments.item().unwrap().clone();
1263 suffix.len = fragment_end.0 - fragment_start.0;
1264 suffix.insertion_offset += fragment_start - old_fragments.start().0.full_offset();
1265 new_insertions.push(InsertionFragment::insert_new(&suffix));
1266 new_ropes.push_fragment(&suffix, suffix.visible);
1267 new_fragments.push(suffix, &None);
1268 }
1269 old_fragments.next();
1270 }
1271
1272 let suffix = old_fragments.suffix();
1273 new_ropes.append(suffix.summary().text);
1274 new_fragments.append(suffix, &None);
1275 let (visible_text, deleted_text) = new_ropes.finish();
1276 drop(old_fragments);
1277
1278 self.snapshot.fragments = new_fragments;
1279 self.snapshot.visible_text = visible_text;
1280 self.snapshot.deleted_text = deleted_text;
1281 self.snapshot.insertions.edit(new_insertions, ());
1282 self.snapshot.insertion_slices.extend(insertion_slices);
1283 self.subscriptions.publish_mut(&edits_patch)
1284 }
1285
1286 fn fragment_ids_for_edits<'a>(
1287 &'a self,
1288 edit_ids: impl Iterator<Item = &'a clock::Lamport>,
1289 ) -> Vec<&'a Locator> {
1290 // Get all of the insertion slices changed by the given edits.
1291 let mut insertion_slices = Vec::new();
1292 for edit_id in edit_ids {
1293 let insertion_slice = InsertionSlice {
1294 edit_id: *edit_id,
1295 insertion_id: clock::Lamport::MIN,
1296 range: 0..0,
1297 };
1298 let slices = self
1299 .snapshot
1300 .insertion_slices
1301 .iter_from(&insertion_slice)
1302 .take_while(|slice| slice.edit_id == *edit_id);
1303 insertion_slices.extend(slices)
1304 }
1305 insertion_slices
1306 .sort_unstable_by_key(|s| (s.insertion_id, s.range.start, Reverse(s.range.end)));
1307
1308 // Get all of the fragments corresponding to these insertion slices.
1309 let mut fragment_ids = Vec::new();
1310 let mut insertions_cursor = self.insertions.cursor::<InsertionFragmentKey>(());
1311 for insertion_slice in &insertion_slices {
1312 if insertion_slice.insertion_id != insertions_cursor.start().timestamp
1313 || insertion_slice.range.start > insertions_cursor.start().split_offset
1314 {
1315 insertions_cursor.seek_forward(
1316 &InsertionFragmentKey {
1317 timestamp: insertion_slice.insertion_id,
1318 split_offset: insertion_slice.range.start,
1319 },
1320 Bias::Left,
1321 );
1322 }
1323 while let Some(item) = insertions_cursor.item() {
1324 if item.timestamp != insertion_slice.insertion_id
1325 || item.split_offset >= insertion_slice.range.end
1326 {
1327 break;
1328 }
1329 fragment_ids.push(&item.fragment_id);
1330 insertions_cursor.next();
1331 }
1332 }
1333 fragment_ids.sort_unstable();
1334 fragment_ids
1335 }
1336
1337 fn apply_undo(&mut self, undo: &UndoOperation) {
1338 self.snapshot.undo_map.insert(undo);
1339
1340 let mut edits = Patch::default();
1341 let mut old_fragments = self
1342 .fragments
1343 .cursor::<Dimensions<Option<&Locator>, usize>>(&None);
1344 let mut new_fragments = SumTree::new(&None);
1345 let mut new_ropes =
1346 RopeBuilder::new(self.visible_text.cursor(0), self.deleted_text.cursor(0));
1347
1348 for fragment_id in self.fragment_ids_for_edits(undo.counts.keys()) {
1349 let preceding_fragments = old_fragments.slice(&Some(fragment_id), Bias::Left);
1350 new_ropes.append(preceding_fragments.summary().text);
1351 new_fragments.append(preceding_fragments, &None);
1352
1353 if let Some(fragment) = old_fragments.item() {
1354 let mut fragment = fragment.clone();
1355 let fragment_was_visible = fragment.visible;
1356
1357 fragment.visible = fragment.is_visible(&self.undo_map);
1358 fragment.max_undos.observe(undo.timestamp);
1359
1360 let old_start = old_fragments.start().1;
1361 let new_start = new_fragments.summary().text.visible;
1362 if fragment_was_visible && !fragment.visible {
1363 edits.push(Edit {
1364 old: old_start..old_start + fragment.len,
1365 new: new_start..new_start,
1366 });
1367 } else if !fragment_was_visible && fragment.visible {
1368 edits.push(Edit {
1369 old: old_start..old_start,
1370 new: new_start..new_start + fragment.len,
1371 });
1372 }
1373 new_ropes.push_fragment(&fragment, fragment_was_visible);
1374 new_fragments.push(fragment, &None);
1375
1376 old_fragments.next();
1377 }
1378 }
1379
1380 let suffix = old_fragments.suffix();
1381 new_ropes.append(suffix.summary().text);
1382 new_fragments.append(suffix, &None);
1383
1384 drop(old_fragments);
1385 let (visible_text, deleted_text) = new_ropes.finish();
1386 self.snapshot.fragments = new_fragments;
1387 self.snapshot.visible_text = visible_text;
1388 self.snapshot.deleted_text = deleted_text;
1389 self.subscriptions.publish_mut(&edits);
1390 }
1391
1392 fn flush_deferred_ops(&mut self, executor: Option<&BackgroundExecutor>) {
1393 self.deferred_replicas.clear();
1394 let mut deferred_ops = Vec::new();
1395 for op in self.deferred_ops.drain().iter().cloned() {
1396 if self.can_apply_op(&op) {
1397 self.apply_op(op, executor);
1398 } else {
1399 self.deferred_replicas.insert(op.replica_id());
1400 deferred_ops.push(op);
1401 }
1402 }
1403 self.deferred_ops.insert(deferred_ops);
1404 }
1405
1406 fn can_apply_op(&self, op: &Operation) -> bool {
1407 if self.deferred_replicas.contains(&op.replica_id()) {
1408 false
1409 } else {
1410 self.version.observed_all(match op {
1411 Operation::Edit(edit) => &edit.version,
1412 Operation::Undo(undo) => &undo.version,
1413 })
1414 }
1415 }
1416
1417 pub fn has_deferred_ops(&self) -> bool {
1418 !self.deferred_ops.is_empty()
1419 }
1420
1421 pub fn peek_undo_stack(&self) -> Option<&HistoryEntry> {
1422 self.history.undo_stack.last()
1423 }
1424
1425 pub fn peek_redo_stack(&self) -> Option<&HistoryEntry> {
1426 self.history.redo_stack.last()
1427 }
1428
1429 pub fn start_transaction(&mut self) -> Option<TransactionId> {
1430 self.start_transaction_at(Instant::now())
1431 }
1432
1433 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
1434 self.history
1435 .start_transaction(self.version.clone(), now, &mut self.lamport_clock)
1436 }
1437
1438 pub fn end_transaction(&mut self) -> Option<(TransactionId, clock::Global)> {
1439 self.end_transaction_at(Instant::now())
1440 }
1441
1442 pub fn end_transaction_at(&mut self, now: Instant) -> Option<(TransactionId, clock::Global)> {
1443 if let Some(entry) = self.history.end_transaction(now) {
1444 let since = entry.transaction.start.clone();
1445 let id = self.history.group().unwrap();
1446 Some((id, since))
1447 } else {
1448 None
1449 }
1450 }
1451
1452 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
1453 self.history.finalize_last_transaction()
1454 }
1455
1456 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
1457 self.history.group_until(transaction_id);
1458 }
1459
1460 pub fn base_text(&self) -> &Rope {
1461 &self.history.base_text
1462 }
1463
1464 pub fn operations(&self) -> &TreeMap<clock::Lamport, Operation> {
1465 &self.history.operations
1466 }
1467
1468 pub fn undo(&mut self) -> Option<(TransactionId, Operation)> {
1469 if let Some(entry) = self.history.pop_undo() {
1470 let transaction = entry.transaction.clone();
1471 let transaction_id = transaction.id;
1472 let op = self.undo_or_redo(transaction);
1473 Some((transaction_id, op))
1474 } else {
1475 None
1476 }
1477 }
1478
1479 pub fn undo_transaction(&mut self, transaction_id: TransactionId) -> Option<Operation> {
1480 let transaction = self
1481 .history
1482 .remove_from_undo(transaction_id)?
1483 .transaction
1484 .clone();
1485 Some(self.undo_or_redo(transaction))
1486 }
1487
1488 pub fn undo_to_transaction(&mut self, transaction_id: TransactionId) -> Vec<Operation> {
1489 let transactions = self
1490 .history
1491 .remove_from_undo_until(transaction_id)
1492 .iter()
1493 .map(|entry| entry.transaction.clone())
1494 .collect::<Vec<_>>();
1495
1496 transactions
1497 .into_iter()
1498 .map(|transaction| self.undo_or_redo(transaction))
1499 .collect()
1500 }
1501
1502 pub fn forget_transaction(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
1503 self.history.forget(transaction_id)
1504 }
1505
1506 pub fn get_transaction(&self, transaction_id: TransactionId) -> Option<&Transaction> {
1507 self.history.transaction(transaction_id)
1508 }
1509
1510 pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
1511 self.history.merge_transactions(transaction, destination);
1512 }
1513
1514 pub fn redo(&mut self) -> Option<(TransactionId, Operation)> {
1515 if let Some(entry) = self.history.pop_redo() {
1516 let transaction = entry.transaction.clone();
1517 let transaction_id = transaction.id;
1518 let op = self.undo_or_redo(transaction);
1519 Some((transaction_id, op))
1520 } else {
1521 None
1522 }
1523 }
1524
1525 pub fn redo_to_transaction(&mut self, transaction_id: TransactionId) -> Vec<Operation> {
1526 let transactions = self
1527 .history
1528 .remove_from_redo(transaction_id)
1529 .iter()
1530 .map(|entry| entry.transaction.clone())
1531 .collect::<Vec<_>>();
1532
1533 transactions
1534 .into_iter()
1535 .map(|transaction| self.undo_or_redo(transaction))
1536 .collect()
1537 }
1538
1539 fn undo_or_redo(&mut self, transaction: Transaction) -> Operation {
1540 let mut counts = HashMap::default();
1541 for edit_id in transaction.edit_ids {
1542 counts.insert(edit_id, self.undo_map.undo_count(edit_id).saturating_add(1));
1543 }
1544
1545 let operation = self.undo_operations(counts);
1546 self.history.push(operation.clone());
1547 operation
1548 }
1549
1550 pub fn undo_operations(&mut self, counts: HashMap<clock::Lamport, u32>) -> Operation {
1551 let timestamp = self.lamport_clock.tick();
1552 let version = self.version();
1553 self.snapshot.version.observe(timestamp);
1554 let undo = UndoOperation {
1555 timestamp,
1556 version,
1557 counts,
1558 };
1559 self.apply_undo(&undo);
1560 Operation::Undo(undo)
1561 }
1562
1563 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
1564 self.history.push_transaction(transaction, now);
1565 }
1566
    /// Differs from `push_transaction` in that it does not clear the redo
    /// stack. Intended to be used to create a parent transaction to merge
    /// potential child transactions into.
    ///
    /// The caller is responsible for removing it from the undo history using
    /// `forget_transaction` if no edits are merged into it. Otherwise, if edits
    /// are merged into this transaction, the caller is responsible for ensuring
    /// the redo stack is cleared. The easiest way to ensure this is to create
    /// transactions with the usual `start_transaction` and `end_transaction`
    /// methods and merge the resulting transactions into the transaction
    /// created by this method.
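    ///
    /// A hedged sketch of the pattern described above (`buffer` and `executor`
    /// are assumed to be in scope; they are not part of this API):
    ///
    /// ```ignore
    /// let parent = buffer.push_empty_transaction(Instant::now());
    ///
    /// // Perform edits inside an ordinary transaction...
    /// buffer.start_transaction();
    /// buffer.edit([(0..0, "hello")], &executor);
    ///
    /// if let Some((child, _)) = buffer.end_transaction() {
    ///     // ...and fold the resulting transaction into the parent.
    ///     buffer.merge_transactions(child, parent);
    /// } else {
    ///     // No edits were recorded; remove the placeholder transaction.
    ///     buffer.forget_transaction(parent);
    /// }
    /// ```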
1580 pub fn push_empty_transaction(&mut self, now: Instant) -> TransactionId {
1581 self.history
1582 .push_empty_transaction(self.version.clone(), now, &mut self.lamport_clock)
1583 }
1584
1585 pub fn edited_ranges_for_transaction_id<D>(
1586 &self,
1587 transaction_id: TransactionId,
1588 ) -> impl '_ + Iterator<Item = Range<D>>
1589 where
1590 D: TextDimension,
1591 {
1592 self.history
1593 .transaction(transaction_id)
1594 .into_iter()
1595 .flat_map(|transaction| self.edited_ranges_for_transaction(transaction))
1596 }
1597
1598 pub fn edited_ranges_for_edit_ids<'a, D>(
1599 &'a self,
1600 edit_ids: impl IntoIterator<Item = &'a clock::Lamport>,
1601 ) -> impl 'a + Iterator<Item = Range<D>>
1602 where
1603 D: TextDimension,
1604 {
        // Get the offset ranges of the fragments touched by these edits.
1606 let mut cursor = self
1607 .fragments
1608 .cursor::<Dimensions<Option<&Locator>, usize>>(&None);
1609 let offset_ranges = self
1610 .fragment_ids_for_edits(edit_ids.into_iter())
1611 .into_iter()
1612 .filter_map(move |fragment_id| {
1613 cursor.seek_forward(&Some(fragment_id), Bias::Left);
1614 let fragment = cursor.item()?;
1615 let start_offset = cursor.start().1;
1616 let end_offset = start_offset + if fragment.visible { fragment.len } else { 0 };
1617 Some(start_offset..end_offset)
1618 });
1619
        // Combine adjacent ranges.
1621 let mut prev_range: Option<Range<usize>> = None;
1622 let disjoint_ranges = offset_ranges
1623 .map(Some)
1624 .chain([None])
1625 .filter_map(move |range| {
1626 if let Some((range, prev_range)) = range.as_ref().zip(prev_range.as_mut())
1627 && prev_range.end == range.start
1628 {
1629 prev_range.end = range.end;
1630 return None;
1631 }
1632 let result = prev_range.clone();
1633 prev_range = range;
1634 result
1635 });
1636
        // Convert to the desired text dimension.
1638 let mut position = D::zero(());
1639 let mut rope_cursor = self.visible_text.cursor(0);
1640 disjoint_ranges.map(move |range| {
1641 position.add_assign(&rope_cursor.summary(range.start));
1642 let start = position;
1643 position.add_assign(&rope_cursor.summary(range.end));
1644 let end = position;
1645 start..end
1646 })
1647 }
1648
1649 pub fn edited_ranges_for_transaction<'a, D>(
1650 &'a self,
1651 transaction: &'a Transaction,
1652 ) -> impl 'a + Iterator<Item = Range<D>>
1653 where
1654 D: TextDimension,
1655 {
1656 self.edited_ranges_for_edit_ids(&transaction.edit_ids)
1657 }
1658
1659 pub fn subscribe(&mut self) -> Subscription {
1660 self.subscriptions.subscribe()
1661 }
1662
1663 pub fn wait_for_edits<It: IntoIterator<Item = clock::Lamport>>(
1664 &mut self,
1665 edit_ids: It,
1666 ) -> impl 'static + Future<Output = Result<()>> + use<It> {
1667 let mut futures = Vec::new();
1668 for edit_id in edit_ids {
1669 if !self.version.observed(edit_id) {
1670 let (tx, rx) = oneshot::channel();
1671 self.edit_id_resolvers.entry(edit_id).or_default().push(tx);
1672 futures.push(rx);
1673 }
1674 }
1675
1676 async move {
1677 for mut future in futures {
1678 if future.recv().await.is_none() {
1679 anyhow::bail!("gave up waiting for edits");
1680 }
1681 }
1682 Ok(())
1683 }
1684 }
1685
1686 pub fn wait_for_anchors<It: IntoIterator<Item = Anchor>>(
1687 &mut self,
1688 anchors: It,
1689 ) -> impl 'static + Future<Output = Result<()>> + use<It> {
1690 let mut futures = Vec::new();
1691 for anchor in anchors {
1692 if !self.version.observed(anchor.timestamp)
1693 && anchor != Anchor::MAX
1694 && anchor != Anchor::MIN
1695 {
1696 let (tx, rx) = oneshot::channel();
1697 self.edit_id_resolvers
1698 .entry(anchor.timestamp)
1699 .or_default()
1700 .push(tx);
1701 futures.push(rx);
1702 }
1703 }
1704
1705 async move {
1706 for mut future in futures {
1707 if future.recv().await.is_none() {
1708 anyhow::bail!("gave up waiting for anchors");
1709 }
1710 }
1711 Ok(())
1712 }
1713 }
1714
1715 pub fn wait_for_version(
1716 &mut self,
1717 version: clock::Global,
1718 ) -> impl Future<Output = Result<()>> + use<> {
1719 let mut rx = None;
1720 if !self.snapshot.version.observed_all(&version) {
1721 let channel = oneshot::channel();
1722 self.wait_for_version_txs.push((version, channel.0));
1723 rx = Some(channel.1);
1724 }
1725 async move {
1726 if let Some(mut rx) = rx
1727 && rx.recv().await.is_none()
1728 {
1729 anyhow::bail!("gave up waiting for version");
1730 }
1731 Ok(())
1732 }
1733 }
1734
1735 pub fn give_up_waiting(&mut self) {
1736 self.edit_id_resolvers.clear();
1737 self.wait_for_version_txs.clear();
1738 }
1739
1740 fn resolve_edit(&mut self, edit_id: clock::Lamport) {
1741 for mut tx in self
1742 .edit_id_resolvers
1743 .remove(&edit_id)
1744 .into_iter()
1745 .flatten()
1746 {
1747 tx.try_send(()).ok();
1748 }
1749 }
1750}
1751
1752#[cfg(any(test, feature = "test-support"))]
1753impl Buffer {
1754 #[track_caller]
1755 pub fn edit_via_marked_text(&mut self, marked_string: &str, cx: &BackgroundExecutor) {
1756 let edits = self.edits_for_marked_text(marked_string);
1757 self.edit(edits, cx);
1758 }
1759
1760 #[track_caller]
1761 pub fn edits_for_marked_text(&self, marked_string: &str) -> Vec<(Range<usize>, String)> {
1762 let old_text = self.text();
1763 let (new_text, mut ranges) = util::test::marked_text_ranges(marked_string, false);
1764 if ranges.is_empty() {
1765 ranges.push(0..new_text.len());
1766 }
1767
1768 assert_eq!(
1769 old_text[..ranges[0].start],
1770 new_text[..ranges[0].start],
1771 "invalid edit"
1772 );
1773
1774 let mut delta = 0;
1775 let mut edits = Vec::new();
1776 let mut ranges = ranges.into_iter().peekable();
1777
1778 while let Some(inserted_range) = ranges.next() {
1779 let new_start = inserted_range.start;
1780 let old_start = (new_start as isize - delta) as usize;
1781
1782 let following_text = if let Some(next_range) = ranges.peek() {
1783 &new_text[inserted_range.end..next_range.start]
1784 } else {
1785 &new_text[inserted_range.end..]
1786 };
1787
1788 let inserted_len = inserted_range.len();
1789 let deleted_len = old_text[old_start..]
1790 .find(following_text)
1791 .expect("invalid edit");
1792
1793 let old_range = old_start..old_start + deleted_len;
1794 edits.push((old_range, new_text[inserted_range].to_string()));
1795 delta += inserted_len as isize - deleted_len as isize;
1796 }
1797
1798 assert_eq!(
1799 old_text.len() as isize + delta,
1800 new_text.len() as isize,
1801 "invalid edit"
1802 );
1803
1804 edits
1805 }
1806
1807 pub fn check_invariants(&self) {
1808 // Ensure every fragment is ordered by locator in the fragment tree and corresponds
1809 // to an insertion fragment in the insertions tree.
1810 let mut prev_fragment_id = Locator::min();
1811 for fragment in self.snapshot.fragments.items(&None) {
1812 assert!(fragment.id > prev_fragment_id);
1813 prev_fragment_id = fragment.id.clone();
1814
1815 let insertion_fragment = self
1816 .snapshot
1817 .insertions
1818 .get(
1819 &InsertionFragmentKey {
1820 timestamp: fragment.timestamp,
1821 split_offset: fragment.insertion_offset,
1822 },
1823 (),
1824 )
1825 .unwrap();
1826 assert_eq!(
1827 insertion_fragment.fragment_id, fragment.id,
1828 "fragment: {:?}\ninsertion: {:?}",
1829 fragment, insertion_fragment
1830 );
1831 }
1832
1833 let mut cursor = self.snapshot.fragments.cursor::<Option<&Locator>>(&None);
1834 for insertion_fragment in self.snapshot.insertions.cursor::<()>(()) {
1835 cursor.seek(&Some(&insertion_fragment.fragment_id), Bias::Left);
1836 let fragment = cursor.item().unwrap();
1837 assert_eq!(insertion_fragment.fragment_id, fragment.id);
1838 assert_eq!(insertion_fragment.split_offset, fragment.insertion_offset);
1839 }
1840
1841 let fragment_summary = self.snapshot.fragments.summary();
1842 assert_eq!(
1843 fragment_summary.text.visible,
1844 self.snapshot.visible_text.len()
1845 );
1846 assert_eq!(
1847 fragment_summary.text.deleted,
1848 self.snapshot.deleted_text.len()
1849 );
1850
1851 assert!(!self.text().contains("\r\n"));
1852 }
1853
1854 pub fn set_group_interval(&mut self, group_interval: Duration) {
1855 self.history.group_interval = group_interval;
1856 }
1857
1858 pub fn random_byte_range(&self, start_offset: usize, rng: &mut impl rand::Rng) -> Range<usize> {
1859 let end = self.clip_offset(rng.random_range(start_offset..=self.len()), Bias::Right);
1860 let start = self.clip_offset(rng.random_range(start_offset..=end), Bias::Right);
1861 start..end
1862 }
1863
1864 pub fn get_random_edits<T>(
1865 &self,
1866 rng: &mut T,
1867 edit_count: usize,
1868 ) -> Vec<(Range<usize>, Arc<str>)>
1869 where
1870 T: rand::Rng,
1871 {
1872 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
1873 let mut last_end = None;
1874 for _ in 0..edit_count {
1875 if last_end.is_some_and(|last_end| last_end >= self.len()) {
1876 break;
1877 }
1878 let new_start = last_end.map_or(0, |last_end| last_end + 1);
1879 let range = self.random_byte_range(new_start, rng);
1880 last_end = Some(range.end);
1881
1882 let new_text_len = rng.random_range(0..10);
1883 let new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
1884
1885 edits.push((range, new_text.into()));
1886 }
1887 edits
1888 }
1889
1890 pub fn randomly_edit<T>(
1891 &mut self,
1892 rng: &mut T,
1893 edit_count: usize,
1894 executor: &BackgroundExecutor,
1895 ) -> (Vec<(Range<usize>, Arc<str>)>, Operation)
1896 where
1897 T: rand::Rng,
1898 {
1899 let mut edits = self.get_random_edits(rng, edit_count);
1900 log::info!("mutating buffer {:?} with {:?}", self.replica_id, edits);
1901
1902 let op = self.edit(edits.iter().cloned(), executor);
1903 if let Operation::Edit(edit) = &op {
1904 assert_eq!(edits.len(), edit.new_text.len());
1905 for (edit, new_text) in edits.iter_mut().zip(&edit.new_text) {
1906 edit.1 = new_text.clone();
1907 }
1908 } else {
1909 unreachable!()
1910 }
1911
1912 (edits, op)
1913 }
1914
1915 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng) -> Vec<Operation> {
1916 use rand::prelude::*;
1917
1918 let mut ops = Vec::new();
1919 for _ in 0..rng.random_range(1..=5) {
1920 if let Some(entry) = self.history.undo_stack.choose(rng) {
1921 let transaction = entry.transaction.clone();
1922 log::info!(
1923 "undoing buffer {:?} transaction {:?}",
1924 self.replica_id,
1925 transaction
1926 );
1927 ops.push(self.undo_or_redo(transaction));
1928 }
1929 }
1930 ops
1931 }
1932}
1933
1934impl Deref for Buffer {
1935 type Target = BufferSnapshot;
1936
1937 fn deref(&self) -> &Self::Target {
1938 &self.snapshot
1939 }
1940}
1941
1942impl BufferSnapshot {
1943 pub fn as_rope(&self) -> &Rope {
1944 &self.visible_text
1945 }
1946
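    /// Reconstructs the buffer's text as it was at `version` by walking the fragments changed
    /// since then and splicing in text from either the visible or the deleted rope, depending on
    /// whether each fragment was visible at that version.
    ///
    /// A minimal usage sketch (illustrative, not a doctest; `text_before_the_edits` is a
    /// hypothetical value captured alongside `old_version`):
    ///
    /// ```ignore
    /// let old_version = buffer.version().clone();
    /// // ... apply some edits ...
    /// let old_rope = buffer.rope_for_version(&old_version); // via Deref<Target = BufferSnapshot>
    /// assert_eq!(old_rope.to_string(), text_before_the_edits);
    /// ```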
1947 pub fn rope_for_version(&self, version: &clock::Global) -> Rope {
1948 let mut rope = Rope::new();
1949
1950 let mut cursor = self
1951 .fragments
1952 .filter::<_, FragmentTextSummary>(&None, move |summary| {
1953 !version.observed_all(&summary.max_version)
1954 });
1955 cursor.next();
1956
1957 let mut visible_cursor = self.visible_text.cursor(0);
1958 let mut deleted_cursor = self.deleted_text.cursor(0);
1959
1960 while let Some(fragment) = cursor.item() {
1961 if cursor.start().visible > visible_cursor.offset() {
1962 let text = visible_cursor.slice(cursor.start().visible);
1963 rope.append(text);
1964 }
1965
1966 if fragment.was_visible(version, &self.undo_map) {
1967 if fragment.visible {
1968 let text = visible_cursor.slice(cursor.end().visible);
1969 rope.append(text);
1970 } else {
1971 deleted_cursor.seek_forward(cursor.start().deleted);
1972 let text = deleted_cursor.slice(cursor.end().deleted);
1973 rope.append(text);
1974 }
1975 } else if fragment.visible {
1976 visible_cursor.seek_forward(cursor.end().visible);
1977 }
1978
1979 cursor.next();
1980 }
1981
1982 if cursor.start().visible > visible_cursor.offset() {
1983 let text = visible_cursor.slice(cursor.start().visible);
1984 rope.append(text);
1985 }
1986
1987 rope
1988 }
1989
1990 pub fn remote_id(&self) -> BufferId {
1991 self.remote_id
1992 }
1993
1994 pub fn replica_id(&self) -> ReplicaId {
1995 self.replica_id
1996 }
1997
1998 pub fn row_count(&self) -> u32 {
1999 self.max_point().row + 1
2000 }
2001
2002 pub fn len(&self) -> usize {
2003 self.visible_text.len()
2004 }
2005
2006 pub fn is_empty(&self) -> bool {
2007 self.len() == 0
2008 }
2009
2010 pub fn chars(&self) -> impl Iterator<Item = char> + '_ {
2011 self.chars_at(0)
2012 }
2013
2014 pub fn chars_for_range<T: ToOffset>(&self, range: Range<T>) -> impl Iterator<Item = char> + '_ {
2015 self.text_for_range(range).flat_map(str::chars)
2016 }
2017
2018 pub fn reversed_chars_for_range<T: ToOffset>(
2019 &self,
2020 range: Range<T>,
2021 ) -> impl Iterator<Item = char> + '_ {
2022 self.reversed_chunks_in_range(range)
2023 .flat_map(|chunk| chunk.chars().rev())
2024 }
2025
2026 pub fn contains_str_at<T>(&self, position: T, needle: &str) -> bool
2027 where
2028 T: ToOffset,
2029 {
2030 let position = position.to_offset(self);
2031 position == self.clip_offset(position, Bias::Left)
2032 && self
2033 .bytes_in_range(position..self.len())
2034 .flatten()
2035 .copied()
2036 .take(needle.len())
2037 .eq(needle.bytes())
2038 }
2039
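    /// Returns the range of the longest prefix of `needle` that case-insensitively matches the
    /// buffer text ending at `position`.
    ///
    /// Illustrative sketch (not a doctest; `snapshot` is assumed to contain `"Hello wor"`):
    ///
    /// ```ignore
    /// // The text before offset 9 ends in "wor", which matches the first three
    /// // characters of "world", so the returned range covers that trailing "wor".
    /// let range = snapshot.common_prefix_at(9usize, "world");
    /// assert_eq!(range, 6..9);
    /// ```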
2040 pub fn common_prefix_at<T>(&self, position: T, needle: &str) -> Range<T>
2041 where
2042 T: ToOffset + TextDimension,
2043 {
2044 let offset = position.to_offset(self);
2045 let common_prefix_len = needle
2046 .char_indices()
2047 .map(|(index, _)| index)
2048 .chain([needle.len()])
2049 .take_while(|&len| len <= offset)
2050 .filter(|&len| {
2051 let left = self
2052 .chars_for_range(offset - len..offset)
2053 .flat_map(char::to_lowercase);
2054 let right = needle[..len].chars().flat_map(char::to_lowercase);
2055 left.eq(right)
2056 })
2057 .last()
2058 .unwrap_or(0);
2059 let start_offset = offset - common_prefix_len;
2060 let start = self.text_summary_for_range(0..start_offset);
2061 start..position
2062 }
2063
2064 pub fn text(&self) -> String {
2065 self.visible_text.to_string()
2066 }
2067
2068 pub fn line_ending(&self) -> LineEnding {
2069 self.line_ending
2070 }
2071
2072 pub fn deleted_text(&self) -> String {
2073 self.deleted_text.to_string()
2074 }
2075
2076 pub fn fragments(&self) -> impl Iterator<Item = &Fragment> {
2077 self.fragments.iter()
2078 }
2079
2080 pub fn text_summary(&self) -> TextSummary {
2081 self.visible_text.summary()
2082 }
2083
2084 pub fn max_point(&self) -> Point {
2085 self.visible_text.max_point()
2086 }
2087
2088 pub fn max_point_utf16(&self) -> PointUtf16 {
2089 self.visible_text.max_point_utf16()
2090 }
2091
2092 pub fn point_to_offset(&self, point: Point) -> usize {
2093 self.visible_text.point_to_offset(point)
2094 }
2095
2096 pub fn point_to_offset_utf16(&self, point: Point) -> OffsetUtf16 {
2097 self.visible_text.point_to_offset_utf16(point)
2098 }
2099
2100 pub fn point_utf16_to_offset_utf16(&self, point: PointUtf16) -> OffsetUtf16 {
2101 self.visible_text.point_utf16_to_offset_utf16(point)
2102 }
2103
2104 pub fn point_utf16_to_offset(&self, point: PointUtf16) -> usize {
2105 self.visible_text.point_utf16_to_offset(point)
2106 }
2107
2108 pub fn unclipped_point_utf16_to_offset(&self, point: Unclipped<PointUtf16>) -> usize {
2109 self.visible_text.unclipped_point_utf16_to_offset(point)
2110 }
2111
2112 pub fn unclipped_point_utf16_to_point(&self, point: Unclipped<PointUtf16>) -> Point {
2113 self.visible_text.unclipped_point_utf16_to_point(point)
2114 }
2115
2116 pub fn offset_utf16_to_offset(&self, offset: OffsetUtf16) -> usize {
2117 self.visible_text.offset_utf16_to_offset(offset)
2118 }
2119
2120 pub fn offset_to_offset_utf16(&self, offset: usize) -> OffsetUtf16 {
2121 self.visible_text.offset_to_offset_utf16(offset)
2122 }
2123
2124 pub fn offset_to_point(&self, offset: usize) -> Point {
2125 self.visible_text.offset_to_point(offset)
2126 }
2127
2128 pub fn offset_to_point_utf16(&self, offset: usize) -> PointUtf16 {
2129 self.visible_text.offset_to_point_utf16(offset)
2130 }
2131
2132 pub fn point_to_point_utf16(&self, point: Point) -> PointUtf16 {
2133 self.visible_text.point_to_point_utf16(point)
2134 }
2135
2136 pub fn point_utf16_to_point(&self, point: PointUtf16) -> Point {
2137 self.visible_text.point_utf16_to_point(point)
2138 }
2139
2140 pub fn version(&self) -> &clock::Global {
2141 &self.version
2142 }
2143
2144 pub fn chars_at<T: ToOffset>(&self, position: T) -> impl Iterator<Item = char> + '_ {
2145 let offset = position.to_offset(self);
2146 self.visible_text.chars_at(offset)
2147 }
2148
2149 pub fn reversed_chars_at<T: ToOffset>(&self, position: T) -> impl Iterator<Item = char> + '_ {
2150 let offset = position.to_offset(self);
2151 self.visible_text.reversed_chars_at(offset)
2152 }
2153
2154 pub fn reversed_chunks_in_range<T: ToOffset>(&self, range: Range<T>) -> rope::Chunks<'_> {
2155 let range = range.start.to_offset(self)..range.end.to_offset(self);
2156 self.visible_text.reversed_chunks_in_range(range)
2157 }
2158
2159 pub fn bytes_in_range<T: ToOffset>(&self, range: Range<T>) -> rope::Bytes<'_> {
2160 let start = range.start.to_offset(self);
2161 let end = range.end.to_offset(self);
2162 self.visible_text.bytes_in_range(start..end)
2163 }
2164
2165 pub fn reversed_bytes_in_range<T: ToOffset>(&self, range: Range<T>) -> rope::Bytes<'_> {
2166 let start = range.start.to_offset(self);
2167 let end = range.end.to_offset(self);
2168 self.visible_text.reversed_bytes_in_range(start..end)
2169 }
2170
2171 pub fn text_for_range<T: ToOffset>(&self, range: Range<T>) -> Chunks<'_> {
2172 let start = range.start.to_offset(self);
2173 let end = range.end.to_offset(self);
2174 self.visible_text.chunks_in_range(start..end)
2175 }
2176
2177 pub fn line_len(&self, row: u32) -> u32 {
2178 let row_start_offset = Point::new(row, 0).to_offset(self);
2179 let row_end_offset = if row >= self.max_point().row {
2180 self.len()
2181 } else {
2182 Point::new(row + 1, 0).to_previous_offset(self)
2183 };
2184 (row_end_offset - row_start_offset) as u32
2185 }
2186
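    /// Returns the line indent of each row in `row_range`, inclusive of the end row, as
    /// `(row, LineIndent)` pairs in ascending row order.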
2187 pub fn line_indents_in_row_range(
2188 &self,
2189 row_range: Range<u32>,
2190 ) -> impl Iterator<Item = (u32, LineIndent)> + '_ {
2191 let start = Point::new(row_range.start, 0).to_offset(self);
2192 let end = Point::new(row_range.end, self.line_len(row_range.end)).to_offset(self);
2193
2194 let mut chunks = self.as_rope().chunks_in_range(start..end);
2195 let mut row = row_range.start;
2196 let mut done = false;
2197 std::iter::from_fn(move || {
2198 if done {
2199 None
2200 } else {
2201 let indent = (row, LineIndent::from_chunks(&mut chunks));
2202 done = !chunks.next_line();
2203 row += 1;
2204 Some(indent)
2205 }
2206 })
2207 }
2208
2209 /// Returns the line indents in the given row range, exclusive of end row, in reversed order.
2210 pub fn reversed_line_indents_in_row_range(
2211 &self,
2212 row_range: Range<u32>,
2213 ) -> impl Iterator<Item = (u32, LineIndent)> + '_ {
2214 let start = Point::new(row_range.start, 0).to_offset(self);
2215
2216 let end_point;
2217 let end;
2218 if row_range.end > row_range.start {
2219 end_point = Point::new(row_range.end - 1, self.line_len(row_range.end - 1));
2220 end = end_point.to_offset(self);
2221 } else {
2222 end_point = Point::new(row_range.start, 0);
2223 end = start;
2224 };
2225
2226 let mut chunks = self.as_rope().chunks_in_range(start..end);
        // Seek to the end of the range, then move back to the start of the last line unless that
        // line is empty.
2228 chunks.seek(end);
2229 if end_point.column > 0 {
2230 chunks.prev_line();
2231 }
2232
2233 let mut row = end_point.row;
2234 let mut done = false;
2235 std::iter::from_fn(move || {
2236 if done {
2237 None
2238 } else {
2239 let initial_offset = chunks.offset();
2240 let indent = (row, LineIndent::from_chunks(&mut chunks));
2241 if chunks.offset() > initial_offset {
2242 chunks.prev_line();
2243 }
2244 done = !chunks.prev_line();
2245 if !done {
2246 row -= 1;
2247 }
2248
2249 Some(indent)
2250 }
2251 })
2252 }
2253
2254 pub fn line_indent_for_row(&self, row: u32) -> LineIndent {
2255 LineIndent::from_iter(self.chars_at(Point::new(row, 0)))
2256 }
2257
2258 pub fn is_line_blank(&self, row: u32) -> bool {
2259 self.text_for_range(Point::new(row, 0)..Point::new(row, self.line_len(row)))
2260 .all(|chunk| chunk.matches(|c: char| !c.is_whitespace()).next().is_none())
2261 }
2262
2263 pub fn text_summary_for_range<D, O: ToOffset>(&self, range: Range<O>) -> D
2264 where
2265 D: TextDimension,
2266 {
2267 self.visible_text
2268 .cursor(range.start.to_offset(self))
2269 .summary(range.end.to_offset(self))
2270 }
2271
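    /// Resolves each anchor to its current position, expressed in the text dimension `D`
    /// (for example, an offset or a `Point`). Anchors should be supplied in ascending buffer
    /// order, since the underlying fragment and text cursors only advance forward.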
2272 pub fn summaries_for_anchors<'a, D, A>(&'a self, anchors: A) -> impl 'a + Iterator<Item = D>
2273 where
2274 D: 'a + TextDimension,
2275 A: 'a + IntoIterator<Item = &'a Anchor>,
2276 {
2277 let anchors = anchors.into_iter();
2278 self.summaries_for_anchors_with_payload::<D, _, ()>(anchors.map(|a| (a, ())))
2279 .map(|d| d.0)
2280 }
2281
2282 pub fn summaries_for_anchors_with_payload<'a, D, A, T>(
2283 &'a self,
2284 anchors: A,
2285 ) -> impl 'a + Iterator<Item = (D, T)>
2286 where
2287 D: 'a + TextDimension,
2288 A: 'a + IntoIterator<Item = (&'a Anchor, T)>,
2289 {
2290 let anchors = anchors.into_iter();
2291 let mut insertion_cursor = self.insertions.cursor::<InsertionFragmentKey>(());
2292 let mut fragment_cursor = self
2293 .fragments
2294 .cursor::<Dimensions<Option<&Locator>, usize>>(&None);
2295 let mut text_cursor = self.visible_text.cursor(0);
2296 let mut position = D::zero(());
2297
2298 anchors.map(move |(anchor, payload)| {
2299 if *anchor == Anchor::MIN {
2300 return (D::zero(()), payload);
2301 } else if *anchor == Anchor::MAX {
2302 return (D::from_text_summary(&self.visible_text.summary()), payload);
2303 }
2304
2305 let anchor_key = InsertionFragmentKey {
2306 timestamp: anchor.timestamp,
2307 split_offset: anchor.offset,
2308 };
2309 insertion_cursor.seek(&anchor_key, anchor.bias);
2310 if let Some(insertion) = insertion_cursor.item() {
2311 let comparison = sum_tree::KeyedItem::key(insertion).cmp(&anchor_key);
2312 if comparison == Ordering::Greater
2313 || (anchor.bias == Bias::Left
2314 && comparison == Ordering::Equal
2315 && anchor.offset > 0)
2316 {
2317 insertion_cursor.prev();
2318 }
2319 } else {
2320 insertion_cursor.prev();
2321 }
2322 let insertion = insertion_cursor.item().expect("invalid insertion");
2323 assert_eq!(
2324 insertion.timestamp,
2325 anchor.timestamp,
2326 "invalid insertion for buffer {} with anchor {:?}",
2327 self.remote_id(),
2328 anchor
2329 );
2330
2331 fragment_cursor.seek_forward(&Some(&insertion.fragment_id), Bias::Left);
2332 let fragment = fragment_cursor.item().unwrap();
2333 let mut fragment_offset = fragment_cursor.start().1;
2334 if fragment.visible {
2335 fragment_offset += anchor.offset - insertion.split_offset;
2336 }
2337
2338 position.add_assign(&text_cursor.summary(fragment_offset));
2339 (position, payload)
2340 })
2341 }
2342
2343 pub fn summary_for_anchor<D>(&self, anchor: &Anchor) -> D
2344 where
2345 D: TextDimension,
2346 {
2347 self.text_summary_for_range(0..self.offset_for_anchor(anchor))
2348 }
2349
2350 pub fn offset_for_anchor(&self, anchor: &Anchor) -> usize {
2351 if *anchor == Anchor::MIN {
2352 0
2353 } else if *anchor == Anchor::MAX {
2354 self.visible_text.len()
2355 } else {
2356 debug_assert!(anchor.buffer_id == Some(self.remote_id));
2357 let anchor_key = InsertionFragmentKey {
2358 timestamp: anchor.timestamp,
2359 split_offset: anchor.offset,
2360 };
2361 let mut insertion_cursor = self.insertions.cursor::<InsertionFragmentKey>(());
2362 insertion_cursor.seek(&anchor_key, anchor.bias);
2363 if let Some(insertion) = insertion_cursor.item() {
2364 let comparison = sum_tree::KeyedItem::key(insertion).cmp(&anchor_key);
2365 if comparison == Ordering::Greater
2366 || (anchor.bias == Bias::Left
2367 && comparison == Ordering::Equal
2368 && anchor.offset > 0)
2369 {
2370 insertion_cursor.prev();
2371 }
2372 } else {
2373 insertion_cursor.prev();
2374 }
2375
2376 let Some(insertion) = insertion_cursor
2377 .item()
2378 .filter(|insertion| insertion.timestamp == anchor.timestamp)
2379 else {
2380 panic!(
2381 "invalid anchor {:?}. buffer id: {}, version: {:?}",
2382 anchor, self.remote_id, self.version
2383 );
2384 };
2385
2386 let (start, _, item) = self
2387 .fragments
2388 .find::<Dimensions<Option<&Locator>, usize>, _>(
2389 &None,
2390 &Some(&insertion.fragment_id),
2391 Bias::Left,
2392 );
2393 let fragment = item.unwrap();
2394 let mut fragment_offset = start.1;
2395 if fragment.visible {
2396 fragment_offset += anchor.offset - insertion.split_offset;
2397 }
2398 fragment_offset
2399 }
2400 }
2401
2402 fn fragment_id_for_anchor(&self, anchor: &Anchor) -> &Locator {
2403 self.try_fragment_id_for_anchor(anchor).unwrap_or_else(|| {
2404 panic!(
2405 "invalid anchor {:?}. buffer id: {}, version: {:?}",
2406 anchor, self.remote_id, self.version,
2407 )
2408 })
2409 }
2410
2411 fn try_fragment_id_for_anchor(&self, anchor: &Anchor) -> Option<&Locator> {
2412 if *anchor == Anchor::MIN {
2413 Some(Locator::min_ref())
2414 } else if *anchor == Anchor::MAX {
2415 Some(Locator::max_ref())
2416 } else {
2417 let anchor_key = InsertionFragmentKey {
2418 timestamp: anchor.timestamp,
2419 split_offset: anchor.offset,
2420 };
2421 let mut insertion_cursor = self.insertions.cursor::<InsertionFragmentKey>(());
2422 insertion_cursor.seek(&anchor_key, anchor.bias);
2423 if let Some(insertion) = insertion_cursor.item() {
2424 let comparison = sum_tree::KeyedItem::key(insertion).cmp(&anchor_key);
2425 if comparison == Ordering::Greater
2426 || (anchor.bias == Bias::Left
2427 && comparison == Ordering::Equal
2428 && anchor.offset > 0)
2429 {
2430 insertion_cursor.prev();
2431 }
2432 } else {
2433 insertion_cursor.prev();
2434 }
2435
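            // In debug builds, a timestamp mismatch makes the anchor resolve to `None` so the
            // inconsistency surfaces; in release builds the nearest preceding insertion is used.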
2436 insertion_cursor
2437 .item()
2438 .filter(|insertion| {
2439 !cfg!(debug_assertions) || insertion.timestamp == anchor.timestamp
2440 })
2441 .map(|insertion| &insertion.fragment_id)
2442 }
2443 }
2444
2445 pub fn anchor_before<T: ToOffset>(&self, position: T) -> Anchor {
2446 self.anchor_at(position, Bias::Left)
2447 }
2448
2449 pub fn anchor_after<T: ToOffset>(&self, position: T) -> Anchor {
2450 self.anchor_at(position, Bias::Right)
2451 }
2452
2453 pub fn anchor_at<T: ToOffset>(&self, position: T, bias: Bias) -> Anchor {
2454 self.anchor_at_offset(position.to_offset(self), bias)
2455 }
2456
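    /// Resolves `offset` to the fragment containing it and records that fragment's insertion
    /// timestamp together with the offset within the original insertion, allowing the anchor to
    /// be re-resolved to the same logical position after concurrent edits.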
2457 fn anchor_at_offset(&self, offset: usize, bias: Bias) -> Anchor {
2458 if bias == Bias::Left && offset == 0 {
2459 Anchor::MIN
2460 } else if bias == Bias::Right && offset == self.len() {
2461 Anchor::MAX
2462 } else {
2463 if offset > self.visible_text.len() {
2464 panic!("offset {} is out of bounds", offset)
2465 }
2466 self.visible_text.assert_char_boundary(offset);
2467 let (start, _, item) = self.fragments.find::<usize, _>(&None, &offset, bias);
2468 let fragment = item.unwrap();
2469 let overshoot = offset - start;
2470 Anchor {
2471 timestamp: fragment.timestamp,
2472 offset: fragment.insertion_offset + overshoot,
2473 bias,
2474 buffer_id: Some(self.remote_id),
2475 }
2476 }
2477 }
2478
2479 pub fn can_resolve(&self, anchor: &Anchor) -> bool {
2480 *anchor == Anchor::MIN
2481 || *anchor == Anchor::MAX
2482 || (Some(self.remote_id) == anchor.buffer_id && self.version.observed(anchor.timestamp))
2483 }
2484
2485 pub fn clip_offset(&self, offset: usize, bias: Bias) -> usize {
2486 self.visible_text.clip_offset(offset, bias)
2487 }
2488
2489 pub fn clip_point(&self, point: Point, bias: Bias) -> Point {
2490 self.visible_text.clip_point(point, bias)
2491 }
2492
2493 pub fn clip_offset_utf16(&self, offset: OffsetUtf16, bias: Bias) -> OffsetUtf16 {
2494 self.visible_text.clip_offset_utf16(offset, bias)
2495 }
2496
2497 pub fn clip_point_utf16(&self, point: Unclipped<PointUtf16>, bias: Bias) -> PointUtf16 {
2498 self.visible_text.clip_point_utf16(point, bias)
2499 }
2500
2501 pub fn edits_since<'a, D>(
2502 &'a self,
2503 since: &'a clock::Global,
2504 ) -> impl 'a + Iterator<Item = Edit<D>>
2505 where
2506 D: TextDimension + Ord,
2507 {
2508 self.edits_since_in_range(since, Anchor::MIN..Anchor::MAX)
2509 }
2510
2511 pub fn anchored_edits_since<'a, D>(
2512 &'a self,
2513 since: &'a clock::Global,
2514 ) -> impl 'a + Iterator<Item = (Edit<D>, Range<Anchor>)>
2515 where
2516 D: TextDimension + Ord,
2517 {
2518 self.anchored_edits_since_in_range(since, Anchor::MIN..Anchor::MAX)
2519 }
2520
2521 pub fn edits_since_in_range<'a, D>(
2522 &'a self,
2523 since: &'a clock::Global,
2524 range: Range<Anchor>,
2525 ) -> impl 'a + Iterator<Item = Edit<D>>
2526 where
2527 D: TextDimension + Ord,
2528 {
2529 self.anchored_edits_since_in_range(since, range)
2530 .map(|item| item.0)
2531 }
2532
2533 pub fn anchored_edits_since_in_range<'a, D>(
2534 &'a self,
2535 since: &'a clock::Global,
2536 range: Range<Anchor>,
2537 ) -> impl 'a + Iterator<Item = (Edit<D>, Range<Anchor>)>
2538 where
2539 D: TextDimension + Ord,
2540 {
2541 let fragments_cursor = if *since == self.version {
2542 None
2543 } else {
2544 let mut cursor = self.fragments.filter(&None, move |summary| {
2545 !since.observed_all(&summary.max_version)
2546 });
2547 cursor.next();
2548 Some(cursor)
2549 };
2550 let start_fragment_id = self.fragment_id_for_anchor(&range.start);
2551 let (start, _, item) = self
2552 .fragments
2553 .find::<Dimensions<Option<&Locator>, FragmentTextSummary>, _>(
2554 &None,
2555 &Some(start_fragment_id),
2556 Bias::Left,
2557 );
2558 let mut visible_start = start.1.visible;
2559 let mut deleted_start = start.1.deleted;
2560 if let Some(fragment) = item {
2561 let overshoot = range.start.offset - fragment.insertion_offset;
2562 if fragment.visible {
2563 visible_start += overshoot;
2564 } else {
2565 deleted_start += overshoot;
2566 }
2567 }
2568 let end_fragment_id = self.fragment_id_for_anchor(&range.end);
2569
2570 Edits {
2571 visible_cursor: self.visible_text.cursor(visible_start),
2572 deleted_cursor: self.deleted_text.cursor(deleted_start),
2573 fragments_cursor,
2574 undos: &self.undo_map,
2575 since,
2576 old_end: D::zero(()),
2577 new_end: D::zero(()),
2578 range: (start_fragment_id, range.start.offset)..(end_fragment_id, range.end.offset),
2579 buffer_id: self.remote_id,
2580 }
2581 }
2582
2583 pub fn has_edits_since_in_range(&self, since: &clock::Global, range: Range<Anchor>) -> bool {
2584 if *since != self.version {
2585 let start_fragment_id = self.fragment_id_for_anchor(&range.start);
2586 let end_fragment_id = self.fragment_id_for_anchor(&range.end);
2587 let mut cursor = self.fragments.filter::<_, usize>(&None, move |summary| {
2588 !since.observed_all(&summary.max_version)
2589 });
2590 cursor.next();
2591 while let Some(fragment) = cursor.item() {
2592 if fragment.id > *end_fragment_id {
2593 break;
2594 }
2595 if fragment.id > *start_fragment_id {
2596 let was_visible = fragment.was_visible(since, &self.undo_map);
2597 let is_visible = fragment.visible;
2598 if was_visible != is_visible {
2599 return true;
2600 }
2601 }
2602 cursor.next();
2603 }
2604 }
2605 false
2606 }
2607
2608 pub fn has_edits_since(&self, since: &clock::Global) -> bool {
2609 if *since != self.version {
2610 let mut cursor = self.fragments.filter::<_, usize>(&None, move |summary| {
2611 !since.observed_all(&summary.max_version)
2612 });
2613 cursor.next();
2614 while let Some(fragment) = cursor.item() {
2615 let was_visible = fragment.was_visible(since, &self.undo_map);
2616 let is_visible = fragment.visible;
2617 if was_visible != is_visible {
2618 return true;
2619 }
2620 cursor.next();
2621 }
2622 }
2623 false
2624 }
2625
2626 pub fn range_to_version(&self, range: Range<usize>, version: &clock::Global) -> Range<usize> {
2627 let mut offsets = self.offsets_to_version([range.start, range.end], version);
2628 offsets.next().unwrap()..offsets.next().unwrap()
2629 }
2630
2631 /// Converts the given sequence of offsets into their corresponding offsets
2632 /// at a prior version of this buffer.
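    ///
    /// A minimal sketch of the mapping (illustrative, not a doctest; `snapshot` and
    /// `old_version` are assumed to exist):
    ///
    /// ```ignore
    /// // Suppose the text was "ab" at `old_version` and "X" was then inserted at offset 1,
    /// // producing "aXb". The current offset 3 (after "b") maps back to old offset 2.
    /// let old_offsets: Vec<usize> = snapshot.offsets_to_version([3], &old_version).collect();
    /// assert_eq!(old_offsets, vec![2]);
    /// ```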
2633 pub fn offsets_to_version<'a>(
2634 &'a self,
2635 offsets: impl 'a + IntoIterator<Item = usize>,
2636 version: &'a clock::Global,
2637 ) -> impl 'a + Iterator<Item = usize> {
2638 let mut edits = self.edits_since(version).peekable();
2639 let mut last_old_end = 0;
2640 let mut last_new_end = 0;
2641 offsets.into_iter().map(move |new_offset| {
2642 while let Some(edit) = edits.peek() {
2643 if edit.new.start > new_offset {
2644 break;
2645 }
2646
2647 if edit.new.end <= new_offset {
2648 last_new_end = edit.new.end;
2649 last_old_end = edit.old.end;
2650 edits.next();
2651 continue;
2652 }
2653
2654 let overshoot = new_offset - edit.new.start;
2655 return (edit.old.start + overshoot).min(edit.old.end);
2656 }
2657
2658 last_old_end + new_offset.saturating_sub(last_new_end)
2659 })
2660 }
2661
    /// Visually annotates a position or range with the `Debug` representation of a value. The
    /// call site of this function is used as the key, so previous annotations from the same call
    /// site will be removed.
2664 #[cfg(debug_assertions)]
2665 #[track_caller]
2666 pub fn debug<R, V>(&self, ranges: &R, value: V)
2667 where
2668 R: debug::ToDebugRanges,
2669 V: std::fmt::Debug,
2670 {
2671 self.debug_with_key(std::panic::Location::caller(), ranges, value);
2672 }
2673
2674 /// Visually annotates a position or range with the `Debug` representation of a value. Previous
2675 /// debug annotations with the same key will be removed. The key is also used to determine the
2676 /// annotation's color.
2677 #[cfg(debug_assertions)]
2678 pub fn debug_with_key<K, R, V>(&self, key: &K, ranges: &R, value: V)
2679 where
2680 K: std::hash::Hash + 'static,
2681 R: debug::ToDebugRanges,
2682 V: std::fmt::Debug,
2683 {
2684 let ranges = ranges
2685 .to_debug_ranges(self)
2686 .into_iter()
2687 .map(|range| self.anchor_after(range.start)..self.anchor_before(range.end))
2688 .collect();
2689 debug::GlobalDebugRanges::with_locked(|debug_ranges| {
2690 debug_ranges.insert(key, ranges, format!("{value:?}").into());
2691 });
2692 }
2693}
2694
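/// Helper for rebuilding the visible and deleted ropes when fragment visibility or content
/// changes: each pushed span is read from the old visible or deleted rope (wherever it used to
/// live) and appended to the new visible or deleted rope (wherever it lives afterwards).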
2695struct RopeBuilder<'a> {
2696 old_visible_cursor: rope::Cursor<'a>,
2697 old_deleted_cursor: rope::Cursor<'a>,
2698 new_visible: Rope,
2699 new_deleted: Rope,
2700}
2701
2702impl<'a> RopeBuilder<'a> {
2703 fn new(old_visible_cursor: rope::Cursor<'a>, old_deleted_cursor: rope::Cursor<'a>) -> Self {
2704 Self {
2705 old_visible_cursor,
2706 old_deleted_cursor,
2707 new_visible: Rope::new(),
2708 new_deleted: Rope::new(),
2709 }
2710 }
2711
2712 fn append(&mut self, len: FragmentTextSummary) {
2713 self.push(len.visible, true, true);
2714 self.push(len.deleted, false, false);
2715 }
2716
2717 fn push_fragment(&mut self, fragment: &Fragment, was_visible: bool) {
2718 debug_assert!(fragment.len > 0);
2719 self.push(fragment.len, was_visible, fragment.visible)
2720 }
2721
2722 fn push(&mut self, len: usize, was_visible: bool, is_visible: bool) {
2723 let text = if was_visible {
2724 self.old_visible_cursor
2725 .slice(self.old_visible_cursor.offset() + len)
2726 } else {
2727 self.old_deleted_cursor
2728 .slice(self.old_deleted_cursor.offset() + len)
2729 };
2730 if is_visible {
2731 self.new_visible.append(text);
2732 } else {
2733 self.new_deleted.append(text);
2734 }
2735 }
2736
2737 fn push_str(&mut self, text: &str, cx: &BackgroundExecutor) {
2738 self.new_visible.push(text, cx);
2739 }
2740
2741 fn push_str_small(&mut self, text: &str) {
2742 self.new_visible.push_small(text);
2743 }
2744
2745 fn finish(mut self) -> (Rope, Rope) {
2746 self.new_visible.append(self.old_visible_cursor.suffix());
2747 self.new_deleted.append(self.old_deleted_cursor.suffix());
2748 (self.new_visible, self.new_deleted)
2749 }
2750}
2751
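/// Yields the fragments whose visibility changed since `since`, merging adjacent changes into a
/// single `Edit` expressed in both the old and new coordinate spaces, paired with the anchor
/// range that covers the edited fragments in the current buffer.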
2752impl<D: TextDimension + Ord, F: FnMut(&FragmentSummary) -> bool> Iterator for Edits<'_, D, F> {
2753 type Item = (Edit<D>, Range<Anchor>);
2754
2755 fn next(&mut self) -> Option<Self::Item> {
2756 let mut pending_edit: Option<Self::Item> = None;
2757 let cursor = self.fragments_cursor.as_mut()?;
2758
2759 while let Some(fragment) = cursor.item() {
2760 if fragment.id < *self.range.start.0 {
2761 cursor.next();
2762 continue;
2763 } else if fragment.id > *self.range.end.0 {
2764 break;
2765 }
2766
2767 if cursor.start().visible > self.visible_cursor.offset() {
2768 let summary = self.visible_cursor.summary(cursor.start().visible);
2769 self.old_end.add_assign(&summary);
2770 self.new_end.add_assign(&summary);
2771 }
2772
2773 if pending_edit
2774 .as_ref()
2775 .is_some_and(|(change, _)| change.new.end < self.new_end)
2776 {
2777 break;
2778 }
2779
2780 let start_anchor = Anchor {
2781 timestamp: fragment.timestamp,
2782 offset: fragment.insertion_offset,
2783 bias: Bias::Right,
2784 buffer_id: Some(self.buffer_id),
2785 };
2786 let end_anchor = Anchor {
2787 timestamp: fragment.timestamp,
2788 offset: fragment.insertion_offset + fragment.len,
2789 bias: Bias::Left,
2790 buffer_id: Some(self.buffer_id),
2791 };
2792
2793 if !fragment.was_visible(self.since, self.undos) && fragment.visible {
2794 let mut visible_end = cursor.end().visible;
2795 if fragment.id == *self.range.end.0 {
2796 visible_end = cmp::min(
2797 visible_end,
2798 cursor.start().visible + (self.range.end.1 - fragment.insertion_offset),
2799 );
2800 }
2801
2802 let fragment_summary = self.visible_cursor.summary(visible_end);
2803 let mut new_end = self.new_end;
2804 new_end.add_assign(&fragment_summary);
2805 if let Some((edit, range)) = pending_edit.as_mut() {
2806 edit.new.end = new_end;
2807 range.end = end_anchor;
2808 } else {
2809 pending_edit = Some((
2810 Edit {
2811 old: self.old_end..self.old_end,
2812 new: self.new_end..new_end,
2813 },
2814 start_anchor..end_anchor,
2815 ));
2816 }
2817
2818 self.new_end = new_end;
2819 } else if fragment.was_visible(self.since, self.undos) && !fragment.visible {
2820 let mut deleted_end = cursor.end().deleted;
2821 if fragment.id == *self.range.end.0 {
2822 deleted_end = cmp::min(
2823 deleted_end,
2824 cursor.start().deleted + (self.range.end.1 - fragment.insertion_offset),
2825 );
2826 }
2827
2828 if cursor.start().deleted > self.deleted_cursor.offset() {
2829 self.deleted_cursor.seek_forward(cursor.start().deleted);
2830 }
2831 let fragment_summary = self.deleted_cursor.summary(deleted_end);
2832 let mut old_end = self.old_end;
2833 old_end.add_assign(&fragment_summary);
2834 if let Some((edit, range)) = pending_edit.as_mut() {
2835 edit.old.end = old_end;
2836 range.end = end_anchor;
2837 } else {
2838 pending_edit = Some((
2839 Edit {
2840 old: self.old_end..old_end,
2841 new: self.new_end..self.new_end,
2842 },
2843 start_anchor..end_anchor,
2844 ));
2845 }
2846
2847 self.old_end = old_end;
2848 }
2849
2850 cursor.next();
2851 }
2852
2853 pending_edit
2854 }
2855}
2856
2857impl Fragment {
2858 fn is_visible(&self, undos: &UndoMap) -> bool {
2859 !undos.is_undone(self.timestamp) && self.deletions.iter().all(|d| undos.is_undone(*d))
2860 }
2861
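    /// A fragment was visible at `version` if its insertion had been observed and not undone at
    /// that version, and every deletion of it was either unobserved or undone at that version.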
2862 fn was_visible(&self, version: &clock::Global, undos: &UndoMap) -> bool {
2863 (version.observed(self.timestamp) && !undos.was_undone(self.timestamp, version))
2864 && self
2865 .deletions
2866 .iter()
2867 .all(|d| !version.observed(*d) || undos.was_undone(*d, version))
2868 }
2869}
2870
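/// Summarizes a fragment for the fragment tree: `max_version` gathers every timestamp that
/// affected the fragment (its insertion, its deletions, and any undos), which lets cursors skip
/// subtrees whose changes a given version has fully observed, while the text summary counts the
/// fragment's length as either visible or deleted depending on its current visibility.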
2871impl sum_tree::Item for Fragment {
2872 type Summary = FragmentSummary;
2873
2874 fn summary(&self, _cx: &Option<clock::Global>) -> Self::Summary {
2875 let mut max_version = clock::Global::new();
2876 max_version.observe(self.timestamp);
2877 for deletion in &self.deletions {
2878 max_version.observe(*deletion);
2879 }
2880 max_version.join(&self.max_undos);
2881
2882 let mut min_insertion_version = clock::Global::new();
2883 min_insertion_version.observe(self.timestamp);
2884 let max_insertion_version = min_insertion_version.clone();
2885 if self.visible {
2886 FragmentSummary {
2887 max_id: self.id.clone(),
2888 text: FragmentTextSummary {
2889 visible: self.len,
2890 deleted: 0,
2891 },
2892 max_version,
2893 min_insertion_version,
2894 max_insertion_version,
2895 }
2896 } else {
2897 FragmentSummary {
2898 max_id: self.id.clone(),
2899 text: FragmentTextSummary {
2900 visible: 0,
2901 deleted: self.len,
2902 },
2903 max_version,
2904 min_insertion_version,
2905 max_insertion_version,
2906 }
2907 }
2908 }
2909}
2910
2911impl sum_tree::Summary for FragmentSummary {
2912 type Context<'a> = &'a Option<clock::Global>;
2913
2914 fn zero(_cx: Self::Context<'_>) -> Self {
2915 Default::default()
2916 }
2917
2918 fn add_summary(&mut self, other: &Self, _: Self::Context<'_>) {
2919 self.max_id.assign(&other.max_id);
2920 self.text.visible += &other.text.visible;
2921 self.text.deleted += &other.text.deleted;
2922 self.max_version.join(&other.max_version);
2923 self.min_insertion_version
2924 .meet(&other.min_insertion_version);
2925 self.max_insertion_version
2926 .join(&other.max_insertion_version);
2927 }
2928}
2929
2930impl Default for FragmentSummary {
2931 fn default() -> Self {
2932 FragmentSummary {
2933 max_id: Locator::min(),
2934 text: FragmentTextSummary::default(),
2935 max_version: clock::Global::new(),
2936 min_insertion_version: clock::Global::new(),
2937 max_insertion_version: clock::Global::new(),
2938 }
2939 }
2940}
2941
2942impl sum_tree::Item for InsertionFragment {
2943 type Summary = InsertionFragmentKey;
2944
2945 fn summary(&self, _cx: ()) -> Self::Summary {
2946 InsertionFragmentKey {
2947 timestamp: self.timestamp,
2948 split_offset: self.split_offset,
2949 }
2950 }
2951}
2952
2953impl sum_tree::KeyedItem for InsertionFragment {
2954 type Key = InsertionFragmentKey;
2955
2956 fn key(&self) -> Self::Key {
2957 sum_tree::Item::summary(self, ())
2958 }
2959}
2960
2961impl InsertionFragment {
2962 fn new(fragment: &Fragment) -> Self {
2963 Self {
2964 timestamp: fragment.timestamp,
2965 split_offset: fragment.insertion_offset,
2966 fragment_id: fragment.id.clone(),
2967 }
2968 }
2969
2970 fn insert_new(fragment: &Fragment) -> sum_tree::Edit<Self> {
2971 sum_tree::Edit::Insert(Self::new(fragment))
2972 }
2973}
2974
2975impl sum_tree::ContextLessSummary for InsertionFragmentKey {
2976 fn zero() -> Self {
2977 InsertionFragmentKey {
2978 timestamp: Lamport::MIN,
2979 split_offset: 0,
2980 }
2981 }
2982
2983 fn add_summary(&mut self, summary: &Self) {
2984 *self = *summary;
2985 }
2986}
2987
2988#[derive(Copy, Clone, Debug, Default, PartialEq, Eq, PartialOrd, Ord, Hash)]
2989pub struct FullOffset(pub usize);
2990
2991impl ops::AddAssign<usize> for FullOffset {
2992 fn add_assign(&mut self, rhs: usize) {
2993 self.0 += rhs;
2994 }
2995}
2996
2997impl ops::Add<usize> for FullOffset {
2998 type Output = Self;
2999
3000 fn add(mut self, rhs: usize) -> Self::Output {
3001 self += rhs;
3002 self
3003 }
3004}
3005
3006impl ops::Sub for FullOffset {
3007 type Output = usize;
3008
3009 fn sub(self, rhs: Self) -> Self::Output {
3010 self.0 - rhs.0
3011 }
3012}
3013
3014impl sum_tree::Dimension<'_, FragmentSummary> for usize {
3015 fn zero(_: &Option<clock::Global>) -> Self {
3016 Default::default()
3017 }
3018
3019 fn add_summary(&mut self, summary: &FragmentSummary, _: &Option<clock::Global>) {
3020 *self += summary.text.visible;
3021 }
3022}
3023
3024impl sum_tree::Dimension<'_, FragmentSummary> for FullOffset {
3025 fn zero(_: &Option<clock::Global>) -> Self {
3026 Default::default()
3027 }
3028
3029 fn add_summary(&mut self, summary: &FragmentSummary, _: &Option<clock::Global>) {
3030 self.0 += summary.text.visible + summary.text.deleted;
3031 }
3032}
3033
3034impl<'a> sum_tree::Dimension<'a, FragmentSummary> for Option<&'a Locator> {
3035 fn zero(_: &Option<clock::Global>) -> Self {
3036 Default::default()
3037 }
3038
3039 fn add_summary(&mut self, summary: &'a FragmentSummary, _: &Option<clock::Global>) {
3040 *self = Some(&summary.max_id);
3041 }
3042}
3043
3044impl sum_tree::SeekTarget<'_, FragmentSummary, FragmentTextSummary> for usize {
3045 fn cmp(
3046 &self,
3047 cursor_location: &FragmentTextSummary,
3048 _: &Option<clock::Global>,
3049 ) -> cmp::Ordering {
3050 Ord::cmp(self, &cursor_location.visible)
3051 }
3052}
3053
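/// A full offset (visible plus deleted text) interpreted relative to the version stored in the
/// cursor's context: summaries whose insertions that version has fully observed contribute to
/// the offset, summaries it has not observed at all are skipped, and partially observed
/// summaries mark the offset as `Invalid`.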
3054#[derive(Copy, Clone, Debug, Eq, PartialEq)]
3055enum VersionedFullOffset {
3056 Offset(FullOffset),
3057 Invalid,
3058}
3059
3060impl VersionedFullOffset {
3061 fn full_offset(&self) -> FullOffset {
3062 if let Self::Offset(position) = self {
3063 *position
3064 } else {
3065 panic!("invalid version")
3066 }
3067 }
3068}
3069
3070impl Default for VersionedFullOffset {
3071 fn default() -> Self {
3072 Self::Offset(Default::default())
3073 }
3074}
3075
3076impl<'a> sum_tree::Dimension<'a, FragmentSummary> for VersionedFullOffset {
3077 fn zero(_cx: &Option<clock::Global>) -> Self {
3078 Default::default()
3079 }
3080
3081 fn add_summary(&mut self, summary: &'a FragmentSummary, cx: &Option<clock::Global>) {
3082 if let Self::Offset(offset) = self {
3083 let version = cx.as_ref().unwrap();
3084 if version.observed_all(&summary.max_insertion_version) {
3085 *offset += summary.text.visible + summary.text.deleted;
3086 } else if version.observed_any(&summary.min_insertion_version) {
3087 *self = Self::Invalid;
3088 }
3089 }
3090 }
3091}
3092
3093impl sum_tree::SeekTarget<'_, FragmentSummary, Self> for VersionedFullOffset {
3094 fn cmp(&self, cursor_position: &Self, _: &Option<clock::Global>) -> cmp::Ordering {
3095 match (self, cursor_position) {
3096 (Self::Offset(a), Self::Offset(b)) => Ord::cmp(a, b),
3097 (Self::Offset(_), Self::Invalid) => cmp::Ordering::Less,
3098 (Self::Invalid, _) => unreachable!(),
3099 }
3100 }
3101}
3102
3103impl Operation {
3104 fn replica_id(&self) -> ReplicaId {
3105 operation_queue::Operation::lamport_timestamp(self).replica_id
3106 }
3107
3108 pub fn timestamp(&self) -> clock::Lamport {
3109 match self {
3110 Operation::Edit(edit) => edit.timestamp,
3111 Operation::Undo(undo) => undo.timestamp,
3112 }
3113 }
3114
3115 pub fn as_edit(&self) -> Option<&EditOperation> {
3116 match self {
3117 Operation::Edit(edit) => Some(edit),
3118 _ => None,
3119 }
3120 }
3121
3122 pub fn is_edit(&self) -> bool {
3123 matches!(self, Operation::Edit { .. })
3124 }
3125}
3126
3127impl operation_queue::Operation for Operation {
3128 fn lamport_timestamp(&self) -> clock::Lamport {
3129 match self {
3130 Operation::Edit(edit) => edit.timestamp,
3131 Operation::Undo(undo) => undo.timestamp,
3132 }
3133 }
3134}
3135
3136pub trait ToOffset {
3137 fn to_offset(&self, snapshot: &BufferSnapshot) -> usize;
    /// Returns the next offset in the buffer after this position, respecting UTF-8 character boundaries.
3139 fn to_next_offset(&self, snapshot: &BufferSnapshot) -> usize {
3140 snapshot
3141 .visible_text
3142 .ceil_char_boundary(self.to_offset(snapshot) + 1)
3143 }
    /// Returns the previous offset in the buffer before this position, respecting UTF-8 character boundaries.
3145 fn to_previous_offset(&self, snapshot: &BufferSnapshot) -> usize {
3146 snapshot
3147 .visible_text
3148 .floor_char_boundary(self.to_offset(snapshot).saturating_sub(1))
3149 }
3150}
3151
3152impl ToOffset for Point {
3153 fn to_offset(&self, snapshot: &BufferSnapshot) -> usize {
3154 snapshot.point_to_offset(*self)
3155 }
3156}
3157
3158impl ToOffset for usize {
3159 #[track_caller]
3160 fn to_offset(&self, snapshot: &BufferSnapshot) -> usize {
3161 assert!(
3162 *self <= snapshot.len(),
3163 "offset {} is out of range, snapshot length is {}",
3164 self,
3165 snapshot.len()
3166 );
3167 *self
3168 }
3169}
3170
3171impl ToOffset for Anchor {
3172 fn to_offset(&self, snapshot: &BufferSnapshot) -> usize {
3173 snapshot.summary_for_anchor(self)
3174 }
3175}
3176
3177impl<T: ToOffset> ToOffset for &T {
3178 fn to_offset(&self, content: &BufferSnapshot) -> usize {
3179 (*self).to_offset(content)
3180 }
3181}
3182
3183impl ToOffset for PointUtf16 {
3184 fn to_offset(&self, snapshot: &BufferSnapshot) -> usize {
3185 snapshot.point_utf16_to_offset(*self)
3186 }
3187}
3188
3189impl ToOffset for Unclipped<PointUtf16> {
3190 fn to_offset(&self, snapshot: &BufferSnapshot) -> usize {
3191 snapshot.unclipped_point_utf16_to_offset(*self)
3192 }
3193}
3194
3195pub trait ToPoint {
3196 fn to_point(&self, snapshot: &BufferSnapshot) -> Point;
3197}
3198
3199impl ToPoint for Anchor {
3200 fn to_point(&self, snapshot: &BufferSnapshot) -> Point {
3201 snapshot.summary_for_anchor(self)
3202 }
3203}
3204
3205impl ToPoint for usize {
3206 fn to_point(&self, snapshot: &BufferSnapshot) -> Point {
3207 snapshot.offset_to_point(*self)
3208 }
3209}
3210
3211impl ToPoint for Point {
3212 fn to_point(&self, _: &BufferSnapshot) -> Point {
3213 *self
3214 }
3215}
3216
3217impl ToPoint for Unclipped<PointUtf16> {
3218 fn to_point(&self, snapshot: &BufferSnapshot) -> Point {
3219 snapshot.unclipped_point_utf16_to_point(*self)
3220 }
3221}
3222
3223pub trait ToPointUtf16 {
3224 fn to_point_utf16(&self, snapshot: &BufferSnapshot) -> PointUtf16;
3225}
3226
3227impl ToPointUtf16 for Anchor {
3228 fn to_point_utf16(&self, snapshot: &BufferSnapshot) -> PointUtf16 {
3229 snapshot.summary_for_anchor(self)
3230 }
3231}
3232
3233impl ToPointUtf16 for usize {
3234 fn to_point_utf16(&self, snapshot: &BufferSnapshot) -> PointUtf16 {
3235 snapshot.offset_to_point_utf16(*self)
3236 }
3237}
3238
3239impl ToPointUtf16 for PointUtf16 {
3240 fn to_point_utf16(&self, _: &BufferSnapshot) -> PointUtf16 {
3241 *self
3242 }
3243}
3244
3245impl ToPointUtf16 for Point {
3246 fn to_point_utf16(&self, snapshot: &BufferSnapshot) -> PointUtf16 {
3247 snapshot.point_to_point_utf16(*self)
3248 }
3249}
3250
3251pub trait ToOffsetUtf16 {
3252 fn to_offset_utf16(&self, snapshot: &BufferSnapshot) -> OffsetUtf16;
3253}
3254
3255impl ToOffsetUtf16 for Anchor {
3256 fn to_offset_utf16(&self, snapshot: &BufferSnapshot) -> OffsetUtf16 {
3257 snapshot.summary_for_anchor(self)
3258 }
3259}
3260
3261impl ToOffsetUtf16 for usize {
3262 fn to_offset_utf16(&self, snapshot: &BufferSnapshot) -> OffsetUtf16 {
3263 snapshot.offset_to_offset_utf16(*self)
3264 }
3265}
3266
3267impl ToOffsetUtf16 for OffsetUtf16 {
3268 fn to_offset_utf16(&self, _snapshot: &BufferSnapshot) -> OffsetUtf16 {
3269 *self
3270 }
3271}
3272
3273pub trait FromAnchor {
3274 fn from_anchor(anchor: &Anchor, snapshot: &BufferSnapshot) -> Self;
3275}
3276
3277impl FromAnchor for Anchor {
3278 fn from_anchor(anchor: &Anchor, _snapshot: &BufferSnapshot) -> Self {
3279 *anchor
3280 }
3281}
3282
3283impl FromAnchor for Point {
3284 fn from_anchor(anchor: &Anchor, snapshot: &BufferSnapshot) -> Self {
3285 snapshot.summary_for_anchor(anchor)
3286 }
3287}
3288
3289impl FromAnchor for PointUtf16 {
3290 fn from_anchor(anchor: &Anchor, snapshot: &BufferSnapshot) -> Self {
3291 snapshot.summary_for_anchor(anchor)
3292 }
3293}
3294
3295impl FromAnchor for usize {
3296 fn from_anchor(anchor: &Anchor, snapshot: &BufferSnapshot) -> Self {
3297 snapshot.summary_for_anchor(anchor)
3298 }
3299}
3300
3301#[derive(Clone, Copy, Debug, PartialEq)]
3302pub enum LineEnding {
3303 Unix,
3304 Windows,
3305}
3306
3307impl Default for LineEnding {
3308 fn default() -> Self {
3309 #[cfg(unix)]
3310 return Self::Unix;
3311
3312 #[cfg(not(unix))]
3313 return Self::Windows;
3314 }
3315}
3316
3317impl LineEnding {
3318 pub fn as_str(&self) -> &'static str {
3319 match self {
3320 LineEnding::Unix => "\n",
3321 LineEnding::Windows => "\r\n",
3322 }
3323 }
3324
3325 pub fn label(&self) -> &'static str {
3326 match self {
3327 LineEnding::Unix => "LF",
3328 LineEnding::Windows => "CRLF",
3329 }
3330 }
3331
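    /// Detects the line ending style from the first newline within (at most) the first 1000
    /// bytes of `text`, falling back to the platform default when no newline is found.
    ///
    /// For example (illustrative, not run as a doctest):
    ///
    /// ```ignore
    /// assert_eq!(LineEnding::detect("one\r\ntwo"), LineEnding::Windows);
    /// assert_eq!(LineEnding::detect("one\ntwo"), LineEnding::Unix);
    /// ```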
3332 pub fn detect(text: &str) -> Self {
3333 let mut max_ix = cmp::min(text.len(), 1000);
3334 while !text.is_char_boundary(max_ix) {
3335 max_ix -= 1;
3336 }
3337
3338 if let Some(ix) = text[..max_ix].find(['\n']) {
3339 if ix > 0 && text.as_bytes()[ix - 1] == b'\r' {
3340 Self::Windows
3341 } else {
3342 Self::Unix
3343 }
3344 } else {
3345 Self::default()
3346 }
3347 }
3348
3349 pub fn normalize(text: &mut String) {
3350 if let Cow::Owned(replaced) = LINE_SEPARATORS_REGEX.replace_all(text, "\n") {
3351 *text = replaced;
3352 }
3353 }
3354
3355 pub fn normalize_arc(text: Arc<str>) -> Arc<str> {
3356 if let Cow::Owned(replaced) = LINE_SEPARATORS_REGEX.replace_all(&text, "\n") {
3357 replaced.into()
3358 } else {
3359 text
3360 }
3361 }
3362
3363 pub fn normalize_cow(text: Cow<str>) -> Cow<str> {
3364 if let Cow::Owned(replaced) = LINE_SEPARATORS_REGEX.replace_all(&text, "\n") {
3365 replaced.into()
3366 } else {
3367 text
3368 }
3369 }
3370}
3371
3372#[cfg(debug_assertions)]
3373pub mod debug {
3374 use super::*;
3375 use parking_lot::Mutex;
3376 use std::any::TypeId;
3377 use std::hash::{Hash, Hasher};
3378
3379 static GLOBAL_DEBUG_RANGES: Mutex<Option<GlobalDebugRanges>> = Mutex::new(None);
3380
3381 pub struct GlobalDebugRanges {
3382 pub ranges: Vec<DebugRange>,
3383 key_to_occurrence_index: HashMap<Key, usize>,
3384 next_occurrence_index: usize,
3385 }
3386
3387 pub struct DebugRange {
3388 key: Key,
3389 pub ranges: Vec<Range<Anchor>>,
3390 pub value: Arc<str>,
3391 pub occurrence_index: usize,
3392 }
3393
3394 #[derive(Debug, Clone, PartialEq, Eq, Hash)]
3395 struct Key {
3396 type_id: TypeId,
3397 hash: u64,
3398 }
3399
3400 impl GlobalDebugRanges {
3401 pub fn with_locked<R>(f: impl FnOnce(&mut Self) -> R) -> R {
3402 let mut state = GLOBAL_DEBUG_RANGES.lock();
3403 if state.is_none() {
3404 *state = Some(GlobalDebugRanges {
3405 ranges: Vec::new(),
3406 key_to_occurrence_index: HashMap::default(),
3407 next_occurrence_index: 0,
3408 });
3409 }
3410 if let Some(global_debug_ranges) = state.as_mut() {
3411 f(global_debug_ranges)
3412 } else {
3413 unreachable!()
3414 }
3415 }
3416
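        /// Inserts or replaces the annotation stored under `key`. Each distinct key is assigned
        /// a stable occurrence index on first insertion, which callers can use to keep, for
        /// example, a consistent color per key.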
3417 pub fn insert<K: Hash + 'static>(
3418 &mut self,
3419 key: &K,
3420 ranges: Vec<Range<Anchor>>,
3421 value: Arc<str>,
3422 ) {
3423 let occurrence_index = *self
3424 .key_to_occurrence_index
3425 .entry(Key::new(key))
3426 .or_insert_with(|| {
3427 let occurrence_index = self.next_occurrence_index;
3428 self.next_occurrence_index += 1;
3429 occurrence_index
3430 });
3431 let key = Key::new(key);
3432 let existing = self
3433 .ranges
3434 .iter()
3435 .enumerate()
3436 .rfind(|(_, existing)| existing.key == key);
3437 if let Some((existing_ix, _)) = existing {
3438 self.ranges.remove(existing_ix);
3439 }
3440 self.ranges.push(DebugRange {
3441 ranges,
3442 key,
3443 value,
3444 occurrence_index,
3445 });
3446 }
3447
3448 pub fn remove<K: Hash + 'static>(&mut self, key: &K) {
3449 self.remove_impl(&Key::new(key));
3450 }
3451
3452 fn remove_impl(&mut self, key: &Key) {
3453 let existing = self
3454 .ranges
3455 .iter()
3456 .enumerate()
3457 .rfind(|(_, existing)| &existing.key == key);
3458 if let Some((existing_ix, _)) = existing {
3459 self.ranges.remove(existing_ix);
3460 }
3461 }
3462
3463 pub fn remove_all_with_key_type<K: 'static>(&mut self) {
3464 self.ranges
3465 .retain(|item| item.key.type_id != TypeId::of::<K>());
3466 }
3467 }
3468
3469 impl Key {
3470 fn new<K: Hash + 'static>(key: &K) -> Self {
3471 let type_id = TypeId::of::<K>();
3472 let mut hasher = collections::FxHasher::default();
3473 key.hash(&mut hasher);
3474 Key {
3475 type_id,
3476 hash: hasher.finish(),
3477 }
3478 }
3479 }
3480
3481 pub trait ToDebugRanges {
3482 fn to_debug_ranges(&self, snapshot: &BufferSnapshot) -> Vec<Range<usize>>;
3483 }
3484
3485 impl<T: ToOffset> ToDebugRanges for T {
3486 fn to_debug_ranges(&self, snapshot: &BufferSnapshot) -> Vec<Range<usize>> {
3487 [self.to_offset(snapshot)].to_debug_ranges(snapshot)
3488 }
3489 }
3490
3491 impl<T: ToOffset + Clone> ToDebugRanges for Range<T> {
3492 fn to_debug_ranges(&self, snapshot: &BufferSnapshot) -> Vec<Range<usize>> {
3493 [self.clone()].to_debug_ranges(snapshot)
3494 }
3495 }
3496
3497 impl<T: ToOffset> ToDebugRanges for Vec<T> {
3498 fn to_debug_ranges(&self, snapshot: &BufferSnapshot) -> Vec<Range<usize>> {
3499 self.as_slice().to_debug_ranges(snapshot)
3500 }
3501 }
3502
3503 impl<T: ToOffset> ToDebugRanges for Vec<Range<T>> {
3504 fn to_debug_ranges(&self, snapshot: &BufferSnapshot) -> Vec<Range<usize>> {
3505 self.as_slice().to_debug_ranges(snapshot)
3506 }
3507 }
3508
3509 impl<T: ToOffset> ToDebugRanges for [T] {
3510 fn to_debug_ranges(&self, snapshot: &BufferSnapshot) -> Vec<Range<usize>> {
3511 self.iter()
3512 .map(|item| {
3513 let offset = item.to_offset(snapshot);
3514 offset..offset
3515 })
3516 .collect()
3517 }
3518 }
3519
3520 impl<T: ToOffset> ToDebugRanges for [Range<T>] {
3521 fn to_debug_ranges(&self, snapshot: &BufferSnapshot) -> Vec<Range<usize>> {
3522 self.iter()
3523 .map(|range| range.start.to_offset(snapshot)..range.end.to_offset(snapshot))
3524 .collect()
3525 }
3526 }
3527}