1mod anchor;
2pub mod locator;
3#[cfg(any(test, feature = "test-support"))]
4pub mod network;
5pub mod operation_queue;
6mod patch;
7mod selection;
8pub mod subscription;
9#[cfg(test)]
10mod tests;
11mod undo_map;
12
13pub use anchor::*;
14use anyhow::{Context as _, Result};
15use clock::LOCAL_BRANCH_REPLICA_ID;
16pub use clock::ReplicaId;
17use collections::{HashMap, HashSet};
18use locator::Locator;
19use operation_queue::OperationQueue;
20pub use patch::Patch;
21use postage::{oneshot, prelude::*};
22
23use regex::Regex;
24pub use rope::*;
25pub use selection::*;
26use std::{
27 borrow::Cow,
28 cmp::{self, Ordering, Reverse},
29 fmt::Display,
30 future::Future,
31 iter::Iterator,
32 num::NonZeroU64,
33 ops::{self, Deref, Range, Sub},
34 str,
35 sync::{Arc, LazyLock},
36 time::{Duration, Instant},
37};
38pub use subscription::*;
39pub use sum_tree::Bias;
40use sum_tree::{Dimensions, FilterCursor, SumTree, TreeMap, TreeSet};
41use undo_map::UndoMap;
42
43#[cfg(any(test, feature = "test-support"))]
44use util::RandomCharIter;
45
46static LINE_SEPARATORS_REGEX: LazyLock<Regex> =
47 LazyLock::new(|| Regex::new(r"\r\n|\r").expect("Failed to create LINE_SEPARATORS_REGEX"));
48
49pub type TransactionId = clock::Lamport;
50
51pub struct Buffer {
52 snapshot: BufferSnapshot,
53 history: History,
54 deferred_ops: OperationQueue<Operation>,
55 deferred_replicas: HashSet<ReplicaId>,
56 pub lamport_clock: clock::Lamport,
57 subscriptions: Topic,
58 edit_id_resolvers: HashMap<clock::Lamport, Vec<oneshot::Sender<()>>>,
59 wait_for_version_txs: Vec<(clock::Global, oneshot::Sender<()>)>,
60}
61
62#[repr(transparent)]
63#[derive(Clone, Copy, Debug, Hash, PartialEq, PartialOrd, Ord, Eq)]
64pub struct BufferId(NonZeroU64);
65
66impl Display for BufferId {
67 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
68 write!(f, "{}", self.0)
69 }
70}
71
72impl From<NonZeroU64> for BufferId {
73 fn from(id: NonZeroU64) -> Self {
74 BufferId(id)
75 }
76}
77
78impl BufferId {
    /// Returns `Err` if `id` is zero, i.e. outside of the `BufferId` domain.
80 pub fn new(id: u64) -> anyhow::Result<Self> {
81 let id = NonZeroU64::new(id).context("Buffer id cannot be 0.")?;
82 Ok(Self(id))
83 }
84
85 /// Increments this buffer id, returning the old value.
86 /// So that's a post-increment operator in disguise.
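    ///
    /// A rough usage sketch (illustrative only, not compiled as a doctest):
    ///
    /// ```ignore
    /// let mut id = BufferId::new(1).unwrap();
    /// assert_eq!(id.next().to_proto(), 1);
    /// assert_eq!(id.to_proto(), 2);
    /// ```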
87 pub fn next(&mut self) -> Self {
88 let old = *self;
89 self.0 = self.0.saturating_add(1);
90 old
91 }
92
93 pub fn to_proto(self) -> u64 {
94 self.into()
95 }
96}
97
98impl From<BufferId> for u64 {
99 fn from(id: BufferId) -> Self {
100 id.0.get()
101 }
102}
103
104#[derive(Clone)]
105pub struct BufferSnapshot {
106 replica_id: ReplicaId,
107 remote_id: BufferId,
108 visible_text: Rope,
109 deleted_text: Rope,
110 line_ending: LineEnding,
111 undo_map: UndoMap,
112 fragments: SumTree<Fragment>,
113 insertions: SumTree<InsertionFragment>,
114 insertion_slices: TreeSet<InsertionSlice>,
115 pub version: clock::Global,
116}
117
118#[derive(Clone, Debug)]
119pub struct HistoryEntry {
120 transaction: Transaction,
121 first_edit_at: Instant,
122 last_edit_at: Instant,
123 suppress_grouping: bool,
124}
125
126#[derive(Clone, Debug)]
127pub struct Transaction {
128 pub id: TransactionId,
129 pub edit_ids: Vec<clock::Lamport>,
130 pub start: clock::Global,
131}
132
133impl Transaction {
134 pub fn merge_in(&mut self, other: Transaction) {
135 self.edit_ids.extend(other.edit_ids);
136 }
137}
138
139impl HistoryEntry {
140 pub fn transaction_id(&self) -> TransactionId {
141 self.transaction.id
142 }
143}
144
145struct History {
146 base_text: Rope,
147 operations: TreeMap<clock::Lamport, Operation>,
148 undo_stack: Vec<HistoryEntry>,
149 redo_stack: Vec<HistoryEntry>,
150 transaction_depth: usize,
151 group_interval: Duration,
152}
153
154#[derive(Clone, Debug, Eq, PartialEq)]
155struct InsertionSlice {
156 edit_id: clock::Lamport,
157 insertion_id: clock::Lamport,
158 range: Range<usize>,
159}
160
161impl Ord for InsertionSlice {
162 fn cmp(&self, other: &Self) -> Ordering {
163 self.edit_id
164 .cmp(&other.edit_id)
165 .then_with(|| self.insertion_id.cmp(&other.insertion_id))
166 .then_with(|| self.range.start.cmp(&other.range.start))
167 .then_with(|| self.range.end.cmp(&other.range.end))
168 }
169}
170
171impl PartialOrd for InsertionSlice {
172 fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
173 Some(self.cmp(other))
174 }
175}
176
177impl InsertionSlice {
178 fn from_fragment(edit_id: clock::Lamport, fragment: &Fragment) -> Self {
179 Self {
180 edit_id,
181 insertion_id: fragment.timestamp,
182 range: fragment.insertion_offset..fragment.insertion_offset + fragment.len,
183 }
184 }
185}
186
187impl History {
188 pub fn new(base_text: Rope) -> Self {
189 Self {
190 base_text,
191 operations: Default::default(),
192 undo_stack: Vec::new(),
193 redo_stack: Vec::new(),
194 transaction_depth: 0,
195 // Don't group transactions in tests unless we opt in, because it's a footgun.
196 #[cfg(any(test, feature = "test-support"))]
197 group_interval: Duration::ZERO,
198 #[cfg(not(any(test, feature = "test-support")))]
199 group_interval: Duration::from_millis(300),
200 }
201 }
202
203 fn push(&mut self, op: Operation) {
204 self.operations.insert(op.timestamp(), op);
205 }
206
207 fn start_transaction(
208 &mut self,
209 start: clock::Global,
210 now: Instant,
211 clock: &mut clock::Lamport,
212 ) -> Option<TransactionId> {
213 self.transaction_depth += 1;
214 if self.transaction_depth == 1 {
215 let id = clock.tick();
216 self.undo_stack.push(HistoryEntry {
217 transaction: Transaction {
218 id,
219 start,
220 edit_ids: Default::default(),
221 },
222 first_edit_at: now,
223 last_edit_at: now,
224 suppress_grouping: false,
225 });
226 Some(id)
227 } else {
228 None
229 }
230 }
231
232 fn end_transaction(&mut self, now: Instant) -> Option<&HistoryEntry> {
233 assert_ne!(self.transaction_depth, 0);
234 self.transaction_depth -= 1;
235 if self.transaction_depth == 0 {
236 if self
237 .undo_stack
238 .last()
239 .unwrap()
240 .transaction
241 .edit_ids
242 .is_empty()
243 {
244 self.undo_stack.pop();
245 None
246 } else {
247 self.redo_stack.clear();
248 let entry = self.undo_stack.last_mut().unwrap();
249 entry.last_edit_at = now;
250 Some(entry)
251 }
252 } else {
253 None
254 }
255 }
256
257 fn group(&mut self) -> Option<TransactionId> {
258 let mut count = 0;
259 let mut entries = self.undo_stack.iter();
260 if let Some(mut entry) = entries.next_back() {
261 while let Some(prev_entry) = entries.next_back() {
262 if !prev_entry.suppress_grouping
263 && entry.first_edit_at - prev_entry.last_edit_at < self.group_interval
264 {
265 entry = prev_entry;
266 count += 1;
267 } else {
268 break;
269 }
270 }
271 }
272 self.group_trailing(count)
273 }
274
275 fn group_until(&mut self, transaction_id: TransactionId) {
276 let mut count = 0;
277 for entry in self.undo_stack.iter().rev() {
278 if entry.transaction_id() == transaction_id {
279 self.group_trailing(count);
280 break;
281 } else if entry.suppress_grouping {
282 break;
283 } else {
284 count += 1;
285 }
286 }
287 }
288
289 fn group_trailing(&mut self, n: usize) -> Option<TransactionId> {
290 let new_len = self.undo_stack.len() - n;
291 let (entries_to_keep, entries_to_merge) = self.undo_stack.split_at_mut(new_len);
292 if let Some(last_entry) = entries_to_keep.last_mut() {
293 for entry in &*entries_to_merge {
294 for edit_id in &entry.transaction.edit_ids {
295 last_entry.transaction.edit_ids.push(*edit_id);
296 }
297 }
298
299 if let Some(entry) = entries_to_merge.last_mut() {
300 last_entry.last_edit_at = entry.last_edit_at;
301 }
302 }
303
304 self.undo_stack.truncate(new_len);
305 self.undo_stack.last().map(|e| e.transaction.id)
306 }
307
308 fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
309 self.undo_stack.last_mut().map(|entry| {
310 entry.suppress_grouping = true;
311 &entry.transaction
312 })
313 }
314
315 fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
316 assert_eq!(self.transaction_depth, 0);
317 self.undo_stack.push(HistoryEntry {
318 transaction,
319 first_edit_at: now,
320 last_edit_at: now,
321 suppress_grouping: false,
322 });
323 }
324
325 /// Differs from `push_transaction` in that it does not clear the redo
326 /// stack. Intended to be used to create a parent transaction to merge
327 /// potential child transactions into.
328 ///
329 /// The caller is responsible for removing it from the undo history using
330 /// `forget_transaction` if no edits are merged into it. Otherwise, if edits
331 /// are merged into this transaction, the caller is responsible for ensuring
332 /// the redo stack is cleared. The easiest way to ensure the redo stack is
333 /// cleared is to create transactions with the usual `start_transaction` and
334 /// `end_transaction` methods and merging the resulting transactions into
    /// the transaction created by this method.
336 fn push_empty_transaction(
337 &mut self,
338 start: clock::Global,
339 now: Instant,
340 clock: &mut clock::Lamport,
341 ) -> TransactionId {
342 assert_eq!(self.transaction_depth, 0);
343 let id = clock.tick();
344 let transaction = Transaction {
345 id,
346 start,
347 edit_ids: Vec::new(),
348 };
349 self.undo_stack.push(HistoryEntry {
350 transaction,
351 first_edit_at: now,
352 last_edit_at: now,
353 suppress_grouping: false,
354 });
355 id
356 }
357
358 fn push_undo(&mut self, op_id: clock::Lamport) {
359 assert_ne!(self.transaction_depth, 0);
360 if let Some(Operation::Edit(_)) = self.operations.get(&op_id) {
361 let last_transaction = self.undo_stack.last_mut().unwrap();
362 last_transaction.transaction.edit_ids.push(op_id);
363 }
364 }
365
366 fn pop_undo(&mut self) -> Option<&HistoryEntry> {
367 assert_eq!(self.transaction_depth, 0);
368 if let Some(entry) = self.undo_stack.pop() {
369 self.redo_stack.push(entry);
370 self.redo_stack.last()
371 } else {
372 None
373 }
374 }
375
376 fn remove_from_undo(&mut self, transaction_id: TransactionId) -> Option<&HistoryEntry> {
377 assert_eq!(self.transaction_depth, 0);
378
379 let entry_ix = self
380 .undo_stack
381 .iter()
382 .rposition(|entry| entry.transaction.id == transaction_id)?;
383 let entry = self.undo_stack.remove(entry_ix);
384 self.redo_stack.push(entry);
385 self.redo_stack.last()
386 }
387
388 fn remove_from_undo_until(&mut self, transaction_id: TransactionId) -> &[HistoryEntry] {
389 assert_eq!(self.transaction_depth, 0);
390
391 let redo_stack_start_len = self.redo_stack.len();
392 if let Some(entry_ix) = self
393 .undo_stack
394 .iter()
395 .rposition(|entry| entry.transaction.id == transaction_id)
396 {
397 self.redo_stack
398 .extend(self.undo_stack.drain(entry_ix..).rev());
399 }
400 &self.redo_stack[redo_stack_start_len..]
401 }
402
403 fn forget(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
404 assert_eq!(self.transaction_depth, 0);
405 if let Some(entry_ix) = self
406 .undo_stack
407 .iter()
408 .rposition(|entry| entry.transaction.id == transaction_id)
409 {
410 Some(self.undo_stack.remove(entry_ix).transaction)
411 } else if let Some(entry_ix) = self
412 .redo_stack
413 .iter()
414 .rposition(|entry| entry.transaction.id == transaction_id)
415 {
416 Some(self.redo_stack.remove(entry_ix).transaction)
417 } else {
418 None
419 }
420 }
421
422 fn transaction(&self, transaction_id: TransactionId) -> Option<&Transaction> {
423 let entry = self
424 .undo_stack
425 .iter()
426 .rfind(|entry| entry.transaction.id == transaction_id)
427 .or_else(|| {
428 self.redo_stack
429 .iter()
430 .rfind(|entry| entry.transaction.id == transaction_id)
431 })?;
432 Some(&entry.transaction)
433 }
434
435 fn transaction_mut(&mut self, transaction_id: TransactionId) -> Option<&mut Transaction> {
436 let entry = self
437 .undo_stack
438 .iter_mut()
439 .rfind(|entry| entry.transaction.id == transaction_id)
440 .or_else(|| {
441 self.redo_stack
442 .iter_mut()
443 .rfind(|entry| entry.transaction.id == transaction_id)
444 })?;
445 Some(&mut entry.transaction)
446 }
447
448 fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
449 if let Some(transaction) = self.forget(transaction)
450 && let Some(destination) = self.transaction_mut(destination)
451 {
452 destination.edit_ids.extend(transaction.edit_ids);
453 }
454 }
455
456 fn pop_redo(&mut self) -> Option<&HistoryEntry> {
457 assert_eq!(self.transaction_depth, 0);
458 if let Some(entry) = self.redo_stack.pop() {
459 self.undo_stack.push(entry);
460 self.undo_stack.last()
461 } else {
462 None
463 }
464 }
465
466 fn remove_from_redo(&mut self, transaction_id: TransactionId) -> &[HistoryEntry] {
467 assert_eq!(self.transaction_depth, 0);
468
469 let undo_stack_start_len = self.undo_stack.len();
470 if let Some(entry_ix) = self
471 .redo_stack
472 .iter()
473 .rposition(|entry| entry.transaction.id == transaction_id)
474 {
475 self.undo_stack
476 .extend(self.redo_stack.drain(entry_ix..).rev());
477 }
478 &self.undo_stack[undo_stack_start_len..]
479 }
480}
481
482struct Edits<'a, D: TextDimension, F: FnMut(&FragmentSummary) -> bool> {
483 visible_cursor: rope::Cursor<'a>,
484 deleted_cursor: rope::Cursor<'a>,
485 fragments_cursor: Option<FilterCursor<'a, 'static, F, Fragment, FragmentTextSummary>>,
486 undos: &'a UndoMap,
487 since: &'a clock::Global,
488 old_end: D,
489 new_end: D,
490 range: Range<(&'a Locator, usize)>,
491 buffer_id: BufferId,
492}
493
494#[derive(Clone, Debug, Default, Eq, PartialEq)]
495pub struct Edit<D> {
496 pub old: Range<D>,
497 pub new: Range<D>,
498}
499
500impl<D> Edit<D>
501where
502 D: Sub<D, Output = D> + PartialEq + Copy,
503{
504 pub fn old_len(&self) -> D {
505 self.old.end - self.old.start
506 }
507
508 pub fn new_len(&self) -> D {
509 self.new.end - self.new.start
510 }
511
512 pub fn is_empty(&self) -> bool {
513 self.old.start == self.old.end && self.new.start == self.new.end
514 }
515}
516
517impl<D1, D2> Edit<(D1, D2)> {
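    /// Splits an edit measured in a pair of dimensions into one edit per dimension,
    /// for example an `Edit<(usize, Point)>` into an `(Edit<usize>, Edit<Point>)`.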
518 pub fn flatten(self) -> (Edit<D1>, Edit<D2>) {
519 (
520 Edit {
521 old: self.old.start.0..self.old.end.0,
522 new: self.new.start.0..self.new.end.0,
523 },
524 Edit {
525 old: self.old.start.1..self.old.end.1,
526 new: self.new.start.1..self.new.end.1,
527 },
528 )
529 }
530}
531
532#[derive(Eq, PartialEq, Clone, Debug)]
533pub struct Fragment {
534 pub id: Locator,
535 pub timestamp: clock::Lamport,
536 pub insertion_offset: usize,
537 pub len: usize,
538 pub visible: bool,
539 pub deletions: HashSet<clock::Lamport>,
540 pub max_undos: clock::Global,
541}
542
543#[derive(Eq, PartialEq, Clone, Debug)]
544pub struct FragmentSummary {
545 text: FragmentTextSummary,
546 max_id: Locator,
547 max_version: clock::Global,
548 min_insertion_version: clock::Global,
549 max_insertion_version: clock::Global,
550}
551
552#[derive(Copy, Default, Clone, Debug, PartialEq, Eq)]
553struct FragmentTextSummary {
554 visible: usize,
555 deleted: usize,
556}
557
558impl<'a> sum_tree::Dimension<'a, FragmentSummary> for FragmentTextSummary {
559 fn zero(_: &Option<clock::Global>) -> Self {
560 Default::default()
561 }
562
563 fn add_summary(&mut self, summary: &'a FragmentSummary, _: &Option<clock::Global>) {
564 self.visible += summary.text.visible;
565 self.deleted += summary.text.deleted;
566 }
567}
568
569#[derive(Eq, PartialEq, Clone, Debug)]
570struct InsertionFragment {
571 timestamp: clock::Lamport,
572 split_offset: usize,
573 fragment_id: Locator,
574}
575
576#[derive(Clone, Copy, Debug, Default, PartialEq, Eq, PartialOrd, Ord)]
577struct InsertionFragmentKey {
578 timestamp: clock::Lamport,
579 split_offset: usize,
580}
581
582#[derive(Clone, Debug, Eq, PartialEq)]
583pub enum Operation {
584 Edit(EditOperation),
585 Undo(UndoOperation),
586}
587
588#[derive(Clone, Debug, Eq, PartialEq)]
589pub struct EditOperation {
590 pub timestamp: clock::Lamport,
591 pub version: clock::Global,
592 pub ranges: Vec<Range<FullOffset>>,
593 pub new_text: Vec<Arc<str>>,
594}
595
596#[derive(Clone, Debug, Eq, PartialEq)]
597pub struct UndoOperation {
598 pub timestamp: clock::Lamport,
599 pub version: clock::Global,
600 pub counts: HashMap<clock::Lamport, u32>,
601}
602
603/// Stores information about the indentation of a line (tabs and spaces).
604#[derive(Clone, Copy, Debug, Eq, PartialEq)]
605pub struct LineIndent {
606 pub tabs: u32,
607 pub spaces: u32,
608 pub line_blank: bool,
609}
610
611impl LineIndent {
612 pub fn from_chunks(chunks: &mut Chunks) -> Self {
613 let mut tabs = 0;
614 let mut spaces = 0;
615 let mut line_blank = true;
616
617 'outer: while let Some(chunk) = chunks.peek() {
618 for ch in chunk.chars() {
619 if ch == '\t' {
620 tabs += 1;
621 } else if ch == ' ' {
622 spaces += 1;
623 } else {
624 if ch != '\n' {
625 line_blank = false;
626 }
627 break 'outer;
628 }
629 }
630
631 chunks.next();
632 }
633
634 Self {
635 tabs,
636 spaces,
637 line_blank,
638 }
639 }
640
641 /// Constructs a new `LineIndent` which only contains spaces.
642 pub fn spaces(spaces: u32) -> Self {
643 Self {
644 tabs: 0,
645 spaces,
646 line_blank: true,
647 }
648 }
649
650 /// Constructs a new `LineIndent` which only contains tabs.
651 pub fn tabs(tabs: u32) -> Self {
652 Self {
653 tabs,
654 spaces: 0,
655 line_blank: true,
656 }
657 }
658
659 /// Indicates whether the line is empty.
660 pub fn is_line_empty(&self) -> bool {
661 self.tabs == 0 && self.spaces == 0 && self.line_blank
662 }
663
664 /// Indicates whether the line is blank (contains only whitespace).
665 pub fn is_line_blank(&self) -> bool {
666 self.line_blank
667 }
668
669 /// Returns the number of indentation characters (tabs or spaces).
670 pub fn raw_len(&self) -> u32 {
671 self.tabs + self.spaces
672 }
673
674 /// Returns the number of indentation characters (tabs or spaces), taking tab size into account.
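    ///
    /// A rough sketch of how this differs from `raw_len` (illustrative only):
    ///
    /// ```ignore
    /// let indent = LineIndent::from("\t  let x = 1;");
    /// assert_eq!(indent.raw_len(), 3); // one tab and two spaces
    /// assert_eq!(indent.len(4), 6);    // the tab expands to four columns
    /// ```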
675 pub fn len(&self, tab_size: u32) -> u32 {
676 self.tabs * tab_size + self.spaces
677 }
678}
679
680impl From<&str> for LineIndent {
681 fn from(value: &str) -> Self {
682 Self::from_iter(value.chars())
683 }
684}
685
686impl FromIterator<char> for LineIndent {
687 fn from_iter<T: IntoIterator<Item = char>>(chars: T) -> Self {
688 let mut tabs = 0;
689 let mut spaces = 0;
690 let mut line_blank = true;
691 for c in chars {
692 if c == '\t' {
693 tabs += 1;
694 } else if c == ' ' {
695 spaces += 1;
696 } else {
697 if c != '\n' {
698 line_blank = false;
699 }
700 break;
701 }
702 }
703 Self {
704 tabs,
705 spaces,
706 line_blank,
707 }
708 }
709}
710
711impl Buffer {
712 pub fn new(replica_id: u16, remote_id: BufferId, base_text: impl Into<String>) -> Buffer {
713 let mut base_text = base_text.into();
714 let line_ending = LineEnding::detect(&base_text);
715 LineEnding::normalize(&mut base_text);
716 Self::new_normalized(replica_id, remote_id, line_ending, Rope::from(&*base_text))
717 }
718
719 pub fn new_normalized(
720 replica_id: u16,
721 remote_id: BufferId,
722 line_ending: LineEnding,
723 normalized: Rope,
724 ) -> Buffer {
725 let history = History::new(normalized);
726 let mut fragments = SumTree::new(&None);
727 let mut insertions = SumTree::default();
728
729 let mut lamport_clock = clock::Lamport::new(replica_id);
730 let mut version = clock::Global::new();
731
732 let visible_text = history.base_text.clone();
733 if !visible_text.is_empty() {
734 let insertion_timestamp = clock::Lamport {
735 replica_id: 0,
736 value: 1,
737 };
738 lamport_clock.observe(insertion_timestamp);
739 version.observe(insertion_timestamp);
740 let fragment_id = Locator::between(&Locator::min(), &Locator::max());
741 let fragment = Fragment {
742 id: fragment_id,
743 timestamp: insertion_timestamp,
744 insertion_offset: 0,
745 len: visible_text.len(),
746 visible: true,
747 deletions: Default::default(),
748 max_undos: Default::default(),
749 };
750 insertions.push(InsertionFragment::new(&fragment), ());
751 fragments.push(fragment, &None);
752 }
753
754 Buffer {
755 snapshot: BufferSnapshot {
756 replica_id,
757 remote_id,
758 visible_text,
759 deleted_text: Rope::new(),
760 line_ending,
761 fragments,
762 insertions,
763 version,
764 undo_map: Default::default(),
765 insertion_slices: Default::default(),
766 },
767 history,
768 deferred_ops: OperationQueue::new(),
769 deferred_replicas: HashSet::default(),
770 lamport_clock,
771 subscriptions: Default::default(),
772 edit_id_resolvers: Default::default(),
773 wait_for_version_txs: Default::default(),
774 }
775 }
776
777 pub fn version(&self) -> clock::Global {
778 self.version.clone()
779 }
780
781 pub fn snapshot(&self) -> BufferSnapshot {
782 self.snapshot.clone()
783 }
784
785 pub fn branch(&self) -> Self {
786 Self {
787 snapshot: self.snapshot.clone(),
788 history: History::new(self.base_text().clone()),
789 deferred_ops: OperationQueue::new(),
790 deferred_replicas: HashSet::default(),
791 lamport_clock: clock::Lamport::new(LOCAL_BRANCH_REPLICA_ID),
792 subscriptions: Default::default(),
793 edit_id_resolvers: Default::default(),
794 wait_for_version_txs: Default::default(),
795 }
796 }
797
798 pub fn replica_id(&self) -> ReplicaId {
799 self.lamport_clock.replica_id
800 }
801
802 pub fn remote_id(&self) -> BufferId {
803 self.remote_id
804 }
805
806 pub fn deferred_ops_len(&self) -> usize {
807 self.deferred_ops.len()
808 }
809
810 pub fn transaction_group_interval(&self) -> Duration {
811 self.history.group_interval
812 }
813
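    /// Applies a batch of edits to the buffer, returning the operation that
    /// describes them.
    ///
    /// A rough usage sketch (illustrative only, not compiled as a doctest):
    ///
    /// ```ignore
    /// let mut buffer = Buffer::new(0, BufferId::new(1).unwrap(), "hello");
    /// buffer.edit([(5..5, " world")]);
    /// assert_eq!(buffer.text(), "hello world");
    /// buffer.undo();
    /// assert_eq!(buffer.text(), "hello");
    /// ```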
814 pub fn edit<R, I, S, T>(&mut self, edits: R) -> Operation
815 where
816 R: IntoIterator<IntoIter = I>,
817 I: ExactSizeIterator<Item = (Range<S>, T)>,
818 S: ToOffset,
819 T: Into<Arc<str>>,
820 {
821 let edits = edits
822 .into_iter()
823 .map(|(range, new_text)| (range, new_text.into()));
824
825 self.start_transaction();
826 let timestamp = self.lamport_clock.tick();
827 let operation = Operation::Edit(self.apply_local_edit(edits, timestamp));
828
829 self.history.push(operation.clone());
830 self.history.push_undo(operation.timestamp());
831 self.snapshot.version.observe(operation.timestamp());
832 self.end_transaction();
833 operation
834 }
835
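    // Splices the edits into the fragment tree: unchanged fragments are reused,
    // fragments straddling an edit boundary are split, newly inserted text becomes a
    // visible fragment, and the portions of existing fragments that fall inside a
    // deleted range are kept but marked invisible.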
836 fn apply_local_edit<S: ToOffset, T: Into<Arc<str>>>(
837 &mut self,
838 edits: impl ExactSizeIterator<Item = (Range<S>, T)>,
839 timestamp: clock::Lamport,
840 ) -> EditOperation {
841 let mut edits_patch = Patch::default();
842 let mut edit_op = EditOperation {
843 timestamp,
844 version: self.version(),
845 ranges: Vec::with_capacity(edits.len()),
846 new_text: Vec::with_capacity(edits.len()),
847 };
848 let mut new_insertions = Vec::new();
849 let mut insertion_offset = 0;
850 let mut insertion_slices = Vec::new();
851
852 let mut edits = edits
853 .map(|(range, new_text)| (range.to_offset(&*self), new_text))
854 .peekable();
855
856 let mut new_ropes =
857 RopeBuilder::new(self.visible_text.cursor(0), self.deleted_text.cursor(0));
858 let mut old_fragments = self.fragments.cursor::<FragmentTextSummary>(&None);
859 let mut new_fragments = old_fragments.slice(&edits.peek().unwrap().0.start, Bias::Right);
860 new_ropes.append(new_fragments.summary().text);
861
862 let mut fragment_start = old_fragments.start().visible;
863 for (range, new_text) in edits {
864 let new_text = LineEnding::normalize_arc(new_text.into());
865 let fragment_end = old_fragments.end().visible;
866
867 // If the current fragment ends before this range, then jump ahead to the first fragment
868 // that extends past the start of this range, reusing any intervening fragments.
869 if fragment_end < range.start {
870 // If the current fragment has been partially consumed, then consume the rest of it
871 // and advance to the next fragment before slicing.
872 if fragment_start > old_fragments.start().visible {
873 if fragment_end > fragment_start {
874 let mut suffix = old_fragments.item().unwrap().clone();
875 suffix.len = fragment_end - fragment_start;
876 suffix.insertion_offset += fragment_start - old_fragments.start().visible;
877 new_insertions.push(InsertionFragment::insert_new(&suffix));
878 new_ropes.push_fragment(&suffix, suffix.visible);
879 new_fragments.push(suffix, &None);
880 }
881 old_fragments.next();
882 }
883
884 let slice = old_fragments.slice(&range.start, Bias::Right);
885 new_ropes.append(slice.summary().text);
886 new_fragments.append(slice, &None);
887 fragment_start = old_fragments.start().visible;
888 }
889
890 let full_range_start = FullOffset(range.start + old_fragments.start().deleted);
891
892 // Preserve any portion of the current fragment that precedes this range.
893 if fragment_start < range.start {
894 let mut prefix = old_fragments.item().unwrap().clone();
895 prefix.len = range.start - fragment_start;
896 prefix.insertion_offset += fragment_start - old_fragments.start().visible;
897 prefix.id = Locator::between(&new_fragments.summary().max_id, &prefix.id);
898 new_insertions.push(InsertionFragment::insert_new(&prefix));
899 new_ropes.push_fragment(&prefix, prefix.visible);
900 new_fragments.push(prefix, &None);
901 fragment_start = range.start;
902 }
903
904 // Insert the new text before any existing fragments within the range.
905 if !new_text.is_empty() {
906 let new_start = new_fragments.summary().text.visible;
907
908 let fragment = Fragment {
909 id: Locator::between(
910 &new_fragments.summary().max_id,
911 old_fragments
912 .item()
913 .map_or(&Locator::max(), |old_fragment| &old_fragment.id),
914 ),
915 timestamp,
916 insertion_offset,
917 len: new_text.len(),
918 deletions: Default::default(),
919 max_undos: Default::default(),
920 visible: true,
921 };
922 edits_patch.push(Edit {
923 old: fragment_start..fragment_start,
924 new: new_start..new_start + new_text.len(),
925 });
926 insertion_slices.push(InsertionSlice::from_fragment(timestamp, &fragment));
927 new_insertions.push(InsertionFragment::insert_new(&fragment));
928 new_ropes.push_str(new_text.as_ref());
929 new_fragments.push(fragment, &None);
930 insertion_offset += new_text.len();
931 }
932
933 // Advance through every fragment that intersects this range, marking the intersecting
934 // portions as deleted.
935 while fragment_start < range.end {
936 let fragment = old_fragments.item().unwrap();
937 let fragment_end = old_fragments.end().visible;
938 let mut intersection = fragment.clone();
939 let intersection_end = cmp::min(range.end, fragment_end);
940 if fragment.visible {
941 intersection.len = intersection_end - fragment_start;
942 intersection.insertion_offset += fragment_start - old_fragments.start().visible;
943 intersection.id =
944 Locator::between(&new_fragments.summary().max_id, &intersection.id);
945 intersection.deletions.insert(timestamp);
946 intersection.visible = false;
947 }
948 if intersection.len > 0 {
949 if fragment.visible && !intersection.visible {
950 let new_start = new_fragments.summary().text.visible;
951 edits_patch.push(Edit {
952 old: fragment_start..intersection_end,
953 new: new_start..new_start,
954 });
955 insertion_slices
956 .push(InsertionSlice::from_fragment(timestamp, &intersection));
957 }
958 new_insertions.push(InsertionFragment::insert_new(&intersection));
959 new_ropes.push_fragment(&intersection, fragment.visible);
960 new_fragments.push(intersection, &None);
961 fragment_start = intersection_end;
962 }
963 if fragment_end <= range.end {
964 old_fragments.next();
965 }
966 }
967
968 let full_range_end = FullOffset(range.end + old_fragments.start().deleted);
969 edit_op.ranges.push(full_range_start..full_range_end);
970 edit_op.new_text.push(new_text);
971 }
972
973 // If the current fragment has been partially consumed, then consume the rest of it
974 // and advance to the next fragment before slicing.
975 if fragment_start > old_fragments.start().visible {
976 let fragment_end = old_fragments.end().visible;
977 if fragment_end > fragment_start {
978 let mut suffix = old_fragments.item().unwrap().clone();
979 suffix.len = fragment_end - fragment_start;
980 suffix.insertion_offset += fragment_start - old_fragments.start().visible;
981 new_insertions.push(InsertionFragment::insert_new(&suffix));
982 new_ropes.push_fragment(&suffix, suffix.visible);
983 new_fragments.push(suffix, &None);
984 }
985 old_fragments.next();
986 }
987
988 let suffix = old_fragments.suffix();
989 new_ropes.append(suffix.summary().text);
990 new_fragments.append(suffix, &None);
991 let (visible_text, deleted_text) = new_ropes.finish();
992 drop(old_fragments);
993
994 self.snapshot.fragments = new_fragments;
995 self.snapshot.insertions.edit(new_insertions, ());
996 self.snapshot.visible_text = visible_text;
997 self.snapshot.deleted_text = deleted_text;
998 self.subscriptions.publish_mut(&edits_patch);
999 self.snapshot.insertion_slices.extend(insertion_slices);
1000 edit_op
1001 }
1002
1003 pub fn set_line_ending(&mut self, line_ending: LineEnding) {
1004 self.snapshot.line_ending = line_ending;
1005 }
1006
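    /// Applies operations received from other replicas, deferring any operation whose
    /// causal dependencies have not yet been observed.
    ///
    /// A rough convergence sketch (illustrative only, not compiled as a doctest):
    ///
    /// ```ignore
    /// let mut a = Buffer::new(0, BufferId::new(1).unwrap(), "abc");
    /// let mut b = Buffer::new(1, BufferId::new(1).unwrap(), "abc");
    /// let op_a = a.edit([(3..3, "d")]);
    /// let op_b = b.edit([(0..0, "z")]);
    /// a.apply_ops([op_b]);
    /// b.apply_ops([op_a]);
    /// assert_eq!(a.text(), b.text());
    /// ```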
1007 pub fn apply_ops<I: IntoIterator<Item = Operation>>(&mut self, ops: I) {
1008 let mut deferred_ops = Vec::new();
1009 for op in ops {
1010 self.history.push(op.clone());
1011 if self.can_apply_op(&op) {
1012 self.apply_op(op);
1013 } else {
1014 self.deferred_replicas.insert(op.replica_id());
1015 deferred_ops.push(op);
1016 }
1017 }
1018 self.deferred_ops.insert(deferred_ops);
1019 self.flush_deferred_ops();
1020 }
1021
1022 fn apply_op(&mut self, op: Operation) {
1023 match op {
1024 Operation::Edit(edit) => {
1025 if !self.version.observed(edit.timestamp) {
1026 self.apply_remote_edit(
1027 &edit.version,
1028 &edit.ranges,
1029 &edit.new_text,
1030 edit.timestamp,
1031 );
1032 self.snapshot.version.observe(edit.timestamp);
1033 self.lamport_clock.observe(edit.timestamp);
1034 self.resolve_edit(edit.timestamp);
1035 }
1036 }
1037 Operation::Undo(undo) => {
1038 if !self.version.observed(undo.timestamp) {
1039 self.apply_undo(&undo);
1040 self.snapshot.version.observe(undo.timestamp);
1041 self.lamport_clock.observe(undo.timestamp);
1042 }
1043 }
1044 }
1045 self.wait_for_version_txs.retain_mut(|(version, tx)| {
1046 if self.snapshot.version().observed_all(version) {
1047 tx.try_send(()).ok();
1048 false
1049 } else {
1050 true
1051 }
1052 });
1053 }
1054
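    // Like `apply_local_edit`, but the incoming ranges are full offsets in the
    // coordinate space described by `version`, so the cursor tracks
    // `VersionedFullOffset` and concurrent insertions at the same position are
    // ordered by comparing lamport timestamps.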
1055 fn apply_remote_edit(
1056 &mut self,
1057 version: &clock::Global,
1058 ranges: &[Range<FullOffset>],
1059 new_text: &[Arc<str>],
1060 timestamp: clock::Lamport,
1061 ) {
1062 if ranges.is_empty() {
1063 return;
1064 }
1065
1066 let edits = ranges.iter().zip(new_text.iter());
1067 let mut edits_patch = Patch::default();
1068 let mut insertion_slices = Vec::new();
1069 let cx = Some(version.clone());
1070 let mut new_insertions = Vec::new();
1071 let mut insertion_offset = 0;
1072 let mut new_ropes =
1073 RopeBuilder::new(self.visible_text.cursor(0), self.deleted_text.cursor(0));
1074 let mut old_fragments = self
1075 .fragments
1076 .cursor::<Dimensions<VersionedFullOffset, usize>>(&cx);
1077 let mut new_fragments =
1078 old_fragments.slice(&VersionedFullOffset::Offset(ranges[0].start), Bias::Left);
1079 new_ropes.append(new_fragments.summary().text);
1080
1081 let mut fragment_start = old_fragments.start().0.full_offset();
1082 for (range, new_text) in edits {
1083 let fragment_end = old_fragments.end().0.full_offset();
1084
1085 // If the current fragment ends before this range, then jump ahead to the first fragment
1086 // that extends past the start of this range, reusing any intervening fragments.
1087 if fragment_end < range.start {
1088 // If the current fragment has been partially consumed, then consume the rest of it
1089 // and advance to the next fragment before slicing.
1090 if fragment_start > old_fragments.start().0.full_offset() {
1091 if fragment_end > fragment_start {
1092 let mut suffix = old_fragments.item().unwrap().clone();
1093 suffix.len = fragment_end.0 - fragment_start.0;
1094 suffix.insertion_offset +=
1095 fragment_start - old_fragments.start().0.full_offset();
1096 new_insertions.push(InsertionFragment::insert_new(&suffix));
1097 new_ropes.push_fragment(&suffix, suffix.visible);
1098 new_fragments.push(suffix, &None);
1099 }
1100 old_fragments.next();
1101 }
1102
1103 let slice =
1104 old_fragments.slice(&VersionedFullOffset::Offset(range.start), Bias::Left);
1105 new_ropes.append(slice.summary().text);
1106 new_fragments.append(slice, &None);
1107 fragment_start = old_fragments.start().0.full_offset();
1108 }
1109
1110 // If we are at the end of a non-concurrent fragment, advance to the next one.
1111 let fragment_end = old_fragments.end().0.full_offset();
1112 if fragment_end == range.start && fragment_end > fragment_start {
1113 let mut fragment = old_fragments.item().unwrap().clone();
1114 fragment.len = fragment_end.0 - fragment_start.0;
1115 fragment.insertion_offset += fragment_start - old_fragments.start().0.full_offset();
1116 new_insertions.push(InsertionFragment::insert_new(&fragment));
1117 new_ropes.push_fragment(&fragment, fragment.visible);
1118 new_fragments.push(fragment, &None);
1119 old_fragments.next();
1120 fragment_start = old_fragments.start().0.full_offset();
1121 }
1122
            // Skip over insertions that are concurrent to this edit but have a higher
            // lamport timestamp, so that they remain ordered before this insertion.
1125 while let Some(fragment) = old_fragments.item() {
1126 if fragment_start == range.start && fragment.timestamp > timestamp {
1127 new_ropes.push_fragment(fragment, fragment.visible);
1128 new_fragments.push(fragment.clone(), &None);
1129 old_fragments.next();
1130 debug_assert_eq!(fragment_start, range.start);
1131 } else {
1132 break;
1133 }
1134 }
1135 debug_assert!(fragment_start <= range.start);
1136
1137 // Preserve any portion of the current fragment that precedes this range.
1138 if fragment_start < range.start {
1139 let mut prefix = old_fragments.item().unwrap().clone();
1140 prefix.len = range.start.0 - fragment_start.0;
1141 prefix.insertion_offset += fragment_start - old_fragments.start().0.full_offset();
1142 prefix.id = Locator::between(&new_fragments.summary().max_id, &prefix.id);
1143 new_insertions.push(InsertionFragment::insert_new(&prefix));
1144 fragment_start = range.start;
1145 new_ropes.push_fragment(&prefix, prefix.visible);
1146 new_fragments.push(prefix, &None);
1147 }
1148
1149 // Insert the new text before any existing fragments within the range.
1150 if !new_text.is_empty() {
1151 let mut old_start = old_fragments.start().1;
1152 if old_fragments.item().is_some_and(|f| f.visible) {
1153 old_start += fragment_start.0 - old_fragments.start().0.full_offset().0;
1154 }
1155 let new_start = new_fragments.summary().text.visible;
1156 let fragment = Fragment {
1157 id: Locator::between(
1158 &new_fragments.summary().max_id,
1159 old_fragments
1160 .item()
1161 .map_or(&Locator::max(), |old_fragment| &old_fragment.id),
1162 ),
1163 timestamp,
1164 insertion_offset,
1165 len: new_text.len(),
1166 deletions: Default::default(),
1167 max_undos: Default::default(),
1168 visible: true,
1169 };
1170 edits_patch.push(Edit {
1171 old: old_start..old_start,
1172 new: new_start..new_start + new_text.len(),
1173 });
1174 insertion_slices.push(InsertionSlice::from_fragment(timestamp, &fragment));
1175 new_insertions.push(InsertionFragment::insert_new(&fragment));
1176 new_ropes.push_str(new_text);
1177 new_fragments.push(fragment, &None);
1178 insertion_offset += new_text.len();
1179 }
1180
1181 // Advance through every fragment that intersects this range, marking the intersecting
1182 // portions as deleted.
1183 while fragment_start < range.end {
1184 let fragment = old_fragments.item().unwrap();
1185 let fragment_end = old_fragments.end().0.full_offset();
1186 let mut intersection = fragment.clone();
1187 let intersection_end = cmp::min(range.end, fragment_end);
1188 if fragment.was_visible(version, &self.undo_map) {
1189 intersection.len = intersection_end.0 - fragment_start.0;
1190 intersection.insertion_offset +=
1191 fragment_start - old_fragments.start().0.full_offset();
1192 intersection.id =
1193 Locator::between(&new_fragments.summary().max_id, &intersection.id);
1194 intersection.deletions.insert(timestamp);
1195 intersection.visible = false;
1196 insertion_slices.push(InsertionSlice::from_fragment(timestamp, &intersection));
1197 }
1198 if intersection.len > 0 {
1199 if fragment.visible && !intersection.visible {
1200 let old_start = old_fragments.start().1
1201 + (fragment_start.0 - old_fragments.start().0.full_offset().0);
1202 let new_start = new_fragments.summary().text.visible;
1203 edits_patch.push(Edit {
1204 old: old_start..old_start + intersection.len,
1205 new: new_start..new_start,
1206 });
1207 }
1208 new_insertions.push(InsertionFragment::insert_new(&intersection));
1209 new_ropes.push_fragment(&intersection, fragment.visible);
1210 new_fragments.push(intersection, &None);
1211 fragment_start = intersection_end;
1212 }
1213 if fragment_end <= range.end {
1214 old_fragments.next();
1215 }
1216 }
1217 }
1218
1219 // If the current fragment has been partially consumed, then consume the rest of it
1220 // and advance to the next fragment before slicing.
1221 if fragment_start > old_fragments.start().0.full_offset() {
1222 let fragment_end = old_fragments.end().0.full_offset();
1223 if fragment_end > fragment_start {
1224 let mut suffix = old_fragments.item().unwrap().clone();
1225 suffix.len = fragment_end.0 - fragment_start.0;
1226 suffix.insertion_offset += fragment_start - old_fragments.start().0.full_offset();
1227 new_insertions.push(InsertionFragment::insert_new(&suffix));
1228 new_ropes.push_fragment(&suffix, suffix.visible);
1229 new_fragments.push(suffix, &None);
1230 }
1231 old_fragments.next();
1232 }
1233
1234 let suffix = old_fragments.suffix();
1235 new_ropes.append(suffix.summary().text);
1236 new_fragments.append(suffix, &None);
1237 let (visible_text, deleted_text) = new_ropes.finish();
1238 drop(old_fragments);
1239
1240 self.snapshot.fragments = new_fragments;
1241 self.snapshot.visible_text = visible_text;
1242 self.snapshot.deleted_text = deleted_text;
1243 self.snapshot.insertions.edit(new_insertions, ());
1244 self.snapshot.insertion_slices.extend(insertion_slices);
1245 self.subscriptions.publish_mut(&edits_patch)
1246 }
1247
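    // Maps a set of edit ids to the locators of every fragment those edits inserted or
    // deleted, using the insertion slices that were recorded when the edits were
    // applied. Used when undoing or redoing those edits.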
1248 fn fragment_ids_for_edits<'a>(
1249 &'a self,
1250 edit_ids: impl Iterator<Item = &'a clock::Lamport>,
1251 ) -> Vec<&'a Locator> {
1252 // Get all of the insertion slices changed by the given edits.
1253 let mut insertion_slices = Vec::new();
1254 for edit_id in edit_ids {
1255 let insertion_slice = InsertionSlice {
1256 edit_id: *edit_id,
1257 insertion_id: clock::Lamport::default(),
1258 range: 0..0,
1259 };
1260 let slices = self
1261 .snapshot
1262 .insertion_slices
1263 .iter_from(&insertion_slice)
1264 .take_while(|slice| slice.edit_id == *edit_id);
1265 insertion_slices.extend(slices)
1266 }
1267 insertion_slices
1268 .sort_unstable_by_key(|s| (s.insertion_id, s.range.start, Reverse(s.range.end)));
1269
1270 // Get all of the fragments corresponding to these insertion slices.
1271 let mut fragment_ids = Vec::new();
1272 let mut insertions_cursor = self.insertions.cursor::<InsertionFragmentKey>(());
1273 for insertion_slice in &insertion_slices {
1274 if insertion_slice.insertion_id != insertions_cursor.start().timestamp
1275 || insertion_slice.range.start > insertions_cursor.start().split_offset
1276 {
1277 insertions_cursor.seek_forward(
1278 &InsertionFragmentKey {
1279 timestamp: insertion_slice.insertion_id,
1280 split_offset: insertion_slice.range.start,
1281 },
1282 Bias::Left,
1283 );
1284 }
1285 while let Some(item) = insertions_cursor.item() {
1286 if item.timestamp != insertion_slice.insertion_id
1287 || item.split_offset >= insertion_slice.range.end
1288 {
1289 break;
1290 }
1291 fragment_ids.push(&item.fragment_id);
1292 insertions_cursor.next();
1293 }
1294 }
1295 fragment_ids.sort_unstable();
1296 fragment_ids
1297 }
1298
1299 fn apply_undo(&mut self, undo: &UndoOperation) {
1300 self.snapshot.undo_map.insert(undo);
1301
1302 let mut edits = Patch::default();
1303 let mut old_fragments = self
1304 .fragments
1305 .cursor::<Dimensions<Option<&Locator>, usize>>(&None);
1306 let mut new_fragments = SumTree::new(&None);
1307 let mut new_ropes =
1308 RopeBuilder::new(self.visible_text.cursor(0), self.deleted_text.cursor(0));
1309
1310 for fragment_id in self.fragment_ids_for_edits(undo.counts.keys()) {
1311 let preceding_fragments = old_fragments.slice(&Some(fragment_id), Bias::Left);
1312 new_ropes.append(preceding_fragments.summary().text);
1313 new_fragments.append(preceding_fragments, &None);
1314
1315 if let Some(fragment) = old_fragments.item() {
1316 let mut fragment = fragment.clone();
1317 let fragment_was_visible = fragment.visible;
1318
1319 fragment.visible = fragment.is_visible(&self.undo_map);
1320 fragment.max_undos.observe(undo.timestamp);
1321
1322 let old_start = old_fragments.start().1;
1323 let new_start = new_fragments.summary().text.visible;
1324 if fragment_was_visible && !fragment.visible {
1325 edits.push(Edit {
1326 old: old_start..old_start + fragment.len,
1327 new: new_start..new_start,
1328 });
1329 } else if !fragment_was_visible && fragment.visible {
1330 edits.push(Edit {
1331 old: old_start..old_start,
1332 new: new_start..new_start + fragment.len,
1333 });
1334 }
1335 new_ropes.push_fragment(&fragment, fragment_was_visible);
1336 new_fragments.push(fragment, &None);
1337
1338 old_fragments.next();
1339 }
1340 }
1341
1342 let suffix = old_fragments.suffix();
1343 new_ropes.append(suffix.summary().text);
1344 new_fragments.append(suffix, &None);
1345
1346 drop(old_fragments);
1347 let (visible_text, deleted_text) = new_ropes.finish();
1348 self.snapshot.fragments = new_fragments;
1349 self.snapshot.visible_text = visible_text;
1350 self.snapshot.deleted_text = deleted_text;
1351 self.subscriptions.publish_mut(&edits);
1352 }
1353
1354 fn flush_deferred_ops(&mut self) {
1355 self.deferred_replicas.clear();
1356 let mut deferred_ops = Vec::new();
1357 for op in self.deferred_ops.drain().iter().cloned() {
1358 if self.can_apply_op(&op) {
1359 self.apply_op(op);
1360 } else {
1361 self.deferred_replicas.insert(op.replica_id());
1362 deferred_ops.push(op);
1363 }
1364 }
1365 self.deferred_ops.insert(deferred_ops);
1366 }
1367
1368 fn can_apply_op(&self, op: &Operation) -> bool {
1369 if self.deferred_replicas.contains(&op.replica_id()) {
1370 false
1371 } else {
1372 self.version.observed_all(match op {
1373 Operation::Edit(edit) => &edit.version,
1374 Operation::Undo(undo) => &undo.version,
1375 })
1376 }
1377 }
1378
1379 pub fn has_deferred_ops(&self) -> bool {
1380 !self.deferred_ops.is_empty()
1381 }
1382
1383 pub fn peek_undo_stack(&self) -> Option<&HistoryEntry> {
1384 self.history.undo_stack.last()
1385 }
1386
1387 pub fn peek_redo_stack(&self) -> Option<&HistoryEntry> {
1388 self.history.redo_stack.last()
1389 }
1390
1391 pub fn start_transaction(&mut self) -> Option<TransactionId> {
1392 self.start_transaction_at(Instant::now())
1393 }
1394
1395 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
1396 self.history
1397 .start_transaction(self.version.clone(), now, &mut self.lamport_clock)
1398 }
1399
1400 pub fn end_transaction(&mut self) -> Option<(TransactionId, clock::Global)> {
1401 self.end_transaction_at(Instant::now())
1402 }
1403
1404 pub fn end_transaction_at(&mut self, now: Instant) -> Option<(TransactionId, clock::Global)> {
1405 if let Some(entry) = self.history.end_transaction(now) {
1406 let since = entry.transaction.start.clone();
1407 let id = self.history.group().unwrap();
1408 Some((id, since))
1409 } else {
1410 None
1411 }
1412 }
1413
1414 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
1415 self.history.finalize_last_transaction()
1416 }
1417
1418 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
1419 self.history.group_until(transaction_id);
1420 }
1421
1422 pub fn base_text(&self) -> &Rope {
1423 &self.history.base_text
1424 }
1425
1426 pub fn operations(&self) -> &TreeMap<clock::Lamport, Operation> {
1427 &self.history.operations
1428 }
1429
1430 pub fn undo(&mut self) -> Option<(TransactionId, Operation)> {
1431 if let Some(entry) = self.history.pop_undo() {
1432 let transaction = entry.transaction.clone();
1433 let transaction_id = transaction.id;
1434 let op = self.undo_or_redo(transaction);
1435 Some((transaction_id, op))
1436 } else {
1437 None
1438 }
1439 }
1440
1441 pub fn undo_transaction(&mut self, transaction_id: TransactionId) -> Option<Operation> {
1442 let transaction = self
1443 .history
1444 .remove_from_undo(transaction_id)?
1445 .transaction
1446 .clone();
1447 Some(self.undo_or_redo(transaction))
1448 }
1449
1450 pub fn undo_to_transaction(&mut self, transaction_id: TransactionId) -> Vec<Operation> {
1451 let transactions = self
1452 .history
1453 .remove_from_undo_until(transaction_id)
1454 .iter()
1455 .map(|entry| entry.transaction.clone())
1456 .collect::<Vec<_>>();
1457
1458 transactions
1459 .into_iter()
1460 .map(|transaction| self.undo_or_redo(transaction))
1461 .collect()
1462 }
1463
1464 pub fn forget_transaction(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
1465 self.history.forget(transaction_id)
1466 }
1467
1468 pub fn get_transaction(&self, transaction_id: TransactionId) -> Option<&Transaction> {
1469 self.history.transaction(transaction_id)
1470 }
1471
1472 pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
1473 self.history.merge_transactions(transaction, destination);
1474 }
1475
1476 pub fn redo(&mut self) -> Option<(TransactionId, Operation)> {
1477 if let Some(entry) = self.history.pop_redo() {
1478 let transaction = entry.transaction.clone();
1479 let transaction_id = transaction.id;
1480 let op = self.undo_or_redo(transaction);
1481 Some((transaction_id, op))
1482 } else {
1483 None
1484 }
1485 }
1486
1487 pub fn redo_to_transaction(&mut self, transaction_id: TransactionId) -> Vec<Operation> {
1488 let transactions = self
1489 .history
1490 .remove_from_redo(transaction_id)
1491 .iter()
1492 .map(|entry| entry.transaction.clone())
1493 .collect::<Vec<_>>();
1494
1495 transactions
1496 .into_iter()
1497 .map(|transaction| self.undo_or_redo(transaction))
1498 .collect()
1499 }
1500
1501 fn undo_or_redo(&mut self, transaction: Transaction) -> Operation {
1502 let mut counts = HashMap::default();
1503 for edit_id in transaction.edit_ids {
1504 counts.insert(edit_id, self.undo_map.undo_count(edit_id).saturating_add(1));
1505 }
1506
1507 let operation = self.undo_operations(counts);
1508 self.history.push(operation.clone());
1509 operation
1510 }
1511
1512 pub fn undo_operations(&mut self, counts: HashMap<clock::Lamport, u32>) -> Operation {
1513 let timestamp = self.lamport_clock.tick();
1514 let version = self.version();
1515 self.snapshot.version.observe(timestamp);
1516 let undo = UndoOperation {
1517 timestamp,
1518 version,
1519 counts,
1520 };
1521 self.apply_undo(&undo);
1522 Operation::Undo(undo)
1523 }
1524
1525 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
1526 self.history.push_transaction(transaction, now);
1527 }
1528
1531 /// Differs from `push_transaction` in that it does not clear the redo
1532 /// stack. Intended to be used to create a parent transaction to merge
1533 /// potential child transactions into.
1534 ///
1535 /// The caller is responsible for removing it from the undo history using
1536 /// `forget_transaction` if no edits are merged into it. Otherwise, if edits
1537 /// are merged into this transaction, the caller is responsible for ensuring
1538 /// the redo stack is cleared. The easiest way to ensure the redo stack is
1539 /// cleared is to create transactions with the usual `start_transaction` and
1540 /// `end_transaction` methods and merging the resulting transactions into
    /// the transaction created by this method.
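    ///
    /// A rough usage sketch of that protocol (illustrative only, not compiled as a doctest):
    ///
    /// ```ignore
    /// let mut buffer = Buffer::new(0, BufferId::new(1).unwrap(), "");
    /// let parent = buffer.push_empty_transaction(Instant::now());
    /// buffer.start_transaction();
    /// buffer.edit([(0..0, "x")]);
    /// if let Some((child, _)) = buffer.end_transaction() {
    ///     buffer.merge_transactions(child, parent);
    /// } else {
    ///     buffer.forget_transaction(parent);
    /// }
    /// ```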
1542 pub fn push_empty_transaction(&mut self, now: Instant) -> TransactionId {
1543 self.history
1544 .push_empty_transaction(self.version.clone(), now, &mut self.lamport_clock)
1545 }
1546
1547 pub fn edited_ranges_for_transaction_id<D>(
1548 &self,
1549 transaction_id: TransactionId,
1550 ) -> impl '_ + Iterator<Item = Range<D>>
1551 where
1552 D: TextDimension,
1553 {
1554 self.history
1555 .transaction(transaction_id)
1556 .into_iter()
1557 .flat_map(|transaction| self.edited_ranges_for_transaction(transaction))
1558 }
1559
1560 pub fn edited_ranges_for_edit_ids<'a, D>(
1561 &'a self,
1562 edit_ids: impl IntoIterator<Item = &'a clock::Lamport>,
1563 ) -> impl 'a + Iterator<Item = Range<D>>
1564 where
1565 D: TextDimension,
1566 {
        // Get the offset ranges of the fragments affected by these edits.
1568 let mut cursor = self
1569 .fragments
1570 .cursor::<Dimensions<Option<&Locator>, usize>>(&None);
1571 let offset_ranges = self
1572 .fragment_ids_for_edits(edit_ids.into_iter())
1573 .into_iter()
1574 .filter_map(move |fragment_id| {
1575 cursor.seek_forward(&Some(fragment_id), Bias::Left);
1576 let fragment = cursor.item()?;
1577 let start_offset = cursor.start().1;
1578 let end_offset = start_offset + if fragment.visible { fragment.len } else { 0 };
1579 Some(start_offset..end_offset)
1580 });
1581
        // Combine adjacent ranges.
1583 let mut prev_range: Option<Range<usize>> = None;
1584 let disjoint_ranges = offset_ranges
1585 .map(Some)
1586 .chain([None])
1587 .filter_map(move |range| {
1588 if let Some((range, prev_range)) = range.as_ref().zip(prev_range.as_mut())
1589 && prev_range.end == range.start
1590 {
1591 prev_range.end = range.end;
1592 return None;
1593 }
1594 let result = prev_range.clone();
1595 prev_range = range;
1596 result
1597 });
1598
        // Convert to the desired text dimension.
1600 let mut position = D::zero(());
1601 let mut rope_cursor = self.visible_text.cursor(0);
1602 disjoint_ranges.map(move |range| {
1603 position.add_assign(&rope_cursor.summary(range.start));
1604 let start = position;
1605 position.add_assign(&rope_cursor.summary(range.end));
1606 let end = position;
1607 start..end
1608 })
1609 }
1610
1611 pub fn edited_ranges_for_transaction<'a, D>(
1612 &'a self,
1613 transaction: &'a Transaction,
1614 ) -> impl 'a + Iterator<Item = Range<D>>
1615 where
1616 D: TextDimension,
1617 {
1618 self.edited_ranges_for_edit_ids(&transaction.edit_ids)
1619 }
1620
1621 pub fn subscribe(&mut self) -> Subscription {
1622 self.subscriptions.subscribe()
1623 }
1624
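    /// Returns a future that resolves once this buffer has observed all of the given
    /// edit ids, and fails if the buffer gives up waiting before then.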
1625 pub fn wait_for_edits<It: IntoIterator<Item = clock::Lamport>>(
1626 &mut self,
1627 edit_ids: It,
1628 ) -> impl 'static + Future<Output = Result<()>> + use<It> {
1629 let mut futures = Vec::new();
1630 for edit_id in edit_ids {
1631 if !self.version.observed(edit_id) {
1632 let (tx, rx) = oneshot::channel();
1633 self.edit_id_resolvers.entry(edit_id).or_default().push(tx);
1634 futures.push(rx);
1635 }
1636 }
1637
1638 async move {
1639 for mut future in futures {
1640 if future.recv().await.is_none() {
1641 anyhow::bail!("gave up waiting for edits");
1642 }
1643 }
1644 Ok(())
1645 }
1646 }
1647
1648 pub fn wait_for_anchors<It: IntoIterator<Item = Anchor>>(
1649 &mut self,
1650 anchors: It,
1651 ) -> impl 'static + Future<Output = Result<()>> + use<It> {
1652 let mut futures = Vec::new();
1653 for anchor in anchors {
1654 if !self.version.observed(anchor.timestamp)
1655 && anchor != Anchor::MAX
1656 && anchor != Anchor::MIN
1657 {
1658 let (tx, rx) = oneshot::channel();
1659 self.edit_id_resolvers
1660 .entry(anchor.timestamp)
1661 .or_default()
1662 .push(tx);
1663 futures.push(rx);
1664 }
1665 }
1666
1667 async move {
1668 for mut future in futures {
1669 if future.recv().await.is_none() {
1670 anyhow::bail!("gave up waiting for anchors");
1671 }
1672 }
1673 Ok(())
1674 }
1675 }
1676
1677 pub fn wait_for_version(
1678 &mut self,
1679 version: clock::Global,
1680 ) -> impl Future<Output = Result<()>> + use<> {
1681 let mut rx = None;
1682 if !self.snapshot.version.observed_all(&version) {
1683 let channel = oneshot::channel();
1684 self.wait_for_version_txs.push((version, channel.0));
1685 rx = Some(channel.1);
1686 }
1687 async move {
1688 if let Some(mut rx) = rx
1689 && rx.recv().await.is_none()
1690 {
1691 anyhow::bail!("gave up waiting for version");
1692 }
1693 Ok(())
1694 }
1695 }
1696
1697 pub fn give_up_waiting(&mut self) {
1698 self.edit_id_resolvers.clear();
1699 self.wait_for_version_txs.clear();
1700 }
1701
1702 fn resolve_edit(&mut self, edit_id: clock::Lamport) {
1703 for mut tx in self
1704 .edit_id_resolvers
1705 .remove(&edit_id)
1706 .into_iter()
1707 .flatten()
1708 {
1709 tx.try_send(()).ok();
1710 }
1711 }
1712}
1713
1714#[cfg(any(test, feature = "test-support"))]
1715impl Buffer {
1716 #[track_caller]
1717 pub fn edit_via_marked_text(&mut self, marked_string: &str) {
1718 let edits = self.edits_for_marked_text(marked_string);
1719 self.edit(edits);
1720 }
1721
1722 #[track_caller]
1723 pub fn edits_for_marked_text(&self, marked_string: &str) -> Vec<(Range<usize>, String)> {
1724 let old_text = self.text();
1725 let (new_text, mut ranges) = util::test::marked_text_ranges(marked_string, false);
1726 if ranges.is_empty() {
1727 ranges.push(0..new_text.len());
1728 }
1729
1730 assert_eq!(
1731 old_text[..ranges[0].start],
1732 new_text[..ranges[0].start],
1733 "invalid edit"
1734 );
1735
1736 let mut delta = 0;
1737 let mut edits = Vec::new();
1738 let mut ranges = ranges.into_iter().peekable();
1739
1740 while let Some(inserted_range) = ranges.next() {
1741 let new_start = inserted_range.start;
1742 let old_start = (new_start as isize - delta) as usize;
1743
1744 let following_text = if let Some(next_range) = ranges.peek() {
1745 &new_text[inserted_range.end..next_range.start]
1746 } else {
1747 &new_text[inserted_range.end..]
1748 };
1749
1750 let inserted_len = inserted_range.len();
1751 let deleted_len = old_text[old_start..]
1752 .find(following_text)
1753 .expect("invalid edit");
1754
1755 let old_range = old_start..old_start + deleted_len;
1756 edits.push((old_range, new_text[inserted_range].to_string()));
1757 delta += inserted_len as isize - deleted_len as isize;
1758 }
1759
1760 assert_eq!(
1761 old_text.len() as isize + delta,
1762 new_text.len() as isize,
1763 "invalid edit"
1764 );
1765
1766 edits
1767 }
1768
1769 pub fn check_invariants(&self) {
1770 // Ensure every fragment is ordered by locator in the fragment tree and corresponds
1771 // to an insertion fragment in the insertions tree.
1772 let mut prev_fragment_id = Locator::min();
1773 for fragment in self.snapshot.fragments.items(&None) {
1774 assert!(fragment.id > prev_fragment_id);
1775 prev_fragment_id = fragment.id.clone();
1776
1777 let insertion_fragment = self
1778 .snapshot
1779 .insertions
1780 .get(
1781 &InsertionFragmentKey {
1782 timestamp: fragment.timestamp,
1783 split_offset: fragment.insertion_offset,
1784 },
1785 (),
1786 )
1787 .unwrap();
1788 assert_eq!(
1789 insertion_fragment.fragment_id, fragment.id,
1790 "fragment: {:?}\ninsertion: {:?}",
1791 fragment, insertion_fragment
1792 );
1793 }
1794
1795 let mut cursor = self.snapshot.fragments.cursor::<Option<&Locator>>(&None);
1796 for insertion_fragment in self.snapshot.insertions.cursor::<()>(()) {
1797 cursor.seek(&Some(&insertion_fragment.fragment_id), Bias::Left);
1798 let fragment = cursor.item().unwrap();
1799 assert_eq!(insertion_fragment.fragment_id, fragment.id);
1800 assert_eq!(insertion_fragment.split_offset, fragment.insertion_offset);
1801 }
1802
1803 let fragment_summary = self.snapshot.fragments.summary();
1804 assert_eq!(
1805 fragment_summary.text.visible,
1806 self.snapshot.visible_text.len()
1807 );
1808 assert_eq!(
1809 fragment_summary.text.deleted,
1810 self.snapshot.deleted_text.len()
1811 );
1812
1813 assert!(!self.text().contains("\r\n"));
1814 }
1815
1816 pub fn set_group_interval(&mut self, group_interval: Duration) {
1817 self.history.group_interval = group_interval;
1818 }
1819
1820 pub fn random_byte_range(&self, start_offset: usize, rng: &mut impl rand::Rng) -> Range<usize> {
1821 let end = self.clip_offset(rng.random_range(start_offset..=self.len()), Bias::Right);
1822 let start = self.clip_offset(rng.random_range(start_offset..=end), Bias::Right);
1823 start..end
1824 }
1825
1826 pub fn get_random_edits<T>(
1827 &self,
1828 rng: &mut T,
1829 edit_count: usize,
1830 ) -> Vec<(Range<usize>, Arc<str>)>
1831 where
1832 T: rand::Rng,
1833 {
1834 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
1835 let mut last_end = None;
1836 for _ in 0..edit_count {
1837 if last_end.is_some_and(|last_end| last_end >= self.len()) {
1838 break;
1839 }
1840 let new_start = last_end.map_or(0, |last_end| last_end + 1);
1841 let range = self.random_byte_range(new_start, rng);
1842 last_end = Some(range.end);
1843
1844 let new_text_len = rng.random_range(0..10);
1845 let new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
1846
1847 edits.push((range, new_text.into()));
1848 }
1849 edits
1850 }
1851
1852 pub fn randomly_edit<T>(
1853 &mut self,
1854 rng: &mut T,
1855 edit_count: usize,
1856 ) -> (Vec<(Range<usize>, Arc<str>)>, Operation)
1857 where
1858 T: rand::Rng,
1859 {
1860 let mut edits = self.get_random_edits(rng, edit_count);
1861 log::info!("mutating buffer {} with {:?}", self.replica_id, edits);
1862
1863 let op = self.edit(edits.iter().cloned());
1864 if let Operation::Edit(edit) = &op {
1865 assert_eq!(edits.len(), edit.new_text.len());
1866 for (edit, new_text) in edits.iter_mut().zip(&edit.new_text) {
1867 edit.1 = new_text.clone();
1868 }
1869 } else {
1870 unreachable!()
1871 }
1872
1873 (edits, op)
1874 }
1875
1876 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng) -> Vec<Operation> {
1877 use rand::prelude::*;
1878
1879 let mut ops = Vec::new();
1880 for _ in 0..rng.random_range(1..=5) {
1881 if let Some(entry) = self.history.undo_stack.choose(rng) {
1882 let transaction = entry.transaction.clone();
1883 log::info!(
1884 "undoing buffer {} transaction {:?}",
1885 self.replica_id,
1886 transaction
1887 );
1888 ops.push(self.undo_or_redo(transaction));
1889 }
1890 }
1891 ops
1892 }
1893}
1894
1895impl Deref for Buffer {
1896 type Target = BufferSnapshot;
1897
1898 fn deref(&self) -> &Self::Target {
1899 &self.snapshot
1900 }
1901}
1902
1903impl BufferSnapshot {
1904 pub fn as_rope(&self) -> &Rope {
1905 &self.visible_text
1906 }
1907
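    /// Reconstructs the buffer's visible text as it was at `version`.
    ///
    /// Only fragments whose `max_version` is not fully contained in `version` are visited:
    /// unchanged runs are copied straight from `visible_text`, fragments that were visible at
    /// `version` but have since been deleted are recovered from `deleted_text`, and fragments
    /// inserted after `version` are skipped.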
1908 pub fn rope_for_version(&self, version: &clock::Global) -> Rope {
1909 let mut rope = Rope::new();
1910
1911 let mut cursor = self
1912 .fragments
1913 .filter::<_, FragmentTextSummary>(&None, move |summary| {
1914 !version.observed_all(&summary.max_version)
1915 });
1916 cursor.next();
1917
1918 let mut visible_cursor = self.visible_text.cursor(0);
1919 let mut deleted_cursor = self.deleted_text.cursor(0);
1920
1921 while let Some(fragment) = cursor.item() {
1922 if cursor.start().visible > visible_cursor.offset() {
1923 let text = visible_cursor.slice(cursor.start().visible);
1924 rope.append(text);
1925 }
1926
1927 if fragment.was_visible(version, &self.undo_map) {
1928 if fragment.visible {
1929 let text = visible_cursor.slice(cursor.end().visible);
1930 rope.append(text);
1931 } else {
1932 deleted_cursor.seek_forward(cursor.start().deleted);
1933 let text = deleted_cursor.slice(cursor.end().deleted);
1934 rope.append(text);
1935 }
1936 } else if fragment.visible {
1937 visible_cursor.seek_forward(cursor.end().visible);
1938 }
1939
1940 cursor.next();
1941 }
1942
1943 if cursor.start().visible > visible_cursor.offset() {
1944 let text = visible_cursor.slice(cursor.start().visible);
1945 rope.append(text);
1946 }
1947
1948 rope
1949 }
1950
1951 pub fn remote_id(&self) -> BufferId {
1952 self.remote_id
1953 }
1954
1955 pub fn replica_id(&self) -> ReplicaId {
1956 self.replica_id
1957 }
1958
1959 pub fn row_count(&self) -> u32 {
1960 self.max_point().row + 1
1961 }
1962
1963 pub fn len(&self) -> usize {
1964 self.visible_text.len()
1965 }
1966
1967 pub fn is_empty(&self) -> bool {
1968 self.len() == 0
1969 }
1970
1971 pub fn chars(&self) -> impl Iterator<Item = char> + '_ {
1972 self.chars_at(0)
1973 }
1974
1975 pub fn chars_for_range<T: ToOffset>(&self, range: Range<T>) -> impl Iterator<Item = char> + '_ {
1976 self.text_for_range(range).flat_map(str::chars)
1977 }
1978
1979 pub fn reversed_chars_for_range<T: ToOffset>(
1980 &self,
1981 range: Range<T>,
1982 ) -> impl Iterator<Item = char> + '_ {
1983 self.reversed_chunks_in_range(range)
1984 .flat_map(|chunk| chunk.chars().rev())
1985 }
1986
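    /// Returns whether `needle` occurs verbatim starting at `position`. Returns `false` if
    /// `position` does not fall on a char boundary of the visible text.
    ///
    /// A minimal sketch (not compiled as a doctest); `snapshot` is a hypothetical
    /// `BufferSnapshot` whose text is `"hello world"`:
    ///
    /// ```ignore
    /// assert!(snapshot.contains_str_at(6usize, "world"));
    /// assert!(!snapshot.contains_str_at(0usize, "world"));
    /// ```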
1987 pub fn contains_str_at<T>(&self, position: T, needle: &str) -> bool
1988 where
1989 T: ToOffset,
1990 {
1991 let position = position.to_offset(self);
1992 position == self.clip_offset(position, Bias::Left)
1993 && self
1994 .bytes_in_range(position..self.len())
1995 .flatten()
1996 .copied()
1997 .take(needle.len())
1998 .eq(needle.bytes())
1999 }
2000
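    /// Returns the range of the longest prefix of `needle` that matches, case-insensitively,
    /// the text that ends at `position`. The returned range always ends at `position`.
    ///
    /// A minimal sketch (not compiled as a doctest); `snapshot` is a hypothetical
    /// `BufferSnapshot` whose text is `"foo ba"`:
    ///
    /// ```ignore
    /// // Bytes 4..6 ("ba") match the needle's prefix "Ba", ignoring case.
    /// assert_eq!(snapshot.common_prefix_at(6usize, "Bar"), 4..6);
    /// ```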
2001 pub fn common_prefix_at<T>(&self, position: T, needle: &str) -> Range<T>
2002 where
2003 T: ToOffset + TextDimension,
2004 {
2005 let offset = position.to_offset(self);
2006 let common_prefix_len = needle
2007 .char_indices()
2008 .map(|(index, _)| index)
2009 .chain([needle.len()])
2010 .take_while(|&len| len <= offset)
2011 .filter(|&len| {
2012 let left = self
2013 .chars_for_range(offset - len..offset)
2014 .flat_map(char::to_lowercase);
2015 let right = needle[..len].chars().flat_map(char::to_lowercase);
2016 left.eq(right)
2017 })
2018 .last()
2019 .unwrap_or(0);
2020 let start_offset = offset - common_prefix_len;
2021 let start = self.text_summary_for_range(0..start_offset);
2022 start..position
2023 }
2024
2025 pub fn text(&self) -> String {
2026 self.visible_text.to_string()
2027 }
2028
2029 pub fn line_ending(&self) -> LineEnding {
2030 self.line_ending
2031 }
2032
2033 pub fn deleted_text(&self) -> String {
2034 self.deleted_text.to_string()
2035 }
2036
2037 pub fn fragments(&self) -> impl Iterator<Item = &Fragment> {
2038 self.fragments.iter()
2039 }
2040
2041 pub fn text_summary(&self) -> TextSummary {
2042 self.visible_text.summary()
2043 }
2044
2045 pub fn max_point(&self) -> Point {
2046 self.visible_text.max_point()
2047 }
2048
2049 pub fn max_point_utf16(&self) -> PointUtf16 {
2050 self.visible_text.max_point_utf16()
2051 }
2052
2053 pub fn point_to_offset(&self, point: Point) -> usize {
2054 self.visible_text.point_to_offset(point)
2055 }
2056
2057 pub fn point_utf16_to_offset(&self, point: PointUtf16) -> usize {
2058 self.visible_text.point_utf16_to_offset(point)
2059 }
2060
2061 pub fn unclipped_point_utf16_to_offset(&self, point: Unclipped<PointUtf16>) -> usize {
2062 self.visible_text.unclipped_point_utf16_to_offset(point)
2063 }
2064
2065 pub fn unclipped_point_utf16_to_point(&self, point: Unclipped<PointUtf16>) -> Point {
2066 self.visible_text.unclipped_point_utf16_to_point(point)
2067 }
2068
2069 pub fn offset_utf16_to_offset(&self, offset: OffsetUtf16) -> usize {
2070 self.visible_text.offset_utf16_to_offset(offset)
2071 }
2072
2073 pub fn offset_to_offset_utf16(&self, offset: usize) -> OffsetUtf16 {
2074 self.visible_text.offset_to_offset_utf16(offset)
2075 }
2076
2077 pub fn offset_to_point(&self, offset: usize) -> Point {
2078 self.visible_text.offset_to_point(offset)
2079 }
2080
2081 pub fn offset_to_point_utf16(&self, offset: usize) -> PointUtf16 {
2082 self.visible_text.offset_to_point_utf16(offset)
2083 }
2084
2085 pub fn point_to_point_utf16(&self, point: Point) -> PointUtf16 {
2086 self.visible_text.point_to_point_utf16(point)
2087 }
2088
2089 pub fn version(&self) -> &clock::Global {
2090 &self.version
2091 }
2092
2093 pub fn chars_at<T: ToOffset>(&self, position: T) -> impl Iterator<Item = char> + '_ {
2094 let offset = position.to_offset(self);
2095 self.visible_text.chars_at(offset)
2096 }
2097
2098 pub fn reversed_chars_at<T: ToOffset>(&self, position: T) -> impl Iterator<Item = char> + '_ {
2099 let offset = position.to_offset(self);
2100 self.visible_text.reversed_chars_at(offset)
2101 }
2102
2103 pub fn reversed_chunks_in_range<T: ToOffset>(&self, range: Range<T>) -> rope::Chunks<'_> {
2104 let range = range.start.to_offset(self)..range.end.to_offset(self);
2105 self.visible_text.reversed_chunks_in_range(range)
2106 }
2107
2108 pub fn bytes_in_range<T: ToOffset>(&self, range: Range<T>) -> rope::Bytes<'_> {
2109 let start = range.start.to_offset(self);
2110 let end = range.end.to_offset(self);
2111 self.visible_text.bytes_in_range(start..end)
2112 }
2113
2114 pub fn reversed_bytes_in_range<T: ToOffset>(&self, range: Range<T>) -> rope::Bytes<'_> {
2115 let start = range.start.to_offset(self);
2116 let end = range.end.to_offset(self);
2117 self.visible_text.reversed_bytes_in_range(start..end)
2118 }
2119
2120 pub fn text_for_range<T: ToOffset>(&self, range: Range<T>) -> Chunks<'_> {
2121 let start = range.start.to_offset(self);
2122 let end = range.end.to_offset(self);
2123 self.visible_text.chunks_in_range(start..end)
2124 }
2125
2126 pub fn line_len(&self, row: u32) -> u32 {
2127 let row_start_offset = Point::new(row, 0).to_offset(self);
2128 let row_end_offset = if row >= self.max_point().row {
2129 self.len()
2130 } else {
2131 Point::new(row + 1, 0).to_previous_offset(self)
2132 };
2133 (row_end_offset - row_start_offset) as u32
2134 }
2135
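    /// Returns the line indents for rows in `row_range`, inclusive of `row_range.end` (unlike
    /// the reversed variant below), in ascending row order.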
2136 pub fn line_indents_in_row_range(
2137 &self,
2138 row_range: Range<u32>,
2139 ) -> impl Iterator<Item = (u32, LineIndent)> + '_ {
2140 let start = Point::new(row_range.start, 0).to_offset(self);
2141 let end = Point::new(row_range.end, self.line_len(row_range.end)).to_offset(self);
2142
2143 let mut chunks = self.as_rope().chunks_in_range(start..end);
2144 let mut row = row_range.start;
2145 let mut done = false;
2146 std::iter::from_fn(move || {
2147 if done {
2148 None
2149 } else {
2150 let indent = (row, LineIndent::from_chunks(&mut chunks));
2151 done = !chunks.next_line();
2152 row += 1;
2153 Some(indent)
2154 }
2155 })
2156 }
2157
    /// Returns the line indents for rows in `row_range`, excluding `row_range.end`, iterating
    /// in reverse order (last row first).
2159 pub fn reversed_line_indents_in_row_range(
2160 &self,
2161 row_range: Range<u32>,
2162 ) -> impl Iterator<Item = (u32, LineIndent)> + '_ {
2163 let start = Point::new(row_range.start, 0).to_offset(self);
2164
2165 let end_point;
2166 let end;
2167 if row_range.end > row_range.start {
2168 end_point = Point::new(row_range.end - 1, self.line_len(row_range.end - 1));
2169 end = end_point.to_offset(self);
2170 } else {
2171 end_point = Point::new(row_range.start, 0);
2172 end = start;
2173 };
2174
2175 let mut chunks = self.as_rope().chunks_in_range(start..end);
2176 // Move the cursor to the start of the last line if it's not empty.
2177 chunks.seek(end);
2178 if end_point.column > 0 {
2179 chunks.prev_line();
2180 }
2181
2182 let mut row = end_point.row;
2183 let mut done = false;
2184 std::iter::from_fn(move || {
2185 if done {
2186 None
2187 } else {
2188 let initial_offset = chunks.offset();
2189 let indent = (row, LineIndent::from_chunks(&mut chunks));
2190 if chunks.offset() > initial_offset {
2191 chunks.prev_line();
2192 }
2193 done = !chunks.prev_line();
2194 if !done {
2195 row -= 1;
2196 }
2197
2198 Some(indent)
2199 }
2200 })
2201 }
2202
2203 pub fn line_indent_for_row(&self, row: u32) -> LineIndent {
2204 LineIndent::from_iter(self.chars_at(Point::new(row, 0)))
2205 }
2206
2207 pub fn is_line_blank(&self, row: u32) -> bool {
2208 self.text_for_range(Point::new(row, 0)..Point::new(row, self.line_len(row)))
2209 .all(|chunk| chunk.matches(|c: char| !c.is_whitespace()).next().is_none())
2210 }
2211
2212 pub fn text_summary_for_range<D, O: ToOffset>(&self, range: Range<O>) -> D
2213 where
2214 D: TextDimension,
2215 {
2216 self.visible_text
2217 .cursor(range.start.to_offset(self))
2218 .summary(range.end.to_offset(self))
2219 }
2220
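    /// Resolves a batch of anchors into the text dimension `D` (e.g. byte offsets or points)
    /// in a single left-to-right pass. The anchors must be supplied in increasing buffer
    /// order, because the underlying fragment and text cursors only seek forward.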
2221 pub fn summaries_for_anchors<'a, D, A>(&'a self, anchors: A) -> impl 'a + Iterator<Item = D>
2222 where
2223 D: 'a + TextDimension,
2224 A: 'a + IntoIterator<Item = &'a Anchor>,
2225 {
2226 let anchors = anchors.into_iter();
2227 self.summaries_for_anchors_with_payload::<D, _, ()>(anchors.map(|a| (a, ())))
2228 .map(|d| d.0)
2229 }
2230
2231 pub fn summaries_for_anchors_with_payload<'a, D, A, T>(
2232 &'a self,
2233 anchors: A,
2234 ) -> impl 'a + Iterator<Item = (D, T)>
2235 where
2236 D: 'a + TextDimension,
2237 A: 'a + IntoIterator<Item = (&'a Anchor, T)>,
2238 {
2239 let anchors = anchors.into_iter();
2240 let mut insertion_cursor = self.insertions.cursor::<InsertionFragmentKey>(());
2241 let mut fragment_cursor = self
2242 .fragments
2243 .cursor::<Dimensions<Option<&Locator>, usize>>(&None);
2244 let mut text_cursor = self.visible_text.cursor(0);
2245 let mut position = D::zero(());
2246
2247 anchors.map(move |(anchor, payload)| {
2248 if *anchor == Anchor::MIN {
2249 return (D::zero(()), payload);
2250 } else if *anchor == Anchor::MAX {
2251 return (D::from_text_summary(&self.visible_text.summary()), payload);
2252 }
2253
2254 let anchor_key = InsertionFragmentKey {
2255 timestamp: anchor.timestamp,
2256 split_offset: anchor.offset,
2257 };
2258 insertion_cursor.seek(&anchor_key, anchor.bias);
2259 if let Some(insertion) = insertion_cursor.item() {
2260 let comparison = sum_tree::KeyedItem::key(insertion).cmp(&anchor_key);
2261 if comparison == Ordering::Greater
2262 || (anchor.bias == Bias::Left
2263 && comparison == Ordering::Equal
2264 && anchor.offset > 0)
2265 {
2266 insertion_cursor.prev();
2267 }
2268 } else {
2269 insertion_cursor.prev();
2270 }
2271 let insertion = insertion_cursor.item().expect("invalid insertion");
2272 assert_eq!(insertion.timestamp, anchor.timestamp, "invalid insertion");
2273
2274 fragment_cursor.seek_forward(&Some(&insertion.fragment_id), Bias::Left);
2275 let fragment = fragment_cursor.item().unwrap();
2276 let mut fragment_offset = fragment_cursor.start().1;
2277 if fragment.visible {
2278 fragment_offset += anchor.offset - insertion.split_offset;
2279 }
2280
2281 position.add_assign(&text_cursor.summary(fragment_offset));
2282 (position, payload)
2283 })
2284 }
2285
2286 pub fn summary_for_anchor<D>(&self, anchor: &Anchor) -> D
2287 where
2288 D: TextDimension,
2289 {
2290 self.text_summary_for_range(0..self.offset_for_anchor(anchor))
2291 }
2292
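    /// Resolves `anchor` to a byte offset in the current visible text. Anchors inside deleted
    /// text resolve to the offset where that text used to be. Panics if the anchor refers to
    /// an insertion this buffer has never seen.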
2293 pub fn offset_for_anchor(&self, anchor: &Anchor) -> usize {
2294 if *anchor == Anchor::MIN {
2295 0
2296 } else if *anchor == Anchor::MAX {
2297 self.visible_text.len()
2298 } else {
2299 debug_assert!(anchor.buffer_id == Some(self.remote_id));
2300 let anchor_key = InsertionFragmentKey {
2301 timestamp: anchor.timestamp,
2302 split_offset: anchor.offset,
2303 };
2304 let mut insertion_cursor = self.insertions.cursor::<InsertionFragmentKey>(());
2305 insertion_cursor.seek(&anchor_key, anchor.bias);
2306 if let Some(insertion) = insertion_cursor.item() {
2307 let comparison = sum_tree::KeyedItem::key(insertion).cmp(&anchor_key);
2308 if comparison == Ordering::Greater
2309 || (anchor.bias == Bias::Left
2310 && comparison == Ordering::Equal
2311 && anchor.offset > 0)
2312 {
2313 insertion_cursor.prev();
2314 }
2315 } else {
2316 insertion_cursor.prev();
2317 }
2318
2319 let Some(insertion) = insertion_cursor
2320 .item()
2321 .filter(|insertion| insertion.timestamp == anchor.timestamp)
2322 else {
2323 panic!(
2324 "invalid anchor {:?}. buffer id: {}, version: {:?}",
2325 anchor, self.remote_id, self.version
2326 );
2327 };
2328
2329 let mut fragment_cursor = self
2330 .fragments
2331 .cursor::<Dimensions<Option<&Locator>, usize>>(&None);
2332 fragment_cursor.seek(&Some(&insertion.fragment_id), Bias::Left);
2333 let fragment = fragment_cursor.item().unwrap();
2334 let mut fragment_offset = fragment_cursor.start().1;
2335 if fragment.visible {
2336 fragment_offset += anchor.offset - insertion.split_offset;
2337 }
2338 fragment_offset
2339 }
2340 }
2341
2342 fn fragment_id_for_anchor(&self, anchor: &Anchor) -> &Locator {
2343 self.try_fragment_id_for_anchor(anchor).unwrap_or_else(|| {
2344 panic!(
2345 "invalid anchor {:?}. buffer id: {}, version: {:?}",
2346 anchor, self.remote_id, self.version,
2347 )
2348 })
2349 }
2350
2351 fn try_fragment_id_for_anchor(&self, anchor: &Anchor) -> Option<&Locator> {
2352 if *anchor == Anchor::MIN {
2353 Some(Locator::min_ref())
2354 } else if *anchor == Anchor::MAX {
2355 Some(Locator::max_ref())
2356 } else {
2357 let anchor_key = InsertionFragmentKey {
2358 timestamp: anchor.timestamp,
2359 split_offset: anchor.offset,
2360 };
2361 let mut insertion_cursor = self.insertions.cursor::<InsertionFragmentKey>(());
2362 insertion_cursor.seek(&anchor_key, anchor.bias);
2363 if let Some(insertion) = insertion_cursor.item() {
2364 let comparison = sum_tree::KeyedItem::key(insertion).cmp(&anchor_key);
2365 if comparison == Ordering::Greater
2366 || (anchor.bias == Bias::Left
2367 && comparison == Ordering::Equal
2368 && anchor.offset > 0)
2369 {
2370 insertion_cursor.prev();
2371 }
2372 } else {
2373 insertion_cursor.prev();
2374 }
2375
2376 insertion_cursor
2377 .item()
2378 .filter(|insertion| {
2379 !cfg!(debug_assertions) || insertion.timestamp == anchor.timestamp
2380 })
2381 .map(|insertion| &insertion.fragment_id)
2382 }
2383 }
2384
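    /// Returns a left-biased anchor at `position`: if text is later inserted exactly at that
    /// position, the anchor stays before the new text. Use [`Self::anchor_after`] for a
    /// right-biased anchor, or [`Self::anchor_at`] to choose the bias explicitly.
    ///
    /// A minimal sketch (not compiled as a doctest); `snapshot` is a hypothetical
    /// `BufferSnapshot` containing at least five bytes of ASCII text:
    ///
    /// ```ignore
    /// let anchor = snapshot.anchor_before(5usize);
    /// assert_eq!(anchor.to_offset(&snapshot), 5);
    /// ```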
2385 pub fn anchor_before<T: ToOffset>(&self, position: T) -> Anchor {
2386 self.anchor_at(position, Bias::Left)
2387 }
2388
2389 pub fn anchor_after<T: ToOffset>(&self, position: T) -> Anchor {
2390 self.anchor_at(position, Bias::Right)
2391 }
2392
2393 pub fn anchor_at<T: ToOffset>(&self, position: T, bias: Bias) -> Anchor {
2394 self.anchor_at_offset(position.to_offset(self), bias)
2395 }
2396
2397 fn anchor_at_offset(&self, offset: usize, bias: Bias) -> Anchor {
2398 if bias == Bias::Left && offset == 0 {
2399 Anchor::MIN
2400 } else if bias == Bias::Right && offset == self.len() {
2401 Anchor::MAX
2402 } else {
2403 if offset > self.visible_text.len() {
2404 panic!("offset {} is out of bounds", offset)
2405 } else if !self.visible_text.is_char_boundary(offset) {
                // Find the character containing this byte offset so the panic below can report it.
2407 let char_start = self.visible_text.floor_char_boundary(offset);
2408 // `char_start` must be less than len and a char boundary
2409 let ch = self.visible_text.chars_at(char_start).next().unwrap();
2410 let char_range = char_start..char_start + ch.len_utf8();
2411 panic!(
2412 "byte index {} is not a char boundary; it is inside {:?} (bytes {:?})",
2413 offset, ch, char_range,
2414 );
2415 }
2416 let mut fragment_cursor = self.fragments.cursor::<usize>(&None);
2417 fragment_cursor.seek(&offset, bias);
2418 let fragment = fragment_cursor.item().unwrap();
2419 let overshoot = offset - *fragment_cursor.start();
2420 Anchor {
2421 timestamp: fragment.timestamp,
2422 offset: fragment.insertion_offset + overshoot,
2423 bias,
2424 buffer_id: Some(self.remote_id),
2425 }
2426 }
2427 }
2428
2429 pub fn can_resolve(&self, anchor: &Anchor) -> bool {
2430 *anchor == Anchor::MIN
2431 || *anchor == Anchor::MAX
2432 || (Some(self.remote_id) == anchor.buffer_id && self.version.observed(anchor.timestamp))
2433 }
2434
2435 pub fn clip_offset(&self, offset: usize, bias: Bias) -> usize {
2436 self.visible_text.clip_offset(offset, bias)
2437 }
2438
2439 pub fn clip_point(&self, point: Point, bias: Bias) -> Point {
2440 self.visible_text.clip_point(point, bias)
2441 }
2442
2443 pub fn clip_offset_utf16(&self, offset: OffsetUtf16, bias: Bias) -> OffsetUtf16 {
2444 self.visible_text.clip_offset_utf16(offset, bias)
2445 }
2446
2447 pub fn clip_point_utf16(&self, point: Unclipped<PointUtf16>, bias: Bias) -> PointUtf16 {
2448 self.visible_text.clip_point_utf16(point, bias)
2449 }
2450
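    /// Returns the edits applied since `since`, in buffer order, expressed in the coordinate
    /// space `D`. For each edit, `old` is the range the affected text occupied at `since`,
    /// and `new` is the range it occupies in the current snapshot.
    ///
    /// A minimal sketch (not compiled as a doctest); `earlier_version` is a hypothetical
    /// `clock::Global` captured from an earlier snapshot:
    ///
    /// ```ignore
    /// for edit in snapshot.edits_since::<usize>(&earlier_version) {
    ///     println!("bytes {:?} became {:?}", edit.old, edit.new);
    /// }
    /// ```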
2451 pub fn edits_since<'a, D>(
2452 &'a self,
2453 since: &'a clock::Global,
2454 ) -> impl 'a + Iterator<Item = Edit<D>>
2455 where
2456 D: TextDimension + Ord,
2457 {
2458 self.edits_since_in_range(since, Anchor::MIN..Anchor::MAX)
2459 }
2460
2461 pub fn anchored_edits_since<'a, D>(
2462 &'a self,
2463 since: &'a clock::Global,
2464 ) -> impl 'a + Iterator<Item = (Edit<D>, Range<Anchor>)>
2465 where
2466 D: TextDimension + Ord,
2467 {
2468 self.anchored_edits_since_in_range(since, Anchor::MIN..Anchor::MAX)
2469 }
2470
2471 pub fn edits_since_in_range<'a, D>(
2472 &'a self,
2473 since: &'a clock::Global,
2474 range: Range<Anchor>,
2475 ) -> impl 'a + Iterator<Item = Edit<D>>
2476 where
2477 D: TextDimension + Ord,
2478 {
2479 self.anchored_edits_since_in_range(since, range)
2480 .map(|item| item.0)
2481 }
2482
2483 pub fn anchored_edits_since_in_range<'a, D>(
2484 &'a self,
2485 since: &'a clock::Global,
2486 range: Range<Anchor>,
2487 ) -> impl 'a + Iterator<Item = (Edit<D>, Range<Anchor>)>
2488 where
2489 D: TextDimension + Ord,
2490 {
2491 let fragments_cursor = if *since == self.version {
2492 None
2493 } else {
2494 let mut cursor = self.fragments.filter(&None, move |summary| {
2495 !since.observed_all(&summary.max_version)
2496 });
2497 cursor.next();
2498 Some(cursor)
2499 };
2500 let mut cursor = self
2501 .fragments
2502 .cursor::<Dimensions<Option<&Locator>, FragmentTextSummary>>(&None);
2503
2504 let start_fragment_id = self.fragment_id_for_anchor(&range.start);
2505 cursor.seek(&Some(start_fragment_id), Bias::Left);
2506 let mut visible_start = cursor.start().1.visible;
2507 let mut deleted_start = cursor.start().1.deleted;
2508 if let Some(fragment) = cursor.item() {
2509 let overshoot = range.start.offset - fragment.insertion_offset;
2510 if fragment.visible {
2511 visible_start += overshoot;
2512 } else {
2513 deleted_start += overshoot;
2514 }
2515 }
2516 let end_fragment_id = self.fragment_id_for_anchor(&range.end);
2517
2518 Edits {
2519 visible_cursor: self.visible_text.cursor(visible_start),
2520 deleted_cursor: self.deleted_text.cursor(deleted_start),
2521 fragments_cursor,
2522 undos: &self.undo_map,
2523 since,
2524 old_end: D::zero(()),
2525 new_end: D::zero(()),
2526 range: (start_fragment_id, range.start.offset)..(end_fragment_id, range.end.offset),
2527 buffer_id: self.remote_id,
2528 }
2529 }
2530
2531 pub fn has_edits_since_in_range(&self, since: &clock::Global, range: Range<Anchor>) -> bool {
2532 if *since != self.version {
2533 let start_fragment_id = self.fragment_id_for_anchor(&range.start);
2534 let end_fragment_id = self.fragment_id_for_anchor(&range.end);
2535 let mut cursor = self.fragments.filter::<_, usize>(&None, move |summary| {
2536 !since.observed_all(&summary.max_version)
2537 });
2538 cursor.next();
2539 while let Some(fragment) = cursor.item() {
2540 if fragment.id > *end_fragment_id {
2541 break;
2542 }
2543 if fragment.id > *start_fragment_id {
2544 let was_visible = fragment.was_visible(since, &self.undo_map);
2545 let is_visible = fragment.visible;
2546 if was_visible != is_visible {
2547 return true;
2548 }
2549 }
2550 cursor.next();
2551 }
2552 }
2553 false
2554 }
2555
2556 pub fn has_edits_since(&self, since: &clock::Global) -> bool {
2557 if *since != self.version {
2558 let mut cursor = self.fragments.filter::<_, usize>(&None, move |summary| {
2559 !since.observed_all(&summary.max_version)
2560 });
2561 cursor.next();
2562 while let Some(fragment) = cursor.item() {
2563 let was_visible = fragment.was_visible(since, &self.undo_map);
2564 let is_visible = fragment.visible;
2565 if was_visible != is_visible {
2566 return true;
2567 }
2568 cursor.next();
2569 }
2570 }
2571 false
2572 }
2573
2574 pub fn range_to_version(&self, range: Range<usize>, version: &clock::Global) -> Range<usize> {
2575 let mut offsets = self.offsets_to_version([range.start, range.end], version);
2576 offsets.next().unwrap()..offsets.next().unwrap()
2577 }
2578
    /// Converts the given sequence of offsets into their corresponding offsets
    /// at a prior version of this buffer. The offsets must be supplied in ascending
    /// order, because the edits since `version` are walked in a single forward pass.
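    ///
    /// A minimal sketch (not compiled as a doctest), assuming the only edit since `version`
    /// replaced old bytes 2..4 with five new bytes (new range 2..7):
    ///
    /// ```ignore
    /// let old: Vec<usize> = snapshot.offsets_to_version([0, 3, 10], &version).collect();
    /// assert_eq!(old, vec![0, 3, 7]); // 3 lands inside the edit; 10 shifts back by the net +3
    /// ```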
2581 pub fn offsets_to_version<'a>(
2582 &'a self,
2583 offsets: impl 'a + IntoIterator<Item = usize>,
2584 version: &'a clock::Global,
2585 ) -> impl 'a + Iterator<Item = usize> {
2586 let mut edits = self.edits_since(version).peekable();
2587 let mut last_old_end = 0;
2588 let mut last_new_end = 0;
2589 offsets.into_iter().map(move |new_offset| {
2590 while let Some(edit) = edits.peek() {
2591 if edit.new.start > new_offset {
2592 break;
2593 }
2594
2595 if edit.new.end <= new_offset {
2596 last_new_end = edit.new.end;
2597 last_old_end = edit.old.end;
2598 edits.next();
2599 continue;
2600 }
2601
2602 let overshoot = new_offset - edit.new.start;
2603 return (edit.old.start + overshoot).min(edit.old.end);
2604 }
2605
2606 last_old_end + new_offset.saturating_sub(last_new_end)
2607 })
2608 }
2609
    /// Visually annotates a position or range with the `Debug` representation of a value. The
    /// call site of this function is used as the key; previous annotations with the same key
    /// are removed.
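    ///
    /// A minimal sketch (debug builds only, not compiled as a doctest):
    ///
    /// ```ignore
    /// snapshot.debug(&(4usize..10), "range under investigation");
    /// ```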
2612 #[cfg(debug_assertions)]
2613 #[track_caller]
2614 pub fn debug<R, V>(&self, ranges: &R, value: V)
2615 where
2616 R: debug::ToDebugRanges,
2617 V: std::fmt::Debug,
2618 {
2619 self.debug_with_key(std::panic::Location::caller(), ranges, value);
2620 }
2621
2622 /// Visually annotates a position or range with the `Debug` representation of a value. Previous
2623 /// debug annotations with the same key will be removed. The key is also used to determine the
2624 /// annotation's color.
2625 #[cfg(debug_assertions)]
2626 pub fn debug_with_key<K, R, V>(&self, key: &K, ranges: &R, value: V)
2627 where
2628 K: std::hash::Hash + 'static,
2629 R: debug::ToDebugRanges,
2630 V: std::fmt::Debug,
2631 {
2632 let ranges = ranges
2633 .to_debug_ranges(self)
2634 .into_iter()
2635 .map(|range| self.anchor_after(range.start)..self.anchor_before(range.end))
2636 .collect();
2637 debug::GlobalDebugRanges::with_locked(|debug_ranges| {
2638 debug_ranges.insert(key, ranges, format!("{value:?}").into());
2639 });
2640 }
2641}
2642
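/// Incrementally rebuilds `visible_text` and `deleted_text` while fragments are spliced:
/// unchanged spans are copied from the old ropes, newly inserted text is pushed directly into
/// the visible rope, and each fragment's bytes are routed to the new visible or deleted rope
/// according to its current visibility.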
2643struct RopeBuilder<'a> {
2644 old_visible_cursor: rope::Cursor<'a>,
2645 old_deleted_cursor: rope::Cursor<'a>,
2646 new_visible: Rope,
2647 new_deleted: Rope,
2648}
2649
2650impl<'a> RopeBuilder<'a> {
2651 fn new(old_visible_cursor: rope::Cursor<'a>, old_deleted_cursor: rope::Cursor<'a>) -> Self {
2652 Self {
2653 old_visible_cursor,
2654 old_deleted_cursor,
2655 new_visible: Rope::new(),
2656 new_deleted: Rope::new(),
2657 }
2658 }
2659
2660 fn append(&mut self, len: FragmentTextSummary) {
2661 self.push(len.visible, true, true);
2662 self.push(len.deleted, false, false);
2663 }
2664
2665 fn push_fragment(&mut self, fragment: &Fragment, was_visible: bool) {
2666 debug_assert!(fragment.len > 0);
2667 self.push(fragment.len, was_visible, fragment.visible)
2668 }
2669
2670 fn push(&mut self, len: usize, was_visible: bool, is_visible: bool) {
2671 let text = if was_visible {
2672 self.old_visible_cursor
2673 .slice(self.old_visible_cursor.offset() + len)
2674 } else {
2675 self.old_deleted_cursor
2676 .slice(self.old_deleted_cursor.offset() + len)
2677 };
2678 if is_visible {
2679 self.new_visible.append(text);
2680 } else {
2681 self.new_deleted.append(text);
2682 }
2683 }
2684
2685 fn push_str(&mut self, text: &str) {
2686 self.new_visible.push(text);
2687 }
2688
2689 fn finish(mut self) -> (Rope, Rope) {
2690 self.new_visible.append(self.old_visible_cursor.suffix());
2691 self.new_deleted.append(self.old_deleted_cursor.suffix());
2692 (self.new_visible, self.new_deleted)
2693 }
2694}
2695
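// Walks the fragments that changed relative to `since`, coalescing runs of consecutive
// insertions and deletions into a single `Edit` and pairing each edit with the anchor range
// it covers in the current buffer.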
2696impl<D: TextDimension + Ord, F: FnMut(&FragmentSummary) -> bool> Iterator for Edits<'_, D, F> {
2697 type Item = (Edit<D>, Range<Anchor>);
2698
2699 fn next(&mut self) -> Option<Self::Item> {
2700 let mut pending_edit: Option<Self::Item> = None;
2701 let cursor = self.fragments_cursor.as_mut()?;
2702
2703 while let Some(fragment) = cursor.item() {
2704 if fragment.id < *self.range.start.0 {
2705 cursor.next();
2706 continue;
2707 } else if fragment.id > *self.range.end.0 {
2708 break;
2709 }
2710
2711 if cursor.start().visible > self.visible_cursor.offset() {
2712 let summary = self.visible_cursor.summary(cursor.start().visible);
2713 self.old_end.add_assign(&summary);
2714 self.new_end.add_assign(&summary);
2715 }
2716
2717 if pending_edit
2718 .as_ref()
2719 .is_some_and(|(change, _)| change.new.end < self.new_end)
2720 {
2721 break;
2722 }
2723
2724 let start_anchor = Anchor {
2725 timestamp: fragment.timestamp,
2726 offset: fragment.insertion_offset,
2727 bias: Bias::Right,
2728 buffer_id: Some(self.buffer_id),
2729 };
2730 let end_anchor = Anchor {
2731 timestamp: fragment.timestamp,
2732 offset: fragment.insertion_offset + fragment.len,
2733 bias: Bias::Left,
2734 buffer_id: Some(self.buffer_id),
2735 };
2736
2737 if !fragment.was_visible(self.since, self.undos) && fragment.visible {
2738 let mut visible_end = cursor.end().visible;
2739 if fragment.id == *self.range.end.0 {
2740 visible_end = cmp::min(
2741 visible_end,
2742 cursor.start().visible + (self.range.end.1 - fragment.insertion_offset),
2743 );
2744 }
2745
2746 let fragment_summary = self.visible_cursor.summary(visible_end);
2747 let mut new_end = self.new_end;
2748 new_end.add_assign(&fragment_summary);
2749 if let Some((edit, range)) = pending_edit.as_mut() {
2750 edit.new.end = new_end;
2751 range.end = end_anchor;
2752 } else {
2753 pending_edit = Some((
2754 Edit {
2755 old: self.old_end..self.old_end,
2756 new: self.new_end..new_end,
2757 },
2758 start_anchor..end_anchor,
2759 ));
2760 }
2761
2762 self.new_end = new_end;
2763 } else if fragment.was_visible(self.since, self.undos) && !fragment.visible {
2764 let mut deleted_end = cursor.end().deleted;
2765 if fragment.id == *self.range.end.0 {
2766 deleted_end = cmp::min(
2767 deleted_end,
2768 cursor.start().deleted + (self.range.end.1 - fragment.insertion_offset),
2769 );
2770 }
2771
2772 if cursor.start().deleted > self.deleted_cursor.offset() {
2773 self.deleted_cursor.seek_forward(cursor.start().deleted);
2774 }
2775 let fragment_summary = self.deleted_cursor.summary(deleted_end);
2776 let mut old_end = self.old_end;
2777 old_end.add_assign(&fragment_summary);
2778 if let Some((edit, range)) = pending_edit.as_mut() {
2779 edit.old.end = old_end;
2780 range.end = end_anchor;
2781 } else {
2782 pending_edit = Some((
2783 Edit {
2784 old: self.old_end..old_end,
2785 new: self.new_end..self.new_end,
2786 },
2787 start_anchor..end_anchor,
2788 ));
2789 }
2790
2791 self.old_end = old_end;
2792 }
2793
2794 cursor.next();
2795 }
2796
2797 pending_edit
2798 }
2799}
2800
2801impl Fragment {
2802 fn is_visible(&self, undos: &UndoMap) -> bool {
2803 !undos.is_undone(self.timestamp) && self.deletions.iter().all(|d| undos.is_undone(*d))
2804 }
2805
2806 fn was_visible(&self, version: &clock::Global, undos: &UndoMap) -> bool {
2807 (version.observed(self.timestamp) && !undos.was_undone(self.timestamp, version))
2808 && self
2809 .deletions
2810 .iter()
2811 .all(|d| !version.observed(*d) || undos.was_undone(*d, version))
2812 }
2813}
2814
2815impl sum_tree::Item for Fragment {
2816 type Summary = FragmentSummary;
2817
2818 fn summary(&self, _cx: &Option<clock::Global>) -> Self::Summary {
2819 let mut max_version = clock::Global::new();
2820 max_version.observe(self.timestamp);
2821 for deletion in &self.deletions {
2822 max_version.observe(*deletion);
2823 }
2824 max_version.join(&self.max_undos);
2825
2826 let mut min_insertion_version = clock::Global::new();
2827 min_insertion_version.observe(self.timestamp);
2828 let max_insertion_version = min_insertion_version.clone();
2829 if self.visible {
2830 FragmentSummary {
2831 max_id: self.id.clone(),
2832 text: FragmentTextSummary {
2833 visible: self.len,
2834 deleted: 0,
2835 },
2836 max_version,
2837 min_insertion_version,
2838 max_insertion_version,
2839 }
2840 } else {
2841 FragmentSummary {
2842 max_id: self.id.clone(),
2843 text: FragmentTextSummary {
2844 visible: 0,
2845 deleted: self.len,
2846 },
2847 max_version,
2848 min_insertion_version,
2849 max_insertion_version,
2850 }
2851 }
2852 }
2853}
2854
2855impl sum_tree::Summary for FragmentSummary {
2856 type Context<'a> = &'a Option<clock::Global>;
2857
2858 fn zero(_cx: Self::Context<'_>) -> Self {
2859 Default::default()
2860 }
2861
2862 fn add_summary(&mut self, other: &Self, _: Self::Context<'_>) {
2863 self.max_id.assign(&other.max_id);
2864 self.text.visible += &other.text.visible;
2865 self.text.deleted += &other.text.deleted;
2866 self.max_version.join(&other.max_version);
2867 self.min_insertion_version
2868 .meet(&other.min_insertion_version);
2869 self.max_insertion_version
2870 .join(&other.max_insertion_version);
2871 }
2872}
2873
2874impl Default for FragmentSummary {
2875 fn default() -> Self {
2876 FragmentSummary {
2877 max_id: Locator::min(),
2878 text: FragmentTextSummary::default(),
2879 max_version: clock::Global::new(),
2880 min_insertion_version: clock::Global::new(),
2881 max_insertion_version: clock::Global::new(),
2882 }
2883 }
2884}
2885
2886impl sum_tree::Item for InsertionFragment {
2887 type Summary = InsertionFragmentKey;
2888
2889 fn summary(&self, _cx: ()) -> Self::Summary {
2890 InsertionFragmentKey {
2891 timestamp: self.timestamp,
2892 split_offset: self.split_offset,
2893 }
2894 }
2895}
2896
2897impl sum_tree::KeyedItem for InsertionFragment {
2898 type Key = InsertionFragmentKey;
2899
2900 fn key(&self) -> Self::Key {
2901 sum_tree::Item::summary(self, ())
2902 }
2903}
2904
2905impl InsertionFragment {
2906 fn new(fragment: &Fragment) -> Self {
2907 Self {
2908 timestamp: fragment.timestamp,
2909 split_offset: fragment.insertion_offset,
2910 fragment_id: fragment.id.clone(),
2911 }
2912 }
2913
2914 fn insert_new(fragment: &Fragment) -> sum_tree::Edit<Self> {
2915 sum_tree::Edit::Insert(Self::new(fragment))
2916 }
2917}
2918
2919impl sum_tree::ContextLessSummary for InsertionFragmentKey {
2920 fn zero() -> Self {
2921 Default::default()
2922 }
2923
2924 fn add_summary(&mut self, summary: &Self) {
2925 *self = *summary;
2926 }
2927}
2928
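/// An offset that counts both visible and deleted bytes, i.e. a position in the buffer's full
/// fragment sequence rather than in the currently visible text alone.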
2929#[derive(Copy, Clone, Debug, Default, PartialEq, Eq, PartialOrd, Ord, Hash)]
2930pub struct FullOffset(pub usize);
2931
2932impl ops::AddAssign<usize> for FullOffset {
2933 fn add_assign(&mut self, rhs: usize) {
2934 self.0 += rhs;
2935 }
2936}
2937
2938impl ops::Add<usize> for FullOffset {
2939 type Output = Self;
2940
2941 fn add(mut self, rhs: usize) -> Self::Output {
2942 self += rhs;
2943 self
2944 }
2945}
2946
2947impl ops::Sub for FullOffset {
2948 type Output = usize;
2949
2950 fn sub(self, rhs: Self) -> Self::Output {
2951 self.0 - rhs.0
2952 }
2953}
2954
2955impl sum_tree::Dimension<'_, FragmentSummary> for usize {
2956 fn zero(_: &Option<clock::Global>) -> Self {
2957 Default::default()
2958 }
2959
2960 fn add_summary(&mut self, summary: &FragmentSummary, _: &Option<clock::Global>) {
2961 *self += summary.text.visible;
2962 }
2963}
2964
2965impl sum_tree::Dimension<'_, FragmentSummary> for FullOffset {
2966 fn zero(_: &Option<clock::Global>) -> Self {
2967 Default::default()
2968 }
2969
2970 fn add_summary(&mut self, summary: &FragmentSummary, _: &Option<clock::Global>) {
2971 self.0 += summary.text.visible + summary.text.deleted;
2972 }
2973}
2974
2975impl<'a> sum_tree::Dimension<'a, FragmentSummary> for Option<&'a Locator> {
2976 fn zero(_: &Option<clock::Global>) -> Self {
2977 Default::default()
2978 }
2979
2980 fn add_summary(&mut self, summary: &'a FragmentSummary, _: &Option<clock::Global>) {
2981 *self = Some(&summary.max_id);
2982 }
2983}
2984
2985impl sum_tree::SeekTarget<'_, FragmentSummary, FragmentTextSummary> for usize {
2986 fn cmp(
2987 &self,
2988 cursor_location: &FragmentTextSummary,
2989 _: &Option<clock::Global>,
2990 ) -> cmp::Ordering {
2991 Ord::cmp(self, &cursor_location.visible)
2992 }
2993}
2994
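/// A [`FullOffset`] interpreted relative to a particular version: `Offset` while every
/// fragment traversed so far either fully existed at that version or did not exist at all,
/// and `Invalid` once the cursor crosses a region the version has only partially observed.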
2995#[derive(Copy, Clone, Debug, Eq, PartialEq)]
2996enum VersionedFullOffset {
2997 Offset(FullOffset),
2998 Invalid,
2999}
3000
3001impl VersionedFullOffset {
3002 fn full_offset(&self) -> FullOffset {
3003 if let Self::Offset(position) = self {
3004 *position
3005 } else {
3006 panic!("invalid version")
3007 }
3008 }
3009}
3010
3011impl Default for VersionedFullOffset {
3012 fn default() -> Self {
3013 Self::Offset(Default::default())
3014 }
3015}
3016
3017impl<'a> sum_tree::Dimension<'a, FragmentSummary> for VersionedFullOffset {
3018 fn zero(_cx: &Option<clock::Global>) -> Self {
3019 Default::default()
3020 }
3021
3022 fn add_summary(&mut self, summary: &'a FragmentSummary, cx: &Option<clock::Global>) {
3023 if let Self::Offset(offset) = self {
3024 let version = cx.as_ref().unwrap();
3025 if version.observed_all(&summary.max_insertion_version) {
3026 *offset += summary.text.visible + summary.text.deleted;
3027 } else if version.observed_any(&summary.min_insertion_version) {
3028 *self = Self::Invalid;
3029 }
3030 }
3031 }
3032}
3033
3034impl sum_tree::SeekTarget<'_, FragmentSummary, Self> for VersionedFullOffset {
3035 fn cmp(&self, cursor_position: &Self, _: &Option<clock::Global>) -> cmp::Ordering {
3036 match (self, cursor_position) {
3037 (Self::Offset(a), Self::Offset(b)) => Ord::cmp(a, b),
3038 (Self::Offset(_), Self::Invalid) => cmp::Ordering::Less,
3039 (Self::Invalid, _) => unreachable!(),
3040 }
3041 }
3042}
3043
3044impl Operation {
3045 fn replica_id(&self) -> ReplicaId {
3046 operation_queue::Operation::lamport_timestamp(self).replica_id
3047 }
3048
3049 pub fn timestamp(&self) -> clock::Lamport {
3050 match self {
3051 Operation::Edit(edit) => edit.timestamp,
3052 Operation::Undo(undo) => undo.timestamp,
3053 }
3054 }
3055
3056 pub fn as_edit(&self) -> Option<&EditOperation> {
3057 match self {
3058 Operation::Edit(edit) => Some(edit),
3059 _ => None,
3060 }
3061 }
3062
3063 pub fn is_edit(&self) -> bool {
3064 matches!(self, Operation::Edit { .. })
3065 }
3066}
3067
3068impl operation_queue::Operation for Operation {
3069 fn lamport_timestamp(&self) -> clock::Lamport {
3070 match self {
3071 Operation::Edit(edit) => edit.timestamp,
3072 Operation::Undo(undo) => undo.timestamp,
3073 }
3074 }
3075}
3076
3077pub trait ToOffset {
3078 fn to_offset(&self, snapshot: &BufferSnapshot) -> usize;
    /// Returns the next offset in the buffer after this position, respecting UTF-8 character boundaries.
3080 fn to_next_offset(&self, snapshot: &BufferSnapshot) -> usize {
3081 snapshot
3082 .visible_text
3083 .ceil_char_boundary(self.to_offset(snapshot) + 1)
3084 }
    /// Returns the previous offset in the buffer before this position, respecting UTF-8 character boundaries.
3086 fn to_previous_offset(&self, snapshot: &BufferSnapshot) -> usize {
3087 snapshot
3088 .visible_text
3089 .floor_char_boundary(self.to_offset(snapshot).saturating_sub(1))
3090 }
3091}
3092
3093impl ToOffset for Point {
3094 fn to_offset(&self, snapshot: &BufferSnapshot) -> usize {
3095 snapshot.point_to_offset(*self)
3096 }
3097}
3098
3099impl ToOffset for usize {
3100 #[track_caller]
3101 fn to_offset(&self, snapshot: &BufferSnapshot) -> usize {
3102 assert!(
3103 *self <= snapshot.len(),
3104 "offset {} is out of range, snapshot length is {}",
3105 self,
3106 snapshot.len()
3107 );
3108 *self
3109 }
3110}
3111
3112impl ToOffset for Anchor {
3113 fn to_offset(&self, snapshot: &BufferSnapshot) -> usize {
3114 snapshot.summary_for_anchor(self)
3115 }
3116}
3117
3118impl<T: ToOffset> ToOffset for &T {
3119 fn to_offset(&self, content: &BufferSnapshot) -> usize {
3120 (*self).to_offset(content)
3121 }
3122}
3123
3124impl ToOffset for PointUtf16 {
3125 fn to_offset(&self, snapshot: &BufferSnapshot) -> usize {
3126 snapshot.point_utf16_to_offset(*self)
3127 }
3128}
3129
3130impl ToOffset for Unclipped<PointUtf16> {
3131 fn to_offset(&self, snapshot: &BufferSnapshot) -> usize {
3132 snapshot.unclipped_point_utf16_to_offset(*self)
3133 }
3134}
3135
3136pub trait ToPoint {
3137 fn to_point(&self, snapshot: &BufferSnapshot) -> Point;
3138}
3139
3140impl ToPoint for Anchor {
3141 fn to_point(&self, snapshot: &BufferSnapshot) -> Point {
3142 snapshot.summary_for_anchor(self)
3143 }
3144}
3145
3146impl ToPoint for usize {
3147 fn to_point(&self, snapshot: &BufferSnapshot) -> Point {
3148 snapshot.offset_to_point(*self)
3149 }
3150}
3151
3152impl ToPoint for Point {
3153 fn to_point(&self, _: &BufferSnapshot) -> Point {
3154 *self
3155 }
3156}
3157
3158impl ToPoint for Unclipped<PointUtf16> {
3159 fn to_point(&self, snapshot: &BufferSnapshot) -> Point {
3160 snapshot.unclipped_point_utf16_to_point(*self)
3161 }
3162}
3163
3164pub trait ToPointUtf16 {
3165 fn to_point_utf16(&self, snapshot: &BufferSnapshot) -> PointUtf16;
3166}
3167
3168impl ToPointUtf16 for Anchor {
3169 fn to_point_utf16(&self, snapshot: &BufferSnapshot) -> PointUtf16 {
3170 snapshot.summary_for_anchor(self)
3171 }
3172}
3173
3174impl ToPointUtf16 for usize {
3175 fn to_point_utf16(&self, snapshot: &BufferSnapshot) -> PointUtf16 {
3176 snapshot.offset_to_point_utf16(*self)
3177 }
3178}
3179
3180impl ToPointUtf16 for PointUtf16 {
3181 fn to_point_utf16(&self, _: &BufferSnapshot) -> PointUtf16 {
3182 *self
3183 }
3184}
3185
3186impl ToPointUtf16 for Point {
3187 fn to_point_utf16(&self, snapshot: &BufferSnapshot) -> PointUtf16 {
3188 snapshot.point_to_point_utf16(*self)
3189 }
3190}
3191
3192pub trait ToOffsetUtf16 {
3193 fn to_offset_utf16(&self, snapshot: &BufferSnapshot) -> OffsetUtf16;
3194}
3195
3196impl ToOffsetUtf16 for Anchor {
3197 fn to_offset_utf16(&self, snapshot: &BufferSnapshot) -> OffsetUtf16 {
3198 snapshot.summary_for_anchor(self)
3199 }
3200}
3201
3202impl ToOffsetUtf16 for usize {
3203 fn to_offset_utf16(&self, snapshot: &BufferSnapshot) -> OffsetUtf16 {
3204 snapshot.offset_to_offset_utf16(*self)
3205 }
3206}
3207
3208impl ToOffsetUtf16 for OffsetUtf16 {
3209 fn to_offset_utf16(&self, _snapshot: &BufferSnapshot) -> OffsetUtf16 {
3210 *self
3211 }
3212}
3213
3214pub trait FromAnchor {
3215 fn from_anchor(anchor: &Anchor, snapshot: &BufferSnapshot) -> Self;
3216}
3217
3218impl FromAnchor for Anchor {
3219 fn from_anchor(anchor: &Anchor, _snapshot: &BufferSnapshot) -> Self {
3220 *anchor
3221 }
3222}
3223
3224impl FromAnchor for Point {
3225 fn from_anchor(anchor: &Anchor, snapshot: &BufferSnapshot) -> Self {
3226 snapshot.summary_for_anchor(anchor)
3227 }
3228}
3229
3230impl FromAnchor for PointUtf16 {
3231 fn from_anchor(anchor: &Anchor, snapshot: &BufferSnapshot) -> Self {
3232 snapshot.summary_for_anchor(anchor)
3233 }
3234}
3235
3236impl FromAnchor for usize {
3237 fn from_anchor(anchor: &Anchor, snapshot: &BufferSnapshot) -> Self {
3238 snapshot.summary_for_anchor(anchor)
3239 }
3240}
3241
3242#[derive(Clone, Copy, Debug, PartialEq)]
3243pub enum LineEnding {
3244 Unix,
3245 Windows,
3246}
3247
3248impl Default for LineEnding {
3249 fn default() -> Self {
3250 #[cfg(unix)]
3251 return Self::Unix;
3252
3253 #[cfg(not(unix))]
3254 return Self::Windows;
3255 }
3256}
3257
3258impl LineEnding {
3259 pub fn as_str(&self) -> &'static str {
3260 match self {
3261 LineEnding::Unix => "\n",
3262 LineEnding::Windows => "\r\n",
3263 }
3264 }
3265
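    /// Guesses the line ending style from the first newline found in (at most) the first
    /// 1000 bytes of `text`, falling back to the platform default when no newline is present.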
3266 pub fn detect(text: &str) -> Self {
3267 let mut max_ix = cmp::min(text.len(), 1000);
3268 while !text.is_char_boundary(max_ix) {
3269 max_ix -= 1;
3270 }
3271
3272 if let Some(ix) = text[..max_ix].find(['\n']) {
3273 if ix > 0 && text.as_bytes()[ix - 1] == b'\r' {
3274 Self::Windows
3275 } else {
3276 Self::Unix
3277 }
3278 } else {
3279 Self::default()
3280 }
3281 }
3282
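    /// Replaces `\r\n` and lone `\r` line separators with `\n` in place, leaving the string
    /// untouched (and avoiding reallocation) when it is already normalized.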
3283 pub fn normalize(text: &mut String) {
3284 if let Cow::Owned(replaced) = LINE_SEPARATORS_REGEX.replace_all(text, "\n") {
3285 *text = replaced;
3286 }
3287 }
3288
3289 pub fn normalize_arc(text: Arc<str>) -> Arc<str> {
3290 if let Cow::Owned(replaced) = LINE_SEPARATORS_REGEX.replace_all(&text, "\n") {
3291 replaced.into()
3292 } else {
3293 text
3294 }
3295 }
3296
3297 pub fn normalize_cow(text: Cow<str>) -> Cow<str> {
3298 if let Cow::Owned(replaced) = LINE_SEPARATORS_REGEX.replace_all(&text, "\n") {
3299 replaced.into()
3300 } else {
3301 text
3302 }
3303 }
3304}
3305
3306#[cfg(debug_assertions)]
3307pub mod debug {
3308 use super::*;
3309 use parking_lot::Mutex;
3310 use std::any::TypeId;
3311 use std::hash::{Hash, Hasher};
3312
3313 static GLOBAL_DEBUG_RANGES: Mutex<Option<GlobalDebugRanges>> = Mutex::new(None);
3314
3315 pub struct GlobalDebugRanges {
3316 pub ranges: Vec<DebugRange>,
3317 key_to_occurrence_index: HashMap<Key, usize>,
3318 next_occurrence_index: usize,
3319 }
3320
3321 pub struct DebugRange {
3322 key: Key,
3323 pub ranges: Vec<Range<Anchor>>,
3324 pub value: Arc<str>,
3325 pub occurrence_index: usize,
3326 }
3327
3328 #[derive(Debug, Clone, PartialEq, Eq, Hash)]
3329 struct Key {
3330 type_id: TypeId,
3331 hash: u64,
3332 }
3333
3334 impl GlobalDebugRanges {
3335 pub fn with_locked<R>(f: impl FnOnce(&mut Self) -> R) -> R {
3336 let mut state = GLOBAL_DEBUG_RANGES.lock();
3337 if state.is_none() {
3338 *state = Some(GlobalDebugRanges {
3339 ranges: Vec::new(),
3340 key_to_occurrence_index: HashMap::default(),
3341 next_occurrence_index: 0,
3342 });
3343 }
3344 if let Some(global_debug_ranges) = state.as_mut() {
3345 f(global_debug_ranges)
3346 } else {
3347 unreachable!()
3348 }
3349 }
3350
3351 pub fn insert<K: Hash + 'static>(
3352 &mut self,
3353 key: &K,
3354 ranges: Vec<Range<Anchor>>,
3355 value: Arc<str>,
3356 ) {
3357 let occurrence_index = *self
3358 .key_to_occurrence_index
3359 .entry(Key::new(key))
3360 .or_insert_with(|| {
3361 let occurrence_index = self.next_occurrence_index;
3362 self.next_occurrence_index += 1;
3363 occurrence_index
3364 });
3365 let key = Key::new(key);
3366 let existing = self
3367 .ranges
3368 .iter()
3369 .enumerate()
3370 .rfind(|(_, existing)| existing.key == key);
3371 if let Some((existing_ix, _)) = existing {
3372 self.ranges.remove(existing_ix);
3373 }
3374 self.ranges.push(DebugRange {
3375 ranges,
3376 key,
3377 value,
3378 occurrence_index,
3379 });
3380 }
3381
3382 pub fn remove<K: Hash + 'static>(&mut self, key: &K) {
3383 self.remove_impl(&Key::new(key));
3384 }
3385
3386 fn remove_impl(&mut self, key: &Key) {
3387 let existing = self
3388 .ranges
3389 .iter()
3390 .enumerate()
3391 .rfind(|(_, existing)| &existing.key == key);
3392 if let Some((existing_ix, _)) = existing {
3393 self.ranges.remove(existing_ix);
3394 }
3395 }
3396
3397 pub fn remove_all_with_key_type<K: 'static>(&mut self) {
3398 self.ranges
3399 .retain(|item| item.key.type_id != TypeId::of::<K>());
3400 }
3401 }
3402
3403 impl Key {
3404 fn new<K: Hash + 'static>(key: &K) -> Self {
3405 let type_id = TypeId::of::<K>();
3406 let mut hasher = collections::FxHasher::default();
3407 key.hash(&mut hasher);
3408 Key {
3409 type_id,
3410 hash: hasher.finish(),
3411 }
3412 }
3413 }
3414
3415 pub trait ToDebugRanges {
3416 fn to_debug_ranges(&self, snapshot: &BufferSnapshot) -> Vec<Range<usize>>;
3417 }
3418
3419 impl<T: ToOffset> ToDebugRanges for T {
3420 fn to_debug_ranges(&self, snapshot: &BufferSnapshot) -> Vec<Range<usize>> {
3421 [self.to_offset(snapshot)].to_debug_ranges(snapshot)
3422 }
3423 }
3424
3425 impl<T: ToOffset + Clone> ToDebugRanges for Range<T> {
3426 fn to_debug_ranges(&self, snapshot: &BufferSnapshot) -> Vec<Range<usize>> {
3427 [self.clone()].to_debug_ranges(snapshot)
3428 }
3429 }
3430
3431 impl<T: ToOffset> ToDebugRanges for Vec<T> {
3432 fn to_debug_ranges(&self, snapshot: &BufferSnapshot) -> Vec<Range<usize>> {
3433 self.as_slice().to_debug_ranges(snapshot)
3434 }
3435 }
3436
3437 impl<T: ToOffset> ToDebugRanges for Vec<Range<T>> {
3438 fn to_debug_ranges(&self, snapshot: &BufferSnapshot) -> Vec<Range<usize>> {
3439 self.as_slice().to_debug_ranges(snapshot)
3440 }
3441 }
3442
3443 impl<T: ToOffset> ToDebugRanges for [T] {
3444 fn to_debug_ranges(&self, snapshot: &BufferSnapshot) -> Vec<Range<usize>> {
3445 self.iter()
3446 .map(|item| {
3447 let offset = item.to_offset(snapshot);
3448 offset..offset
3449 })
3450 .collect()
3451 }
3452 }
3453
3454 impl<T: ToOffset> ToDebugRanges for [Range<T>] {
3455 fn to_debug_ranges(&self, snapshot: &BufferSnapshot) -> Vec<Range<usize>> {
3456 self.iter()
3457 .map(|range| range.start.to_offset(snapshot)..range.end.to_offset(snapshot))
3458 .collect()
3459 }
3460 }
3461}