1mod anchor;
2pub mod locator;
3#[cfg(any(test, feature = "test-support"))]
4pub mod network;
5pub mod operation_queue;
6mod patch;
7mod selection;
8pub mod subscription;
9#[cfg(test)]
10mod tests;
11mod undo_map;
12
13pub use anchor::*;
14use anyhow::{Context as _, Result};
15use clock::LOCAL_BRANCH_REPLICA_ID;
16pub use clock::ReplicaId;
17use collections::{HashMap, HashSet};
18use locator::Locator;
19use operation_queue::OperationQueue;
20pub use patch::Patch;
21use postage::{oneshot, prelude::*};
22
23use regex::Regex;
24pub use rope::*;
25pub use selection::*;
26use std::{
27 borrow::Cow,
28 cmp::{self, Ordering, Reverse},
29 fmt::Display,
30 future::Future,
31 iter::Iterator,
32 num::NonZeroU64,
33 ops::{self, Deref, Range, Sub},
34 str,
35 sync::{Arc, LazyLock},
36 time::{Duration, Instant},
37};
38pub use subscription::*;
39pub use sum_tree::Bias;
40use sum_tree::{Dimensions, FilterCursor, SumTree, TreeMap, TreeSet};
41use undo_map::UndoMap;
42
43#[cfg(any(test, feature = "test-support"))]
44use util::RandomCharIter;
45
46static LINE_SEPARATORS_REGEX: LazyLock<Regex> =
47 LazyLock::new(|| Regex::new(r"\r\n|\r").expect("Failed to create LINE_SEPARATORS_REGEX"));
48
49pub type TransactionId = clock::Lamport;
50
51pub struct Buffer {
52 snapshot: BufferSnapshot,
53 history: History,
54 deferred_ops: OperationQueue<Operation>,
55 deferred_replicas: HashSet<ReplicaId>,
56 pub lamport_clock: clock::Lamport,
57 subscriptions: Topic,
58 edit_id_resolvers: HashMap<clock::Lamport, Vec<oneshot::Sender<()>>>,
59 wait_for_version_txs: Vec<(clock::Global, oneshot::Sender<()>)>,
60}
61
62#[repr(transparent)]
63#[derive(Clone, Copy, Debug, Hash, PartialEq, PartialOrd, Ord, Eq)]
64pub struct BufferId(NonZeroU64);
65
66impl Display for BufferId {
67 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
68 write!(f, "{}", self.0)
69 }
70}
71
72impl From<NonZeroU64> for BufferId {
73 fn from(id: NonZeroU64) -> Self {
74 BufferId(id)
75 }
76}
77
78impl BufferId {
    /// Returns an error if `id` is zero, since zero is outside the `BufferId` domain.
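    ///
    /// A minimal usage sketch (the id `1` below is arbitrary):
    ///
    /// ```ignore
    /// let id = BufferId::new(1).expect("buffer ids must be non-zero");
    /// assert_eq!(id.to_proto(), 1);
    /// ```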
80 pub fn new(id: u64) -> anyhow::Result<Self> {
81 let id = NonZeroU64::new(id).context("Buffer id cannot be 0.")?;
82 Ok(Self(id))
83 }
84
    /// Increments this buffer id, returning the old value.
    /// In other words, this behaves like a post-increment operator.
87 pub fn next(&mut self) -> Self {
88 let old = *self;
89 self.0 = self.0.saturating_add(1);
90 old
91 }
92
93 pub fn to_proto(self) -> u64 {
94 self.into()
95 }
96}
97
98impl From<BufferId> for u64 {
99 fn from(id: BufferId) -> Self {
100 id.0.get()
101 }
102}
103
104#[derive(Clone)]
105pub struct BufferSnapshot {
106 replica_id: ReplicaId,
107 remote_id: BufferId,
108 visible_text: Rope,
109 deleted_text: Rope,
110 line_ending: LineEnding,
111 undo_map: UndoMap,
112 fragments: SumTree<Fragment>,
113 insertions: SumTree<InsertionFragment>,
114 insertion_slices: TreeSet<InsertionSlice>,
115 pub version: clock::Global,
116}
117
118#[derive(Clone, Debug)]
119pub struct HistoryEntry {
120 transaction: Transaction,
121 first_edit_at: Instant,
122 last_edit_at: Instant,
123 suppress_grouping: bool,
124}
125
126#[derive(Clone, Debug)]
127pub struct Transaction {
128 pub id: TransactionId,
129 pub edit_ids: Vec<clock::Lamport>,
130 pub start: clock::Global,
131}
132
133impl Transaction {
134 pub fn merge_in(&mut self, other: Transaction) {
135 self.edit_ids.extend(other.edit_ids);
136 }
137}
138
139impl HistoryEntry {
140 pub fn transaction_id(&self) -> TransactionId {
141 self.transaction.id
142 }
143}
144
145struct History {
146 base_text: Rope,
147 operations: TreeMap<clock::Lamport, Operation>,
148 undo_stack: Vec<HistoryEntry>,
149 redo_stack: Vec<HistoryEntry>,
150 transaction_depth: usize,
151 group_interval: Duration,
152}
153
154#[derive(Clone, Debug, Eq, PartialEq)]
155struct InsertionSlice {
156 edit_id: clock::Lamport,
157 insertion_id: clock::Lamport,
158 range: Range<usize>,
159}
160
161impl Ord for InsertionSlice {
162 fn cmp(&self, other: &Self) -> Ordering {
163 self.edit_id
164 .cmp(&other.edit_id)
165 .then_with(|| self.insertion_id.cmp(&other.insertion_id))
166 .then_with(|| self.range.start.cmp(&other.range.start))
167 .then_with(|| self.range.end.cmp(&other.range.end))
168 }
169}
170
171impl PartialOrd for InsertionSlice {
172 fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
173 Some(self.cmp(other))
174 }
175}
176
177impl InsertionSlice {
178 fn from_fragment(edit_id: clock::Lamport, fragment: &Fragment) -> Self {
179 Self {
180 edit_id,
181 insertion_id: fragment.timestamp,
182 range: fragment.insertion_offset..fragment.insertion_offset + fragment.len,
183 }
184 }
185}
186
187impl History {
188 pub fn new(base_text: Rope) -> Self {
189 Self {
190 base_text,
191 operations: Default::default(),
192 undo_stack: Vec::new(),
193 redo_stack: Vec::new(),
194 transaction_depth: 0,
195 // Don't group transactions in tests unless we opt in, because it's a footgun.
196 #[cfg(any(test, feature = "test-support"))]
197 group_interval: Duration::ZERO,
198 #[cfg(not(any(test, feature = "test-support")))]
199 group_interval: Duration::from_millis(300),
200 }
201 }
202
203 fn push(&mut self, op: Operation) {
204 self.operations.insert(op.timestamp(), op);
205 }
206
207 fn start_transaction(
208 &mut self,
209 start: clock::Global,
210 now: Instant,
211 clock: &mut clock::Lamport,
212 ) -> Option<TransactionId> {
213 self.transaction_depth += 1;
214 if self.transaction_depth == 1 {
215 let id = clock.tick();
216 self.undo_stack.push(HistoryEntry {
217 transaction: Transaction {
218 id,
219 start,
220 edit_ids: Default::default(),
221 },
222 first_edit_at: now,
223 last_edit_at: now,
224 suppress_grouping: false,
225 });
226 Some(id)
227 } else {
228 None
229 }
230 }
231
232 fn end_transaction(&mut self, now: Instant) -> Option<&HistoryEntry> {
233 assert_ne!(self.transaction_depth, 0);
234 self.transaction_depth -= 1;
235 if self.transaction_depth == 0 {
236 if self
237 .undo_stack
238 .last()
239 .unwrap()
240 .transaction
241 .edit_ids
242 .is_empty()
243 {
244 self.undo_stack.pop();
245 None
246 } else {
247 self.redo_stack.clear();
248 let entry = self.undo_stack.last_mut().unwrap();
249 entry.last_edit_at = now;
250 Some(entry)
251 }
252 } else {
253 None
254 }
255 }
256
257 fn group(&mut self) -> Option<TransactionId> {
258 let mut count = 0;
259 let mut entries = self.undo_stack.iter();
260 if let Some(mut entry) = entries.next_back() {
261 while let Some(prev_entry) = entries.next_back() {
262 if !prev_entry.suppress_grouping
263 && entry.first_edit_at - prev_entry.last_edit_at < self.group_interval
264 {
265 entry = prev_entry;
266 count += 1;
267 } else {
268 break;
269 }
270 }
271 }
272 self.group_trailing(count)
273 }
274
275 fn group_until(&mut self, transaction_id: TransactionId) {
276 let mut count = 0;
277 for entry in self.undo_stack.iter().rev() {
278 if entry.transaction_id() == transaction_id {
279 self.group_trailing(count);
280 break;
281 } else if entry.suppress_grouping {
282 break;
283 } else {
284 count += 1;
285 }
286 }
287 }
288
289 fn group_trailing(&mut self, n: usize) -> Option<TransactionId> {
290 let new_len = self.undo_stack.len() - n;
291 let (entries_to_keep, entries_to_merge) = self.undo_stack.split_at_mut(new_len);
292 if let Some(last_entry) = entries_to_keep.last_mut() {
293 for entry in &*entries_to_merge {
294 for edit_id in &entry.transaction.edit_ids {
295 last_entry.transaction.edit_ids.push(*edit_id);
296 }
297 }
298
299 if let Some(entry) = entries_to_merge.last_mut() {
300 last_entry.last_edit_at = entry.last_edit_at;
301 }
302 }
303
304 self.undo_stack.truncate(new_len);
305 self.undo_stack.last().map(|e| e.transaction.id)
306 }
307
308 fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
309 self.undo_stack.last_mut().map(|entry| {
310 entry.suppress_grouping = true;
311 &entry.transaction
312 })
313 }
314
315 fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
316 assert_eq!(self.transaction_depth, 0);
317 self.undo_stack.push(HistoryEntry {
318 transaction,
319 first_edit_at: now,
320 last_edit_at: now,
321 suppress_grouping: false,
322 });
323 }
324
325 /// Differs from `push_transaction` in that it does not clear the redo
326 /// stack. Intended to be used to create a parent transaction to merge
327 /// potential child transactions into.
328 ///
329 /// The caller is responsible for removing it from the undo history using
330 /// `forget_transaction` if no edits are merged into it. Otherwise, if edits
331 /// are merged into this transaction, the caller is responsible for ensuring
332 /// the redo stack is cleared. The easiest way to ensure the redo stack is
333 /// cleared is to create transactions with the usual `start_transaction` and
    /// `end_transaction` methods and then merge the resulting transactions into
    /// the transaction created by this method.
336 fn push_empty_transaction(
337 &mut self,
338 start: clock::Global,
339 now: Instant,
340 clock: &mut clock::Lamport,
341 ) -> TransactionId {
342 assert_eq!(self.transaction_depth, 0);
343 let id = clock.tick();
344 let transaction = Transaction {
345 id,
346 start,
347 edit_ids: Vec::new(),
348 };
349 self.undo_stack.push(HistoryEntry {
350 transaction,
351 first_edit_at: now,
352 last_edit_at: now,
353 suppress_grouping: false,
354 });
355 id
356 }
357
358 fn push_undo(&mut self, op_id: clock::Lamport) {
359 assert_ne!(self.transaction_depth, 0);
360 if let Some(Operation::Edit(_)) = self.operations.get(&op_id) {
361 let last_transaction = self.undo_stack.last_mut().unwrap();
362 last_transaction.transaction.edit_ids.push(op_id);
363 }
364 }
365
366 fn pop_undo(&mut self) -> Option<&HistoryEntry> {
367 assert_eq!(self.transaction_depth, 0);
368 if let Some(entry) = self.undo_stack.pop() {
369 self.redo_stack.push(entry);
370 self.redo_stack.last()
371 } else {
372 None
373 }
374 }
375
376 fn remove_from_undo(&mut self, transaction_id: TransactionId) -> Option<&HistoryEntry> {
377 assert_eq!(self.transaction_depth, 0);
378
379 let entry_ix = self
380 .undo_stack
381 .iter()
382 .rposition(|entry| entry.transaction.id == transaction_id)?;
383 let entry = self.undo_stack.remove(entry_ix);
384 self.redo_stack.push(entry);
385 self.redo_stack.last()
386 }
387
388 fn remove_from_undo_until(&mut self, transaction_id: TransactionId) -> &[HistoryEntry] {
389 assert_eq!(self.transaction_depth, 0);
390
391 let redo_stack_start_len = self.redo_stack.len();
392 if let Some(entry_ix) = self
393 .undo_stack
394 .iter()
395 .rposition(|entry| entry.transaction.id == transaction_id)
396 {
397 self.redo_stack
398 .extend(self.undo_stack.drain(entry_ix..).rev());
399 }
400 &self.redo_stack[redo_stack_start_len..]
401 }
402
403 fn forget(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
404 assert_eq!(self.transaction_depth, 0);
405 if let Some(entry_ix) = self
406 .undo_stack
407 .iter()
408 .rposition(|entry| entry.transaction.id == transaction_id)
409 {
410 Some(self.undo_stack.remove(entry_ix).transaction)
411 } else if let Some(entry_ix) = self
412 .redo_stack
413 .iter()
414 .rposition(|entry| entry.transaction.id == transaction_id)
415 {
416 Some(self.redo_stack.remove(entry_ix).transaction)
417 } else {
418 None
419 }
420 }
421
422 fn transaction(&self, transaction_id: TransactionId) -> Option<&Transaction> {
423 let entry = self
424 .undo_stack
425 .iter()
426 .rfind(|entry| entry.transaction.id == transaction_id)
427 .or_else(|| {
428 self.redo_stack
429 .iter()
430 .rfind(|entry| entry.transaction.id == transaction_id)
431 })?;
432 Some(&entry.transaction)
433 }
434
435 fn transaction_mut(&mut self, transaction_id: TransactionId) -> Option<&mut Transaction> {
436 let entry = self
437 .undo_stack
438 .iter_mut()
439 .rfind(|entry| entry.transaction.id == transaction_id)
440 .or_else(|| {
441 self.redo_stack
442 .iter_mut()
443 .rfind(|entry| entry.transaction.id == transaction_id)
444 })?;
445 Some(&mut entry.transaction)
446 }
447
448 fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
449 if let Some(transaction) = self.forget(transaction)
450 && let Some(destination) = self.transaction_mut(destination)
451 {
452 destination.edit_ids.extend(transaction.edit_ids);
453 }
454 }
455
456 fn pop_redo(&mut self) -> Option<&HistoryEntry> {
457 assert_eq!(self.transaction_depth, 0);
458 if let Some(entry) = self.redo_stack.pop() {
459 self.undo_stack.push(entry);
460 self.undo_stack.last()
461 } else {
462 None
463 }
464 }
465
466 fn remove_from_redo(&mut self, transaction_id: TransactionId) -> &[HistoryEntry] {
467 assert_eq!(self.transaction_depth, 0);
468
469 let undo_stack_start_len = self.undo_stack.len();
470 if let Some(entry_ix) = self
471 .redo_stack
472 .iter()
473 .rposition(|entry| entry.transaction.id == transaction_id)
474 {
475 self.undo_stack
476 .extend(self.redo_stack.drain(entry_ix..).rev());
477 }
478 &self.undo_stack[undo_stack_start_len..]
479 }
480}
481
482struct Edits<'a, D: TextDimension, F: FnMut(&FragmentSummary) -> bool> {
483 visible_cursor: rope::Cursor<'a>,
484 deleted_cursor: rope::Cursor<'a>,
485 fragments_cursor: Option<FilterCursor<'a, 'static, F, Fragment, FragmentTextSummary>>,
486 undos: &'a UndoMap,
487 since: &'a clock::Global,
488 old_end: D,
489 new_end: D,
490 range: Range<(&'a Locator, usize)>,
491 buffer_id: BufferId,
492}
493
494#[derive(Clone, Debug, Default, Eq, PartialEq)]
495pub struct Edit<D> {
496 pub old: Range<D>,
497 pub new: Range<D>,
498}
499
500impl<D> Edit<D>
501where
502 D: Sub<D, Output = D> + PartialEq + Copy,
503{
504 pub fn old_len(&self) -> D {
505 self.old.end - self.old.start
506 }
507
508 pub fn new_len(&self) -> D {
509 self.new.end - self.new.start
510 }
511
512 pub fn is_empty(&self) -> bool {
513 self.old.start == self.old.end && self.new.start == self.new.end
514 }
515}
516
517impl<D1, D2> Edit<(D1, D2)> {
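    /// Splits an edit measured in a pair of dimensions into one edit per
    /// dimension. A small sketch, using plain `usize` offsets for both
    /// dimensions:
    ///
    /// ```ignore
    /// let edit = Edit { old: (2, 20)..(4, 40), new: (2, 20)..(5, 50) };
    /// let (first, second) = edit.flatten();
    /// assert_eq!(first.old, 2..4);
    /// assert_eq!(second.new, 20..50);
    /// ```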
518 pub fn flatten(self) -> (Edit<D1>, Edit<D2>) {
519 (
520 Edit {
521 old: self.old.start.0..self.old.end.0,
522 new: self.new.start.0..self.new.end.0,
523 },
524 Edit {
525 old: self.old.start.1..self.old.end.1,
526 new: self.new.start.1..self.new.end.1,
527 },
528 )
529 }
530}
531
532#[derive(Eq, PartialEq, Clone, Debug)]
533pub struct Fragment {
534 pub id: Locator,
535 pub timestamp: clock::Lamport,
536 pub insertion_offset: usize,
537 pub len: usize,
538 pub visible: bool,
539 pub deletions: HashSet<clock::Lamport>,
540 pub max_undos: clock::Global,
541}
542
543#[derive(Eq, PartialEq, Clone, Debug)]
544pub struct FragmentSummary {
545 text: FragmentTextSummary,
546 max_id: Locator,
547 max_version: clock::Global,
548 min_insertion_version: clock::Global,
549 max_insertion_version: clock::Global,
550}
551
552#[derive(Copy, Default, Clone, Debug, PartialEq, Eq)]
553struct FragmentTextSummary {
554 visible: usize,
555 deleted: usize,
556}
557
558impl<'a> sum_tree::Dimension<'a, FragmentSummary> for FragmentTextSummary {
559 fn zero(_: &Option<clock::Global>) -> Self {
560 Default::default()
561 }
562
563 fn add_summary(&mut self, summary: &'a FragmentSummary, _: &Option<clock::Global>) {
564 self.visible += summary.text.visible;
565 self.deleted += summary.text.deleted;
566 }
567}
568
569#[derive(Eq, PartialEq, Clone, Debug)]
570struct InsertionFragment {
571 timestamp: clock::Lamport,
572 split_offset: usize,
573 fragment_id: Locator,
574}
575
576#[derive(Clone, Copy, Debug, Default, PartialEq, Eq, PartialOrd, Ord)]
577struct InsertionFragmentKey {
578 timestamp: clock::Lamport,
579 split_offset: usize,
580}
581
582#[derive(Clone, Debug, Eq, PartialEq)]
583pub enum Operation {
584 Edit(EditOperation),
585 Undo(UndoOperation),
586}
587
588#[derive(Clone, Debug, Eq, PartialEq)]
589pub struct EditOperation {
590 pub timestamp: clock::Lamport,
591 pub version: clock::Global,
592 pub ranges: Vec<Range<FullOffset>>,
593 pub new_text: Vec<Arc<str>>,
594}
595
596#[derive(Clone, Debug, Eq, PartialEq)]
597pub struct UndoOperation {
598 pub timestamp: clock::Lamport,
599 pub version: clock::Global,
600 pub counts: HashMap<clock::Lamport, u32>,
601}
602
/// Stores information about the indentation of a line (tabs and spaces), and
/// whether the line consists solely of whitespace.
604#[derive(Clone, Copy, Debug, Eq, PartialEq)]
605pub struct LineIndent {
606 pub tabs: u32,
607 pub spaces: u32,
608 pub line_blank: bool,
609}
610
611impl LineIndent {
612 pub fn from_chunks(chunks: &mut Chunks) -> Self {
613 let mut tabs = 0;
614 let mut spaces = 0;
615 let mut line_blank = true;
616
617 'outer: while let Some(chunk) = chunks.peek() {
618 for ch in chunk.chars() {
619 if ch == '\t' {
620 tabs += 1;
621 } else if ch == ' ' {
622 spaces += 1;
623 } else {
624 if ch != '\n' {
625 line_blank = false;
626 }
627 break 'outer;
628 }
629 }
630
631 chunks.next();
632 }
633
634 Self {
635 tabs,
636 spaces,
637 line_blank,
638 }
639 }
640
641 /// Constructs a new `LineIndent` which only contains spaces.
642 pub fn spaces(spaces: u32) -> Self {
643 Self {
644 tabs: 0,
645 spaces,
646 line_blank: true,
647 }
648 }
649
650 /// Constructs a new `LineIndent` which only contains tabs.
651 pub fn tabs(tabs: u32) -> Self {
652 Self {
653 tabs,
654 spaces: 0,
655 line_blank: true,
656 }
657 }
658
659 /// Indicates whether the line is empty.
660 pub fn is_line_empty(&self) -> bool {
661 self.tabs == 0 && self.spaces == 0 && self.line_blank
662 }
663
664 /// Indicates whether the line is blank (contains only whitespace).
665 pub fn is_line_blank(&self) -> bool {
666 self.line_blank
667 }
668
669 /// Returns the number of indentation characters (tabs or spaces).
670 pub fn raw_len(&self) -> u32 {
671 self.tabs + self.spaces
672 }
673
    /// Returns the indentation width in columns, expanding each tab to `tab_size` columns.
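    ///
    /// A small sketch (one tab plus two spaces, with a tab size of 4):
    ///
    /// ```ignore
    /// let indent = LineIndent::from("\t  fn main() {}");
    /// assert_eq!(indent.raw_len(), 3);
    /// assert_eq!(indent.len(4), 6);
    /// ```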
675 pub fn len(&self, tab_size: u32) -> u32 {
676 self.tabs * tab_size + self.spaces
677 }
678}
679
680impl From<&str> for LineIndent {
681 fn from(value: &str) -> Self {
682 Self::from_iter(value.chars())
683 }
684}
685
686impl FromIterator<char> for LineIndent {
687 fn from_iter<T: IntoIterator<Item = char>>(chars: T) -> Self {
688 let mut tabs = 0;
689 let mut spaces = 0;
690 let mut line_blank = true;
691 for c in chars {
692 if c == '\t' {
693 tabs += 1;
694 } else if c == ' ' {
695 spaces += 1;
696 } else {
697 if c != '\n' {
698 line_blank = false;
699 }
700 break;
701 }
702 }
703 Self {
704 tabs,
705 spaces,
706 line_blank,
707 }
708 }
709}
710
711impl Buffer {
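    /// Creates a buffer with the given replica id, remote id, and base text,
    /// detecting and normalizing the base text's line endings.
    ///
    /// A minimal construction sketch (replica `0` and buffer id `1` are arbitrary):
    ///
    /// ```ignore
    /// let buffer = Buffer::new(0, BufferId::new(1).unwrap(), "hello\nworld");
    /// assert_eq!(buffer.text(), "hello\nworld");
    /// ```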
712 pub fn new(replica_id: u16, remote_id: BufferId, base_text: impl Into<String>) -> Buffer {
713 let mut base_text = base_text.into();
714 let line_ending = LineEnding::detect(&base_text);
715 LineEnding::normalize(&mut base_text);
716 Self::new_normalized(replica_id, remote_id, line_ending, Rope::from(&*base_text))
717 }
718
719 pub fn new_normalized(
720 replica_id: u16,
721 remote_id: BufferId,
722 line_ending: LineEnding,
723 normalized: Rope,
724 ) -> Buffer {
725 let history = History::new(normalized);
726 let mut fragments = SumTree::new(&None);
727 let mut insertions = SumTree::default();
728
729 let mut lamport_clock = clock::Lamport::new(replica_id);
730 let mut version = clock::Global::new();
731
732 let visible_text = history.base_text.clone();
733 if !visible_text.is_empty() {
734 let insertion_timestamp = clock::Lamport {
735 replica_id: 0,
736 value: 1,
737 };
738 lamport_clock.observe(insertion_timestamp);
739 version.observe(insertion_timestamp);
740 let fragment_id = Locator::between(&Locator::min(), &Locator::max());
741 let fragment = Fragment {
742 id: fragment_id,
743 timestamp: insertion_timestamp,
744 insertion_offset: 0,
745 len: visible_text.len(),
746 visible: true,
747 deletions: Default::default(),
748 max_undos: Default::default(),
749 };
750 insertions.push(InsertionFragment::new(&fragment), ());
751 fragments.push(fragment, &None);
752 }
753
754 Buffer {
755 snapshot: BufferSnapshot {
756 replica_id,
757 remote_id,
758 visible_text,
759 deleted_text: Rope::new(),
760 line_ending,
761 fragments,
762 insertions,
763 version,
764 undo_map: Default::default(),
765 insertion_slices: Default::default(),
766 },
767 history,
768 deferred_ops: OperationQueue::new(),
769 deferred_replicas: HashSet::default(),
770 lamport_clock,
771 subscriptions: Default::default(),
772 edit_id_resolvers: Default::default(),
773 wait_for_version_txs: Default::default(),
774 }
775 }
776
777 pub fn version(&self) -> clock::Global {
778 self.version.clone()
779 }
780
781 pub fn snapshot(&self) -> BufferSnapshot {
782 self.snapshot.clone()
783 }
784
785 pub fn branch(&self) -> Self {
786 Self {
787 snapshot: self.snapshot.clone(),
788 history: History::new(self.base_text().clone()),
789 deferred_ops: OperationQueue::new(),
790 deferred_replicas: HashSet::default(),
791 lamport_clock: clock::Lamport::new(LOCAL_BRANCH_REPLICA_ID),
792 subscriptions: Default::default(),
793 edit_id_resolvers: Default::default(),
794 wait_for_version_txs: Default::default(),
795 }
796 }
797
798 pub fn replica_id(&self) -> ReplicaId {
799 self.lamport_clock.replica_id
800 }
801
802 pub fn remote_id(&self) -> BufferId {
803 self.remote_id
804 }
805
806 pub fn deferred_ops_len(&self) -> usize {
807 self.deferred_ops.len()
808 }
809
810 pub fn transaction_group_interval(&self) -> Duration {
811 self.history.group_interval
812 }
813
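    /// Applies a batch of local edits, expressed as `(range, new_text)` pairs,
    /// and returns the resulting operation so it can be replicated to other
    /// replicas.
    ///
    /// A brief sketch of an insertion followed by a replacement:
    ///
    /// ```ignore
    /// let mut buffer = Buffer::new(0, BufferId::new(1).unwrap(), "hello");
    /// buffer.edit([(5..5, " world")]);
    /// buffer.edit([(0..5, "goodbye")]);
    /// assert_eq!(buffer.text(), "goodbye world");
    /// ```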
814 pub fn edit<R, I, S, T>(&mut self, edits: R) -> Operation
815 where
816 R: IntoIterator<IntoIter = I>,
817 I: ExactSizeIterator<Item = (Range<S>, T)>,
818 S: ToOffset,
819 T: Into<Arc<str>>,
820 {
821 let edits = edits
822 .into_iter()
823 .map(|(range, new_text)| (range, new_text.into()));
824
825 self.start_transaction();
826 let timestamp = self.lamport_clock.tick();
827 let operation = Operation::Edit(self.apply_local_edit(edits, timestamp));
828
829 self.history.push(operation.clone());
830 self.history.push_undo(operation.timestamp());
831 self.snapshot.version.observe(operation.timestamp());
832 self.end_transaction();
833 operation
834 }
835
836 fn apply_local_edit<S: ToOffset, T: Into<Arc<str>>>(
837 &mut self,
838 edits: impl ExactSizeIterator<Item = (Range<S>, T)>,
839 timestamp: clock::Lamport,
840 ) -> EditOperation {
841 let mut edits_patch = Patch::default();
842 let mut edit_op = EditOperation {
843 timestamp,
844 version: self.version(),
845 ranges: Vec::with_capacity(edits.len()),
846 new_text: Vec::with_capacity(edits.len()),
847 };
848 let mut new_insertions = Vec::new();
849 let mut insertion_offset = 0;
850 let mut insertion_slices = Vec::new();
851
852 let mut edits = edits
853 .map(|(range, new_text)| (range.to_offset(&*self), new_text))
854 .peekable();
855
856 let mut new_ropes =
857 RopeBuilder::new(self.visible_text.cursor(0), self.deleted_text.cursor(0));
858 let mut old_fragments = self.fragments.cursor::<FragmentTextSummary>(&None);
859 let mut new_fragments = old_fragments.slice(&edits.peek().unwrap().0.start, Bias::Right);
860 new_ropes.append(new_fragments.summary().text);
861
862 let mut fragment_start = old_fragments.start().visible;
863 for (range, new_text) in edits {
864 let new_text = LineEnding::normalize_arc(new_text.into());
865 let fragment_end = old_fragments.end().visible;
866
867 // If the current fragment ends before this range, then jump ahead to the first fragment
868 // that extends past the start of this range, reusing any intervening fragments.
869 if fragment_end < range.start {
870 // If the current fragment has been partially consumed, then consume the rest of it
871 // and advance to the next fragment before slicing.
872 if fragment_start > old_fragments.start().visible {
873 if fragment_end > fragment_start {
874 let mut suffix = old_fragments.item().unwrap().clone();
875 suffix.len = fragment_end - fragment_start;
876 suffix.insertion_offset += fragment_start - old_fragments.start().visible;
877 new_insertions.push(InsertionFragment::insert_new(&suffix));
878 new_ropes.push_fragment(&suffix, suffix.visible);
879 new_fragments.push(suffix, &None);
880 }
881 old_fragments.next();
882 }
883
884 let slice = old_fragments.slice(&range.start, Bias::Right);
885 new_ropes.append(slice.summary().text);
886 new_fragments.append(slice, &None);
887 fragment_start = old_fragments.start().visible;
888 }
889
890 let full_range_start = FullOffset(range.start + old_fragments.start().deleted);
891
892 // Preserve any portion of the current fragment that precedes this range.
893 if fragment_start < range.start {
894 let mut prefix = old_fragments.item().unwrap().clone();
895 prefix.len = range.start - fragment_start;
896 prefix.insertion_offset += fragment_start - old_fragments.start().visible;
897 prefix.id = Locator::between(&new_fragments.summary().max_id, &prefix.id);
898 new_insertions.push(InsertionFragment::insert_new(&prefix));
899 new_ropes.push_fragment(&prefix, prefix.visible);
900 new_fragments.push(prefix, &None);
901 fragment_start = range.start;
902 }
903
904 // Insert the new text before any existing fragments within the range.
905 if !new_text.is_empty() {
906 let new_start = new_fragments.summary().text.visible;
907
908 let fragment = Fragment {
909 id: Locator::between(
910 &new_fragments.summary().max_id,
911 old_fragments
912 .item()
913 .map_or(&Locator::max(), |old_fragment| &old_fragment.id),
914 ),
915 timestamp,
916 insertion_offset,
917 len: new_text.len(),
918 deletions: Default::default(),
919 max_undos: Default::default(),
920 visible: true,
921 };
922 edits_patch.push(Edit {
923 old: fragment_start..fragment_start,
924 new: new_start..new_start + new_text.len(),
925 });
926 insertion_slices.push(InsertionSlice::from_fragment(timestamp, &fragment));
927 new_insertions.push(InsertionFragment::insert_new(&fragment));
928 new_ropes.push_str(new_text.as_ref());
929 new_fragments.push(fragment, &None);
930 insertion_offset += new_text.len();
931 }
932
933 // Advance through every fragment that intersects this range, marking the intersecting
934 // portions as deleted.
935 while fragment_start < range.end {
936 let fragment = old_fragments.item().unwrap();
937 let fragment_end = old_fragments.end().visible;
938 let mut intersection = fragment.clone();
939 let intersection_end = cmp::min(range.end, fragment_end);
940 if fragment.visible {
941 intersection.len = intersection_end - fragment_start;
942 intersection.insertion_offset += fragment_start - old_fragments.start().visible;
943 intersection.id =
944 Locator::between(&new_fragments.summary().max_id, &intersection.id);
945 intersection.deletions.insert(timestamp);
946 intersection.visible = false;
947 }
948 if intersection.len > 0 {
949 if fragment.visible && !intersection.visible {
950 let new_start = new_fragments.summary().text.visible;
951 edits_patch.push(Edit {
952 old: fragment_start..intersection_end,
953 new: new_start..new_start,
954 });
955 insertion_slices
956 .push(InsertionSlice::from_fragment(timestamp, &intersection));
957 }
958 new_insertions.push(InsertionFragment::insert_new(&intersection));
959 new_ropes.push_fragment(&intersection, fragment.visible);
960 new_fragments.push(intersection, &None);
961 fragment_start = intersection_end;
962 }
963 if fragment_end <= range.end {
964 old_fragments.next();
965 }
966 }
967
968 let full_range_end = FullOffset(range.end + old_fragments.start().deleted);
969 edit_op.ranges.push(full_range_start..full_range_end);
970 edit_op.new_text.push(new_text);
971 }
972
973 // If the current fragment has been partially consumed, then consume the rest of it
974 // and advance to the next fragment before slicing.
975 if fragment_start > old_fragments.start().visible {
976 let fragment_end = old_fragments.end().visible;
977 if fragment_end > fragment_start {
978 let mut suffix = old_fragments.item().unwrap().clone();
979 suffix.len = fragment_end - fragment_start;
980 suffix.insertion_offset += fragment_start - old_fragments.start().visible;
981 new_insertions.push(InsertionFragment::insert_new(&suffix));
982 new_ropes.push_fragment(&suffix, suffix.visible);
983 new_fragments.push(suffix, &None);
984 }
985 old_fragments.next();
986 }
987
988 let suffix = old_fragments.suffix();
989 new_ropes.append(suffix.summary().text);
990 new_fragments.append(suffix, &None);
991 let (visible_text, deleted_text) = new_ropes.finish();
992 drop(old_fragments);
993
994 self.snapshot.fragments = new_fragments;
995 self.snapshot.insertions.edit(new_insertions, ());
996 self.snapshot.visible_text = visible_text;
997 self.snapshot.deleted_text = deleted_text;
998 self.subscriptions.publish_mut(&edits_patch);
999 self.snapshot.insertion_slices.extend(insertion_slices);
1000 edit_op
1001 }
1002
1003 pub fn set_line_ending(&mut self, line_ending: LineEnding) {
1004 self.snapshot.line_ending = line_ending;
1005 }
1006
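    /// Applies a batch of operations, typically received from other replicas,
    /// deferring any operation whose causal dependencies have not yet been
    /// observed.
    ///
    /// A rough sketch of replicating a single edit between two replicas:
    ///
    /// ```ignore
    /// let buffer_id = BufferId::new(1).unwrap();
    /// let mut host = Buffer::new(0, buffer_id, "abc");
    /// let mut guest = Buffer::new(1, buffer_id, "abc");
    /// let op = host.edit([(3..3, "d")]);
    /// guest.apply_ops([op]);
    /// assert_eq!(guest.text(), host.text());
    /// ```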
1007 pub fn apply_ops<I: IntoIterator<Item = Operation>>(&mut self, ops: I) {
1008 let mut deferred_ops = Vec::new();
1009 for op in ops {
1010 self.history.push(op.clone());
1011 if self.can_apply_op(&op) {
1012 self.apply_op(op);
1013 } else {
1014 self.deferred_replicas.insert(op.replica_id());
1015 deferred_ops.push(op);
1016 }
1017 }
1018 self.deferred_ops.insert(deferred_ops);
1019 self.flush_deferred_ops();
1020 }
1021
1022 fn apply_op(&mut self, op: Operation) {
1023 match op {
1024 Operation::Edit(edit) => {
1025 if !self.version.observed(edit.timestamp) {
1026 self.apply_remote_edit(
1027 &edit.version,
1028 &edit.ranges,
1029 &edit.new_text,
1030 edit.timestamp,
1031 );
1032 self.snapshot.version.observe(edit.timestamp);
1033 self.lamport_clock.observe(edit.timestamp);
1034 self.resolve_edit(edit.timestamp);
1035 }
1036 }
1037 Operation::Undo(undo) => {
1038 if !self.version.observed(undo.timestamp) {
1039 self.apply_undo(&undo);
1040 self.snapshot.version.observe(undo.timestamp);
1041 self.lamport_clock.observe(undo.timestamp);
1042 }
1043 }
1044 }
1045 self.wait_for_version_txs.retain_mut(|(version, tx)| {
1046 if self.snapshot.version().observed_all(version) {
1047 tx.try_send(()).ok();
1048 false
1049 } else {
1050 true
1051 }
1052 });
1053 }
1054
1055 fn apply_remote_edit(
1056 &mut self,
1057 version: &clock::Global,
1058 ranges: &[Range<FullOffset>],
1059 new_text: &[Arc<str>],
1060 timestamp: clock::Lamport,
1061 ) {
1062 if ranges.is_empty() {
1063 return;
1064 }
1065
1066 let edits = ranges.iter().zip(new_text.iter());
1067 let mut edits_patch = Patch::default();
1068 let mut insertion_slices = Vec::new();
1069 let cx = Some(version.clone());
1070 let mut new_insertions = Vec::new();
1071 let mut insertion_offset = 0;
1072 let mut new_ropes =
1073 RopeBuilder::new(self.visible_text.cursor(0), self.deleted_text.cursor(0));
1074 let mut old_fragments = self
1075 .fragments
1076 .cursor::<Dimensions<VersionedFullOffset, usize>>(&cx);
1077 let mut new_fragments =
1078 old_fragments.slice(&VersionedFullOffset::Offset(ranges[0].start), Bias::Left);
1079 new_ropes.append(new_fragments.summary().text);
1080
1081 let mut fragment_start = old_fragments.start().0.full_offset();
1082 for (range, new_text) in edits {
1083 let fragment_end = old_fragments.end().0.full_offset();
1084
1085 // If the current fragment ends before this range, then jump ahead to the first fragment
1086 // that extends past the start of this range, reusing any intervening fragments.
1087 if fragment_end < range.start {
1088 // If the current fragment has been partially consumed, then consume the rest of it
1089 // and advance to the next fragment before slicing.
1090 if fragment_start > old_fragments.start().0.full_offset() {
1091 if fragment_end > fragment_start {
1092 let mut suffix = old_fragments.item().unwrap().clone();
1093 suffix.len = fragment_end.0 - fragment_start.0;
1094 suffix.insertion_offset +=
1095 fragment_start - old_fragments.start().0.full_offset();
1096 new_insertions.push(InsertionFragment::insert_new(&suffix));
1097 new_ropes.push_fragment(&suffix, suffix.visible);
1098 new_fragments.push(suffix, &None);
1099 }
1100 old_fragments.next();
1101 }
1102
1103 let slice =
1104 old_fragments.slice(&VersionedFullOffset::Offset(range.start), Bias::Left);
1105 new_ropes.append(slice.summary().text);
1106 new_fragments.append(slice, &None);
1107 fragment_start = old_fragments.start().0.full_offset();
1108 }
1109
1110 // If we are at the end of a non-concurrent fragment, advance to the next one.
1111 let fragment_end = old_fragments.end().0.full_offset();
1112 if fragment_end == range.start && fragment_end > fragment_start {
1113 let mut fragment = old_fragments.item().unwrap().clone();
1114 fragment.len = fragment_end.0 - fragment_start.0;
1115 fragment.insertion_offset += fragment_start - old_fragments.start().0.full_offset();
1116 new_insertions.push(InsertionFragment::insert_new(&fragment));
1117 new_ropes.push_fragment(&fragment, fragment.visible);
1118 new_fragments.push(fragment, &None);
1119 old_fragments.next();
1120 fragment_start = old_fragments.start().0.full_offset();
1121 }
1122
            // Skip over insertions that are concurrent to this edit, but have a lower Lamport
            // timestamp.
1125 while let Some(fragment) = old_fragments.item() {
1126 if fragment_start == range.start && fragment.timestamp > timestamp {
1127 new_ropes.push_fragment(fragment, fragment.visible);
1128 new_fragments.push(fragment.clone(), &None);
1129 old_fragments.next();
1130 debug_assert_eq!(fragment_start, range.start);
1131 } else {
1132 break;
1133 }
1134 }
1135 debug_assert!(fragment_start <= range.start);
1136
1137 // Preserve any portion of the current fragment that precedes this range.
1138 if fragment_start < range.start {
1139 let mut prefix = old_fragments.item().unwrap().clone();
1140 prefix.len = range.start.0 - fragment_start.0;
1141 prefix.insertion_offset += fragment_start - old_fragments.start().0.full_offset();
1142 prefix.id = Locator::between(&new_fragments.summary().max_id, &prefix.id);
1143 new_insertions.push(InsertionFragment::insert_new(&prefix));
1144 fragment_start = range.start;
1145 new_ropes.push_fragment(&prefix, prefix.visible);
1146 new_fragments.push(prefix, &None);
1147 }
1148
1149 // Insert the new text before any existing fragments within the range.
1150 if !new_text.is_empty() {
1151 let mut old_start = old_fragments.start().1;
1152 if old_fragments.item().is_some_and(|f| f.visible) {
1153 old_start += fragment_start.0 - old_fragments.start().0.full_offset().0;
1154 }
1155 let new_start = new_fragments.summary().text.visible;
1156 let fragment = Fragment {
1157 id: Locator::between(
1158 &new_fragments.summary().max_id,
1159 old_fragments
1160 .item()
1161 .map_or(&Locator::max(), |old_fragment| &old_fragment.id),
1162 ),
1163 timestamp,
1164 insertion_offset,
1165 len: new_text.len(),
1166 deletions: Default::default(),
1167 max_undos: Default::default(),
1168 visible: true,
1169 };
1170 edits_patch.push(Edit {
1171 old: old_start..old_start,
1172 new: new_start..new_start + new_text.len(),
1173 });
1174 insertion_slices.push(InsertionSlice::from_fragment(timestamp, &fragment));
1175 new_insertions.push(InsertionFragment::insert_new(&fragment));
1176 new_ropes.push_str(new_text);
1177 new_fragments.push(fragment, &None);
1178 insertion_offset += new_text.len();
1179 }
1180
1181 // Advance through every fragment that intersects this range, marking the intersecting
1182 // portions as deleted.
1183 while fragment_start < range.end {
1184 let fragment = old_fragments.item().unwrap();
1185 let fragment_end = old_fragments.end().0.full_offset();
1186 let mut intersection = fragment.clone();
1187 let intersection_end = cmp::min(range.end, fragment_end);
1188 if fragment.was_visible(version, &self.undo_map) {
1189 intersection.len = intersection_end.0 - fragment_start.0;
1190 intersection.insertion_offset +=
1191 fragment_start - old_fragments.start().0.full_offset();
1192 intersection.id =
1193 Locator::between(&new_fragments.summary().max_id, &intersection.id);
1194 intersection.deletions.insert(timestamp);
1195 intersection.visible = false;
1196 insertion_slices.push(InsertionSlice::from_fragment(timestamp, &intersection));
1197 }
1198 if intersection.len > 0 {
1199 if fragment.visible && !intersection.visible {
1200 let old_start = old_fragments.start().1
1201 + (fragment_start.0 - old_fragments.start().0.full_offset().0);
1202 let new_start = new_fragments.summary().text.visible;
1203 edits_patch.push(Edit {
1204 old: old_start..old_start + intersection.len,
1205 new: new_start..new_start,
1206 });
1207 }
1208 new_insertions.push(InsertionFragment::insert_new(&intersection));
1209 new_ropes.push_fragment(&intersection, fragment.visible);
1210 new_fragments.push(intersection, &None);
1211 fragment_start = intersection_end;
1212 }
1213 if fragment_end <= range.end {
1214 old_fragments.next();
1215 }
1216 }
1217 }
1218
1219 // If the current fragment has been partially consumed, then consume the rest of it
1220 // and advance to the next fragment before slicing.
1221 if fragment_start > old_fragments.start().0.full_offset() {
1222 let fragment_end = old_fragments.end().0.full_offset();
1223 if fragment_end > fragment_start {
1224 let mut suffix = old_fragments.item().unwrap().clone();
1225 suffix.len = fragment_end.0 - fragment_start.0;
1226 suffix.insertion_offset += fragment_start - old_fragments.start().0.full_offset();
1227 new_insertions.push(InsertionFragment::insert_new(&suffix));
1228 new_ropes.push_fragment(&suffix, suffix.visible);
1229 new_fragments.push(suffix, &None);
1230 }
1231 old_fragments.next();
1232 }
1233
1234 let suffix = old_fragments.suffix();
1235 new_ropes.append(suffix.summary().text);
1236 new_fragments.append(suffix, &None);
1237 let (visible_text, deleted_text) = new_ropes.finish();
1238 drop(old_fragments);
1239
1240 self.snapshot.fragments = new_fragments;
1241 self.snapshot.visible_text = visible_text;
1242 self.snapshot.deleted_text = deleted_text;
1243 self.snapshot.insertions.edit(new_insertions, ());
1244 self.snapshot.insertion_slices.extend(insertion_slices);
1245 self.subscriptions.publish_mut(&edits_patch)
1246 }
1247
1248 fn fragment_ids_for_edits<'a>(
1249 &'a self,
1250 edit_ids: impl Iterator<Item = &'a clock::Lamport>,
1251 ) -> Vec<&'a Locator> {
1252 // Get all of the insertion slices changed by the given edits.
1253 let mut insertion_slices = Vec::new();
1254 for edit_id in edit_ids {
1255 let insertion_slice = InsertionSlice {
1256 edit_id: *edit_id,
1257 insertion_id: clock::Lamport::default(),
1258 range: 0..0,
1259 };
1260 let slices = self
1261 .snapshot
1262 .insertion_slices
1263 .iter_from(&insertion_slice)
1264 .take_while(|slice| slice.edit_id == *edit_id);
1265 insertion_slices.extend(slices)
1266 }
1267 insertion_slices
1268 .sort_unstable_by_key(|s| (s.insertion_id, s.range.start, Reverse(s.range.end)));
1269
1270 // Get all of the fragments corresponding to these insertion slices.
1271 let mut fragment_ids = Vec::new();
1272 let mut insertions_cursor = self.insertions.cursor::<InsertionFragmentKey>(());
1273 for insertion_slice in &insertion_slices {
1274 if insertion_slice.insertion_id != insertions_cursor.start().timestamp
1275 || insertion_slice.range.start > insertions_cursor.start().split_offset
1276 {
1277 insertions_cursor.seek_forward(
1278 &InsertionFragmentKey {
1279 timestamp: insertion_slice.insertion_id,
1280 split_offset: insertion_slice.range.start,
1281 },
1282 Bias::Left,
1283 );
1284 }
1285 while let Some(item) = insertions_cursor.item() {
1286 if item.timestamp != insertion_slice.insertion_id
1287 || item.split_offset >= insertion_slice.range.end
1288 {
1289 break;
1290 }
1291 fragment_ids.push(&item.fragment_id);
1292 insertions_cursor.next();
1293 }
1294 }
1295 fragment_ids.sort_unstable();
1296 fragment_ids
1297 }
1298
1299 fn apply_undo(&mut self, undo: &UndoOperation) {
1300 self.snapshot.undo_map.insert(undo);
1301
1302 let mut edits = Patch::default();
1303 let mut old_fragments = self
1304 .fragments
1305 .cursor::<Dimensions<Option<&Locator>, usize>>(&None);
1306 let mut new_fragments = SumTree::new(&None);
1307 let mut new_ropes =
1308 RopeBuilder::new(self.visible_text.cursor(0), self.deleted_text.cursor(0));
1309
1310 for fragment_id in self.fragment_ids_for_edits(undo.counts.keys()) {
1311 let preceding_fragments = old_fragments.slice(&Some(fragment_id), Bias::Left);
1312 new_ropes.append(preceding_fragments.summary().text);
1313 new_fragments.append(preceding_fragments, &None);
1314
1315 if let Some(fragment) = old_fragments.item() {
1316 let mut fragment = fragment.clone();
1317 let fragment_was_visible = fragment.visible;
1318
1319 fragment.visible = fragment.is_visible(&self.undo_map);
1320 fragment.max_undos.observe(undo.timestamp);
1321
1322 let old_start = old_fragments.start().1;
1323 let new_start = new_fragments.summary().text.visible;
1324 if fragment_was_visible && !fragment.visible {
1325 edits.push(Edit {
1326 old: old_start..old_start + fragment.len,
1327 new: new_start..new_start,
1328 });
1329 } else if !fragment_was_visible && fragment.visible {
1330 edits.push(Edit {
1331 old: old_start..old_start,
1332 new: new_start..new_start + fragment.len,
1333 });
1334 }
1335 new_ropes.push_fragment(&fragment, fragment_was_visible);
1336 new_fragments.push(fragment, &None);
1337
1338 old_fragments.next();
1339 }
1340 }
1341
1342 let suffix = old_fragments.suffix();
1343 new_ropes.append(suffix.summary().text);
1344 new_fragments.append(suffix, &None);
1345
1346 drop(old_fragments);
1347 let (visible_text, deleted_text) = new_ropes.finish();
1348 self.snapshot.fragments = new_fragments;
1349 self.snapshot.visible_text = visible_text;
1350 self.snapshot.deleted_text = deleted_text;
1351 self.subscriptions.publish_mut(&edits);
1352 }
1353
1354 fn flush_deferred_ops(&mut self) {
1355 self.deferred_replicas.clear();
1356 let mut deferred_ops = Vec::new();
1357 for op in self.deferred_ops.drain().iter().cloned() {
1358 if self.can_apply_op(&op) {
1359 self.apply_op(op);
1360 } else {
1361 self.deferred_replicas.insert(op.replica_id());
1362 deferred_ops.push(op);
1363 }
1364 }
1365 self.deferred_ops.insert(deferred_ops);
1366 }
1367
1368 fn can_apply_op(&self, op: &Operation) -> bool {
1369 if self.deferred_replicas.contains(&op.replica_id()) {
1370 false
1371 } else {
1372 self.version.observed_all(match op {
1373 Operation::Edit(edit) => &edit.version,
1374 Operation::Undo(undo) => &undo.version,
1375 })
1376 }
1377 }
1378
1379 pub fn has_deferred_ops(&self) -> bool {
1380 !self.deferred_ops.is_empty()
1381 }
1382
1383 pub fn peek_undo_stack(&self) -> Option<&HistoryEntry> {
1384 self.history.undo_stack.last()
1385 }
1386
1387 pub fn peek_redo_stack(&self) -> Option<&HistoryEntry> {
1388 self.history.redo_stack.last()
1389 }
1390
1391 pub fn start_transaction(&mut self) -> Option<TransactionId> {
1392 self.start_transaction_at(Instant::now())
1393 }
1394
1395 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
1396 self.history
1397 .start_transaction(self.version.clone(), now, &mut self.lamport_clock)
1398 }
1399
1400 pub fn end_transaction(&mut self) -> Option<(TransactionId, clock::Global)> {
1401 self.end_transaction_at(Instant::now())
1402 }
1403
1404 pub fn end_transaction_at(&mut self, now: Instant) -> Option<(TransactionId, clock::Global)> {
1405 if let Some(entry) = self.history.end_transaction(now) {
1406 let since = entry.transaction.start.clone();
1407 let id = self.history.group().unwrap();
1408 Some((id, since))
1409 } else {
1410 None
1411 }
1412 }
1413
1414 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
1415 self.history.finalize_last_transaction()
1416 }
1417
1418 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
1419 self.history.group_until(transaction_id);
1420 }
1421
1422 pub fn base_text(&self) -> &Rope {
1423 &self.history.base_text
1424 }
1425
1426 pub fn operations(&self) -> &TreeMap<clock::Lamport, Operation> {
1427 &self.history.operations
1428 }
1429
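    /// Undoes the most recent transaction on the undo stack, returning its id
    /// and the undo operation to replicate, if there was anything to undo.
    ///
    /// A small sketch (each `edit` call below forms its own transaction):
    ///
    /// ```ignore
    /// let mut buffer = Buffer::new(0, BufferId::new(1).unwrap(), "");
    /// buffer.edit([(0..0, "hello")]);
    /// let (transaction_id, _undo_op) = buffer.undo().unwrap();
    /// assert_eq!(buffer.text(), "");
    /// let _ = buffer.redo();
    /// assert_eq!(buffer.text(), "hello");
    /// ```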
1430 pub fn undo(&mut self) -> Option<(TransactionId, Operation)> {
1431 if let Some(entry) = self.history.pop_undo() {
1432 let transaction = entry.transaction.clone();
1433 let transaction_id = transaction.id;
1434 let op = self.undo_or_redo(transaction);
1435 Some((transaction_id, op))
1436 } else {
1437 None
1438 }
1439 }
1440
1441 pub fn undo_transaction(&mut self, transaction_id: TransactionId) -> Option<Operation> {
1442 let transaction = self
1443 .history
1444 .remove_from_undo(transaction_id)?
1445 .transaction
1446 .clone();
1447 Some(self.undo_or_redo(transaction))
1448 }
1449
1450 pub fn undo_to_transaction(&mut self, transaction_id: TransactionId) -> Vec<Operation> {
1451 let transactions = self
1452 .history
1453 .remove_from_undo_until(transaction_id)
1454 .iter()
1455 .map(|entry| entry.transaction.clone())
1456 .collect::<Vec<_>>();
1457
1458 transactions
1459 .into_iter()
1460 .map(|transaction| self.undo_or_redo(transaction))
1461 .collect()
1462 }
1463
1464 pub fn forget_transaction(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
1465 self.history.forget(transaction_id)
1466 }
1467
1468 pub fn get_transaction(&self, transaction_id: TransactionId) -> Option<&Transaction> {
1469 self.history.transaction(transaction_id)
1470 }
1471
1472 pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
1473 self.history.merge_transactions(transaction, destination);
1474 }
1475
1476 pub fn redo(&mut self) -> Option<(TransactionId, Operation)> {
1477 if let Some(entry) = self.history.pop_redo() {
1478 let transaction = entry.transaction.clone();
1479 let transaction_id = transaction.id;
1480 let op = self.undo_or_redo(transaction);
1481 Some((transaction_id, op))
1482 } else {
1483 None
1484 }
1485 }
1486
1487 pub fn redo_to_transaction(&mut self, transaction_id: TransactionId) -> Vec<Operation> {
1488 let transactions = self
1489 .history
1490 .remove_from_redo(transaction_id)
1491 .iter()
1492 .map(|entry| entry.transaction.clone())
1493 .collect::<Vec<_>>();
1494
1495 transactions
1496 .into_iter()
1497 .map(|transaction| self.undo_or_redo(transaction))
1498 .collect()
1499 }
1500
1501 fn undo_or_redo(&mut self, transaction: Transaction) -> Operation {
1502 let mut counts = HashMap::default();
1503 for edit_id in transaction.edit_ids {
1504 counts.insert(edit_id, self.undo_map.undo_count(edit_id).saturating_add(1));
1505 }
1506
1507 let operation = self.undo_operations(counts);
1508 self.history.push(operation.clone());
1509 operation
1510 }
1511
1512 pub fn undo_operations(&mut self, counts: HashMap<clock::Lamport, u32>) -> Operation {
1513 let timestamp = self.lamport_clock.tick();
1514 let version = self.version();
1515 self.snapshot.version.observe(timestamp);
1516 let undo = UndoOperation {
1517 timestamp,
1518 version,
1519 counts,
1520 };
1521 self.apply_undo(&undo);
1522 Operation::Undo(undo)
1523 }
1524
1525 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
1526 self.history.push_transaction(transaction, now);
1527 }
1528
    /// Differs from `push_transaction` in that it does not clear the redo
    /// stack. Intended to be used to create a parent transaction to merge
    /// potential child transactions into.
    ///
    /// The caller is responsible for removing it from the undo history using
    /// `forget_transaction` if no edits are merged into it. Otherwise, if edits
    /// are merged into this transaction, the caller is responsible for ensuring
    /// the redo stack is cleared. The easiest way to ensure the redo stack is
    /// cleared is to create transactions with the usual `start_transaction` and
    /// `end_transaction` methods and then merge the resulting transactions into
    /// the transaction created by this method.
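    ///
    /// A rough sketch of the intended workflow described above:
    ///
    /// ```ignore
    /// let parent_id = buffer.push_empty_transaction(Instant::now());
    /// buffer.start_transaction();
    /// buffer.edit([(0..0, "child edit")]);
    /// if let Some((child_id, _)) = buffer.end_transaction() {
    ///     buffer.merge_transactions(child_id, parent_id);
    /// } else {
    ///     // No edits were made, so drop the empty parent transaction.
    ///     buffer.forget_transaction(parent_id);
    /// }
    /// ```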
1542 pub fn push_empty_transaction(&mut self, now: Instant) -> TransactionId {
1543 self.history
1544 .push_empty_transaction(self.version.clone(), now, &mut self.lamport_clock)
1545 }
1546
1547 pub fn edited_ranges_for_transaction_id<D>(
1548 &self,
1549 transaction_id: TransactionId,
1550 ) -> impl '_ + Iterator<Item = Range<D>>
1551 where
1552 D: TextDimension,
1553 {
1554 self.history
1555 .transaction(transaction_id)
1556 .into_iter()
1557 .flat_map(|transaction| self.edited_ranges_for_transaction(transaction))
1558 }
1559
1560 pub fn edited_ranges_for_edit_ids<'a, D>(
1561 &'a self,
1562 edit_ids: impl IntoIterator<Item = &'a clock::Lamport>,
1563 ) -> impl 'a + Iterator<Item = Range<D>>
1564 where
1565 D: TextDimension,
1566 {
1567 // get fragment ranges
1568 let mut cursor = self
1569 .fragments
1570 .cursor::<Dimensions<Option<&Locator>, usize>>(&None);
1571 let offset_ranges = self
1572 .fragment_ids_for_edits(edit_ids.into_iter())
1573 .into_iter()
1574 .filter_map(move |fragment_id| {
1575 cursor.seek_forward(&Some(fragment_id), Bias::Left);
1576 let fragment = cursor.item()?;
1577 let start_offset = cursor.start().1;
1578 let end_offset = start_offset + if fragment.visible { fragment.len } else { 0 };
1579 Some(start_offset..end_offset)
1580 });
1581
1582 // combine adjacent ranges
1583 let mut prev_range: Option<Range<usize>> = None;
1584 let disjoint_ranges = offset_ranges
1585 .map(Some)
1586 .chain([None])
1587 .filter_map(move |range| {
1588 if let Some((range, prev_range)) = range.as_ref().zip(prev_range.as_mut())
1589 && prev_range.end == range.start
1590 {
1591 prev_range.end = range.end;
1592 return None;
1593 }
1594 let result = prev_range.clone();
1595 prev_range = range;
1596 result
1597 });
1598
1599 // convert to the desired text dimension.
1600 let mut position = D::zero(());
1601 let mut rope_cursor = self.visible_text.cursor(0);
1602 disjoint_ranges.map(move |range| {
1603 position.add_assign(&rope_cursor.summary(range.start));
1604 let start = position;
1605 position.add_assign(&rope_cursor.summary(range.end));
1606 let end = position;
1607 start..end
1608 })
1609 }
1610
1611 pub fn edited_ranges_for_transaction<'a, D>(
1612 &'a self,
1613 transaction: &'a Transaction,
1614 ) -> impl 'a + Iterator<Item = Range<D>>
1615 where
1616 D: TextDimension,
1617 {
1618 self.edited_ranges_for_edit_ids(&transaction.edit_ids)
1619 }
1620
1621 pub fn subscribe(&mut self) -> Subscription {
1622 self.subscriptions.subscribe()
1623 }
1624
1625 pub fn wait_for_edits<It: IntoIterator<Item = clock::Lamport>>(
1626 &mut self,
1627 edit_ids: It,
1628 ) -> impl 'static + Future<Output = Result<()>> + use<It> {
1629 let mut futures = Vec::new();
1630 for edit_id in edit_ids {
1631 if !self.version.observed(edit_id) {
1632 let (tx, rx) = oneshot::channel();
1633 self.edit_id_resolvers.entry(edit_id).or_default().push(tx);
1634 futures.push(rx);
1635 }
1636 }
1637
1638 async move {
1639 for mut future in futures {
1640 if future.recv().await.is_none() {
1641 anyhow::bail!("gave up waiting for edits");
1642 }
1643 }
1644 Ok(())
1645 }
1646 }
1647
1648 pub fn wait_for_anchors<It: IntoIterator<Item = Anchor>>(
1649 &mut self,
1650 anchors: It,
1651 ) -> impl 'static + Future<Output = Result<()>> + use<It> {
1652 let mut futures = Vec::new();
1653 for anchor in anchors {
1654 if !self.version.observed(anchor.timestamp)
1655 && anchor != Anchor::MAX
1656 && anchor != Anchor::MIN
1657 {
1658 let (tx, rx) = oneshot::channel();
1659 self.edit_id_resolvers
1660 .entry(anchor.timestamp)
1661 .or_default()
1662 .push(tx);
1663 futures.push(rx);
1664 }
1665 }
1666
1667 async move {
1668 for mut future in futures {
1669 if future.recv().await.is_none() {
1670 anyhow::bail!("gave up waiting for anchors");
1671 }
1672 }
1673 Ok(())
1674 }
1675 }
1676
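    /// Returns a future that resolves once this buffer has observed every
    /// operation in the given version, or fails if waiting is abandoned.
    ///
    /// A hedged sketch, assuming an async context and a `remote_version`
    /// received from another replica:
    ///
    /// ```ignore
    /// let synced = buffer.wait_for_version(remote_version.clone());
    /// // ...apply the missing operations via `apply_ops`...
    /// synced.await?;
    /// ```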
1677 pub fn wait_for_version(
1678 &mut self,
1679 version: clock::Global,
1680 ) -> impl Future<Output = Result<()>> + use<> {
1681 let mut rx = None;
1682 if !self.snapshot.version.observed_all(&version) {
1683 let channel = oneshot::channel();
1684 self.wait_for_version_txs.push((version, channel.0));
1685 rx = Some(channel.1);
1686 }
1687 async move {
1688 if let Some(mut rx) = rx
1689 && rx.recv().await.is_none()
1690 {
1691 anyhow::bail!("gave up waiting for version");
1692 }
1693 Ok(())
1694 }
1695 }
1696
1697 pub fn give_up_waiting(&mut self) {
1698 self.edit_id_resolvers.clear();
1699 self.wait_for_version_txs.clear();
1700 }
1701
1702 fn resolve_edit(&mut self, edit_id: clock::Lamport) {
1703 for mut tx in self
1704 .edit_id_resolvers
1705 .remove(&edit_id)
1706 .into_iter()
1707 .flatten()
1708 {
1709 tx.try_send(()).ok();
1710 }
1711 }
1712}
1713
1714#[cfg(any(test, feature = "test-support"))]
1715impl Buffer {
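    /// Applies the edits described by a marked string, as interpreted by
    /// `util::test::marked_text_ranges`. A hedged sketch, assuming the usual
    /// `«` and `»` markers delimit the newly inserted text:
    ///
    /// ```ignore
    /// let mut buffer = Buffer::new(0, BufferId::new(1).unwrap(), "abc");
    /// buffer.edit_via_marked_text("a«XY»bc");
    /// assert_eq!(buffer.text(), "aXYbc");
    /// ```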
1716 #[track_caller]
1717 pub fn edit_via_marked_text(&mut self, marked_string: &str) {
1718 let edits = self.edits_for_marked_text(marked_string);
1719 self.edit(edits);
1720 }
1721
1722 #[track_caller]
1723 pub fn edits_for_marked_text(&self, marked_string: &str) -> Vec<(Range<usize>, String)> {
1724 let old_text = self.text();
1725 let (new_text, mut ranges) = util::test::marked_text_ranges(marked_string, false);
1726 if ranges.is_empty() {
1727 ranges.push(0..new_text.len());
1728 }
1729
1730 assert_eq!(
1731 old_text[..ranges[0].start],
1732 new_text[..ranges[0].start],
1733 "invalid edit"
1734 );
1735
1736 let mut delta = 0;
1737 let mut edits = Vec::new();
1738 let mut ranges = ranges.into_iter().peekable();
1739
1740 while let Some(inserted_range) = ranges.next() {
1741 let new_start = inserted_range.start;
1742 let old_start = (new_start as isize - delta) as usize;
1743
1744 let following_text = if let Some(next_range) = ranges.peek() {
1745 &new_text[inserted_range.end..next_range.start]
1746 } else {
1747 &new_text[inserted_range.end..]
1748 };
1749
1750 let inserted_len = inserted_range.len();
1751 let deleted_len = old_text[old_start..]
1752 .find(following_text)
1753 .expect("invalid edit");
1754
1755 let old_range = old_start..old_start + deleted_len;
1756 edits.push((old_range, new_text[inserted_range].to_string()));
1757 delta += inserted_len as isize - deleted_len as isize;
1758 }
1759
1760 assert_eq!(
1761 old_text.len() as isize + delta,
1762 new_text.len() as isize,
1763 "invalid edit"
1764 );
1765
1766 edits
1767 }
1768
1769 pub fn check_invariants(&self) {
1770 // Ensure every fragment is ordered by locator in the fragment tree and corresponds
1771 // to an insertion fragment in the insertions tree.
1772 let mut prev_fragment_id = Locator::min();
1773 for fragment in self.snapshot.fragments.items(&None) {
1774 assert!(fragment.id > prev_fragment_id);
1775 prev_fragment_id = fragment.id.clone();
1776
1777 let insertion_fragment = self
1778 .snapshot
1779 .insertions
1780 .get(
1781 &InsertionFragmentKey {
1782 timestamp: fragment.timestamp,
1783 split_offset: fragment.insertion_offset,
1784 },
1785 (),
1786 )
1787 .unwrap();
1788 assert_eq!(
1789 insertion_fragment.fragment_id, fragment.id,
1790 "fragment: {:?}\ninsertion: {:?}",
1791 fragment, insertion_fragment
1792 );
1793 }
1794
1795 let mut cursor = self.snapshot.fragments.cursor::<Option<&Locator>>(&None);
1796 for insertion_fragment in self.snapshot.insertions.cursor::<()>(()) {
1797 cursor.seek(&Some(&insertion_fragment.fragment_id), Bias::Left);
1798 let fragment = cursor.item().unwrap();
1799 assert_eq!(insertion_fragment.fragment_id, fragment.id);
1800 assert_eq!(insertion_fragment.split_offset, fragment.insertion_offset);
1801 }
1802
1803 let fragment_summary = self.snapshot.fragments.summary();
1804 assert_eq!(
1805 fragment_summary.text.visible,
1806 self.snapshot.visible_text.len()
1807 );
1808 assert_eq!(
1809 fragment_summary.text.deleted,
1810 self.snapshot.deleted_text.len()
1811 );
1812
1813 assert!(!self.text().contains("\r\n"));
1814 }
1815
1816 pub fn set_group_interval(&mut self, group_interval: Duration) {
1817 self.history.group_interval = group_interval;
1818 }
1819
1820 pub fn random_byte_range(&self, start_offset: usize, rng: &mut impl rand::Rng) -> Range<usize> {
1821 let end = self.clip_offset(rng.random_range(start_offset..=self.len()), Bias::Right);
1822 let start = self.clip_offset(rng.random_range(start_offset..=end), Bias::Right);
1823 start..end
1824 }
1825
1826 pub fn get_random_edits<T>(
1827 &self,
1828 rng: &mut T,
1829 edit_count: usize,
1830 ) -> Vec<(Range<usize>, Arc<str>)>
1831 where
1832 T: rand::Rng,
1833 {
1834 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
1835 let mut last_end = None;
1836 for _ in 0..edit_count {
1837 if last_end.is_some_and(|last_end| last_end >= self.len()) {
1838 break;
1839 }
1840 let new_start = last_end.map_or(0, |last_end| last_end + 1);
1841 let range = self.random_byte_range(new_start, rng);
1842 last_end = Some(range.end);
1843
1844 let new_text_len = rng.random_range(0..10);
1845 let new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
1846
1847 edits.push((range, new_text.into()));
1848 }
1849 edits
1850 }
1851
1852 pub fn randomly_edit<T>(
1853 &mut self,
1854 rng: &mut T,
1855 edit_count: usize,
1856 ) -> (Vec<(Range<usize>, Arc<str>)>, Operation)
1857 where
1858 T: rand::Rng,
1859 {
1860 let mut edits = self.get_random_edits(rng, edit_count);
1861 log::info!("mutating buffer {} with {:?}", self.replica_id, edits);
1862
1863 let op = self.edit(edits.iter().cloned());
1864 if let Operation::Edit(edit) = &op {
1865 assert_eq!(edits.len(), edit.new_text.len());
1866 for (edit, new_text) in edits.iter_mut().zip(&edit.new_text) {
1867 edit.1 = new_text.clone();
1868 }
1869 } else {
1870 unreachable!()
1871 }
1872
1873 (edits, op)
1874 }
1875
1876 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng) -> Vec<Operation> {
1877 use rand::prelude::*;
1878
1879 let mut ops = Vec::new();
1880 for _ in 0..rng.random_range(1..=5) {
1881 if let Some(entry) = self.history.undo_stack.choose(rng) {
1882 let transaction = entry.transaction.clone();
1883 log::info!(
1884 "undoing buffer {} transaction {:?}",
1885 self.replica_id,
1886 transaction
1887 );
1888 ops.push(self.undo_or_redo(transaction));
1889 }
1890 }
1891 ops
1892 }
1893}
1894
1895impl Deref for Buffer {
1896 type Target = BufferSnapshot;
1897
1898 fn deref(&self) -> &Self::Target {
1899 &self.snapshot
1900 }
1901}
1902
1903impl BufferSnapshot {
1904 pub fn as_rope(&self) -> &Rope {
1905 &self.visible_text
1906 }
1907
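    /// Reconstructs the buffer's visible text as it was at `version`.
    ///
    /// A rough usage sketch (not compiled as a doctest); it assumes a mutable `Buffer`
    /// named `buffer` and that `Buffer::edit` accepts `(Range<usize>, &str)` pairs, as it
    /// is used elsewhere in this crate:
    ///
    /// ```ignore
    /// let old_version = buffer.version().clone();
    /// let old_text = buffer.text();
    /// buffer.edit([(0..0, "hello ")]);
    /// // The reconstructed rope matches the text captured before the edit.
    /// assert_eq!(buffer.rope_for_version(&old_version).to_string(), old_text);
    /// ```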
1908 pub fn rope_for_version(&self, version: &clock::Global) -> Rope {
1909 let mut rope = Rope::new();
1910
1911 let mut cursor = self
1912 .fragments
1913 .filter::<_, FragmentTextSummary>(&None, move |summary| {
1914 !version.observed_all(&summary.max_version)
1915 });
1916 cursor.next();
1917
1918 let mut visible_cursor = self.visible_text.cursor(0);
1919 let mut deleted_cursor = self.deleted_text.cursor(0);
1920
1921 while let Some(fragment) = cursor.item() {
1922 if cursor.start().visible > visible_cursor.offset() {
1923 let text = visible_cursor.slice(cursor.start().visible);
1924 rope.append(text);
1925 }
1926
1927 if fragment.was_visible(version, &self.undo_map) {
1928 if fragment.visible {
1929 let text = visible_cursor.slice(cursor.end().visible);
1930 rope.append(text);
1931 } else {
1932 deleted_cursor.seek_forward(cursor.start().deleted);
1933 let text = deleted_cursor.slice(cursor.end().deleted);
1934 rope.append(text);
1935 }
1936 } else if fragment.visible {
1937 visible_cursor.seek_forward(cursor.end().visible);
1938 }
1939
1940 cursor.next();
1941 }
1942
1943 if cursor.start().visible > visible_cursor.offset() {
1944 let text = visible_cursor.slice(cursor.start().visible);
1945 rope.append(text);
1946 }
1947
1948 rope
1949 }
1950
1951 pub fn remote_id(&self) -> BufferId {
1952 self.remote_id
1953 }
1954
1955 pub fn replica_id(&self) -> ReplicaId {
1956 self.replica_id
1957 }
1958
1959 pub fn row_count(&self) -> u32 {
1960 self.max_point().row + 1
1961 }
1962
1963 pub fn len(&self) -> usize {
1964 self.visible_text.len()
1965 }
1966
1967 pub fn is_empty(&self) -> bool {
1968 self.len() == 0
1969 }
1970
1971 pub fn chars(&self) -> impl Iterator<Item = char> + '_ {
1972 self.chars_at(0)
1973 }
1974
1975 pub fn chars_for_range<T: ToOffset>(&self, range: Range<T>) -> impl Iterator<Item = char> + '_ {
1976 self.text_for_range(range).flat_map(str::chars)
1977 }
1978
1979 pub fn reversed_chars_for_range<T: ToOffset>(
1980 &self,
1981 range: Range<T>,
1982 ) -> impl Iterator<Item = char> + '_ {
1983 self.reversed_chunks_in_range(range)
1984 .flat_map(|chunk| chunk.chars().rev())
1985 }
1986
1987 pub fn contains_str_at<T>(&self, position: T, needle: &str) -> bool
1988 where
1989 T: ToOffset,
1990 {
1991 let position = position.to_offset(self);
1992 position == self.clip_offset(position, Bias::Left)
1993 && self
1994 .bytes_in_range(position..self.len())
1995 .flatten()
1996 .copied()
1997 .take(needle.len())
1998 .eq(needle.bytes())
1999 }
2000
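    /// Returns the range of text ending at `position` that case-insensitively matches a
    /// prefix of `needle`, choosing the longest such match.
    ///
    /// A minimal sketch (not compiled as a doctest), assuming `snapshot` is a
    /// `BufferSnapshot` whose text is `"let pri"`:
    ///
    /// ```ignore
    /// // The text ending at offset 7 ("pri") matches the first three characters of
    /// // "println", so the returned range covers those three bytes.
    /// let range = snapshot.common_prefix_at(7, "println");
    /// assert_eq!(range, 4..7);
    /// ```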
2001 pub fn common_prefix_at<T>(&self, position: T, needle: &str) -> Range<T>
2002 where
2003 T: ToOffset + TextDimension,
2004 {
2005 let offset = position.to_offset(self);
2006 let common_prefix_len = needle
2007 .char_indices()
2008 .map(|(index, _)| index)
2009 .chain([needle.len()])
2010 .take_while(|&len| len <= offset)
2011 .filter(|&len| {
2012 let left = self
2013 .chars_for_range(offset - len..offset)
2014 .flat_map(char::to_lowercase);
2015 let right = needle[..len].chars().flat_map(char::to_lowercase);
2016 left.eq(right)
2017 })
2018 .last()
2019 .unwrap_or(0);
2020 let start_offset = offset - common_prefix_len;
2021 let start = self.text_summary_for_range(0..start_offset);
2022 start..position
2023 }
2024
2025 pub fn text(&self) -> String {
2026 self.visible_text.to_string()
2027 }
2028
2029 pub fn line_ending(&self) -> LineEnding {
2030 self.line_ending
2031 }
2032
2033 pub fn deleted_text(&self) -> String {
2034 self.deleted_text.to_string()
2035 }
2036
2037 pub fn fragments(&self) -> impl Iterator<Item = &Fragment> {
2038 self.fragments.iter()
2039 }
2040
2041 pub fn text_summary(&self) -> TextSummary {
2042 self.visible_text.summary()
2043 }
2044
2045 pub fn max_point(&self) -> Point {
2046 self.visible_text.max_point()
2047 }
2048
2049 pub fn max_point_utf16(&self) -> PointUtf16 {
2050 self.visible_text.max_point_utf16()
2051 }
2052
2053 pub fn point_to_offset(&self, point: Point) -> usize {
2054 self.visible_text.point_to_offset(point)
2055 }
2056
2057 pub fn point_utf16_to_offset(&self, point: PointUtf16) -> usize {
2058 self.visible_text.point_utf16_to_offset(point)
2059 }
2060
2061 pub fn unclipped_point_utf16_to_offset(&self, point: Unclipped<PointUtf16>) -> usize {
2062 self.visible_text.unclipped_point_utf16_to_offset(point)
2063 }
2064
2065 pub fn unclipped_point_utf16_to_point(&self, point: Unclipped<PointUtf16>) -> Point {
2066 self.visible_text.unclipped_point_utf16_to_point(point)
2067 }
2068
2069 pub fn offset_utf16_to_offset(&self, offset: OffsetUtf16) -> usize {
2070 self.visible_text.offset_utf16_to_offset(offset)
2071 }
2072
2073 pub fn offset_to_offset_utf16(&self, offset: usize) -> OffsetUtf16 {
2074 self.visible_text.offset_to_offset_utf16(offset)
2075 }
2076
2077 pub fn offset_to_point(&self, offset: usize) -> Point {
2078 self.visible_text.offset_to_point(offset)
2079 }
2080
2081 pub fn offset_to_point_utf16(&self, offset: usize) -> PointUtf16 {
2082 self.visible_text.offset_to_point_utf16(offset)
2083 }
2084
2085 pub fn point_to_point_utf16(&self, point: Point) -> PointUtf16 {
2086 self.visible_text.point_to_point_utf16(point)
2087 }
2088
2089 pub fn version(&self) -> &clock::Global {
2090 &self.version
2091 }
2092
2093 pub fn chars_at<T: ToOffset>(&self, position: T) -> impl Iterator<Item = char> + '_ {
2094 let offset = position.to_offset(self);
2095 self.visible_text.chars_at(offset)
2096 }
2097
2098 pub fn reversed_chars_at<T: ToOffset>(&self, position: T) -> impl Iterator<Item = char> + '_ {
2099 let offset = position.to_offset(self);
2100 self.visible_text.reversed_chars_at(offset)
2101 }
2102
2103 pub fn reversed_chunks_in_range<T: ToOffset>(&self, range: Range<T>) -> rope::Chunks<'_> {
2104 let range = range.start.to_offset(self)..range.end.to_offset(self);
2105 self.visible_text.reversed_chunks_in_range(range)
2106 }
2107
2108 pub fn bytes_in_range<T: ToOffset>(&self, range: Range<T>) -> rope::Bytes<'_> {
2109 let start = range.start.to_offset(self);
2110 let end = range.end.to_offset(self);
2111 self.visible_text.bytes_in_range(start..end)
2112 }
2113
2114 pub fn reversed_bytes_in_range<T: ToOffset>(&self, range: Range<T>) -> rope::Bytes<'_> {
2115 let start = range.start.to_offset(self);
2116 let end = range.end.to_offset(self);
2117 self.visible_text.reversed_bytes_in_range(start..end)
2118 }
2119
2120 pub fn text_for_range<T: ToOffset>(&self, range: Range<T>) -> Chunks<'_> {
2121 let start = range.start.to_offset(self);
2122 let end = range.end.to_offset(self);
2123 self.visible_text.chunks_in_range(start..end)
2124 }
2125
2126 pub fn line_len(&self, row: u32) -> u32 {
2127 let row_start_offset = Point::new(row, 0).to_offset(self);
2128 let row_end_offset = if row >= self.max_point().row {
2129 self.len()
2130 } else {
2131 Point::new(row + 1, 0).to_previous_offset(self)
2132 };
2133 (row_end_offset - row_start_offset) as u32
2134 }
2135
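    /// Returns the line indents for the given row range. Note that, unlike
    /// [`Self::reversed_line_indents_in_row_range`], the end row is included.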
2136 pub fn line_indents_in_row_range(
2137 &self,
2138 row_range: Range<u32>,
2139 ) -> impl Iterator<Item = (u32, LineIndent)> + '_ {
2140 let start = Point::new(row_range.start, 0).to_offset(self);
2141 let end = Point::new(row_range.end, self.line_len(row_range.end)).to_offset(self);
2142
2143 let mut chunks = self.as_rope().chunks_in_range(start..end);
2144 let mut row = row_range.start;
2145 let mut done = false;
2146 std::iter::from_fn(move || {
2147 if done {
2148 None
2149 } else {
2150 let indent = (row, LineIndent::from_chunks(&mut chunks));
2151 done = !chunks.next_line();
2152 row += 1;
2153 Some(indent)
2154 }
2155 })
2156 }
2157
    /// Returns the line indents for the given row range, excluding the end row, in reverse order.
2159 pub fn reversed_line_indents_in_row_range(
2160 &self,
2161 row_range: Range<u32>,
2162 ) -> impl Iterator<Item = (u32, LineIndent)> + '_ {
2163 let start = Point::new(row_range.start, 0).to_offset(self);
2164
2165 let end_point;
2166 let end;
2167 if row_range.end > row_range.start {
2168 end_point = Point::new(row_range.end - 1, self.line_len(row_range.end - 1));
2169 end = end_point.to_offset(self);
2170 } else {
2171 end_point = Point::new(row_range.start, 0);
2172 end = start;
2173 };
2174
2175 let mut chunks = self.as_rope().chunks_in_range(start..end);
        // Seek to the end of the range, then step back to the start of the last line if that line is non-empty.
2177 chunks.seek(end);
2178 if end_point.column > 0 {
2179 chunks.prev_line();
2180 }
2181
2182 let mut row = end_point.row;
2183 let mut done = false;
2184 std::iter::from_fn(move || {
2185 if done {
2186 None
2187 } else {
2188 let initial_offset = chunks.offset();
2189 let indent = (row, LineIndent::from_chunks(&mut chunks));
2190 if chunks.offset() > initial_offset {
2191 chunks.prev_line();
2192 }
2193 done = !chunks.prev_line();
2194 if !done {
2195 row -= 1;
2196 }
2197
2198 Some(indent)
2199 }
2200 })
2201 }
2202
2203 pub fn line_indent_for_row(&self, row: u32) -> LineIndent {
2204 LineIndent::from_iter(self.chars_at(Point::new(row, 0)))
2205 }
2206
2207 pub fn is_line_blank(&self, row: u32) -> bool {
2208 self.text_for_range(Point::new(row, 0)..Point::new(row, self.line_len(row)))
2209 .all(|chunk| chunk.matches(|c: char| !c.is_whitespace()).next().is_none())
2210 }
2211
2212 pub fn text_summary_for_range<D, O: ToOffset>(&self, range: Range<O>) -> D
2213 where
2214 D: TextDimension,
2215 {
2216 self.visible_text
2217 .cursor(range.start.to_offset(self))
2218 .summary(range.end.to_offset(self))
2219 }
2220
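    /// Resolves each anchor to the requested text dimension in a single forward pass over
    /// the fragment tree, so the anchors should be supplied in increasing position order.
    ///
    /// A minimal sketch (not compiled as a doctest), assuming `snapshot` is a
    /// `BufferSnapshot` and `Point` implements `TextDimension`:
    ///
    /// ```ignore
    /// let anchors = [snapshot.anchor_before(0), snapshot.anchor_after(5)];
    /// let points: Vec<Point> = snapshot.summaries_for_anchors(&anchors).collect();
    /// ```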
2221 pub fn summaries_for_anchors<'a, D, A>(&'a self, anchors: A) -> impl 'a + Iterator<Item = D>
2222 where
2223 D: 'a + TextDimension,
2224 A: 'a + IntoIterator<Item = &'a Anchor>,
2225 {
2226 let anchors = anchors.into_iter();
2227 self.summaries_for_anchors_with_payload::<D, _, ()>(anchors.map(|a| (a, ())))
2228 .map(|d| d.0)
2229 }
2230
2231 pub fn summaries_for_anchors_with_payload<'a, D, A, T>(
2232 &'a self,
2233 anchors: A,
2234 ) -> impl 'a + Iterator<Item = (D, T)>
2235 where
2236 D: 'a + TextDimension,
2237 A: 'a + IntoIterator<Item = (&'a Anchor, T)>,
2238 {
2239 let anchors = anchors.into_iter();
2240 let mut insertion_cursor = self.insertions.cursor::<InsertionFragmentKey>(());
2241 let mut fragment_cursor = self
2242 .fragments
2243 .cursor::<Dimensions<Option<&Locator>, usize>>(&None);
2244 let mut text_cursor = self.visible_text.cursor(0);
2245 let mut position = D::zero(());
2246
2247 anchors.map(move |(anchor, payload)| {
2248 if *anchor == Anchor::MIN {
2249 return (D::zero(()), payload);
2250 } else if *anchor == Anchor::MAX {
2251 return (D::from_text_summary(&self.visible_text.summary()), payload);
2252 }
2253
2254 let anchor_key = InsertionFragmentKey {
2255 timestamp: anchor.timestamp,
2256 split_offset: anchor.offset,
2257 };
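            // Seek to the insertion fragment containing the anchor. If the cursor lands on a
            // later key, or the anchor is left-biased at a fragment boundary, step back one
            // entry so the anchor resolves within the preceding fragment.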
2258 insertion_cursor.seek(&anchor_key, anchor.bias);
2259 if let Some(insertion) = insertion_cursor.item() {
2260 let comparison = sum_tree::KeyedItem::key(insertion).cmp(&anchor_key);
2261 if comparison == Ordering::Greater
2262 || (anchor.bias == Bias::Left
2263 && comparison == Ordering::Equal
2264 && anchor.offset > 0)
2265 {
2266 insertion_cursor.prev();
2267 }
2268 } else {
2269 insertion_cursor.prev();
2270 }
2271 let insertion = insertion_cursor.item().expect("invalid insertion");
2272 assert_eq!(insertion.timestamp, anchor.timestamp, "invalid insertion");
2273
2274 fragment_cursor.seek_forward(&Some(&insertion.fragment_id), Bias::Left);
2275 let fragment = fragment_cursor.item().unwrap();
2276 let mut fragment_offset = fragment_cursor.start().1;
2277 if fragment.visible {
2278 fragment_offset += anchor.offset - insertion.split_offset;
2279 }
2280
2281 position.add_assign(&text_cursor.summary(fragment_offset));
2282 (position, payload)
2283 })
2284 }
2285
2286 pub fn summary_for_anchor<D>(&self, anchor: &Anchor) -> D
2287 where
2288 D: TextDimension,
2289 {
2290 self.text_summary_for_range(0..self.offset_for_anchor(anchor))
2291 }
2292
2293 pub fn offset_for_anchor(&self, anchor: &Anchor) -> usize {
2294 if *anchor == Anchor::MIN {
2295 0
2296 } else if *anchor == Anchor::MAX {
2297 self.visible_text.len()
2298 } else {
2299 debug_assert!(anchor.buffer_id == Some(self.remote_id));
2300 let anchor_key = InsertionFragmentKey {
2301 timestamp: anchor.timestamp,
2302 split_offset: anchor.offset,
2303 };
2304 let mut insertion_cursor = self.insertions.cursor::<InsertionFragmentKey>(());
2305 insertion_cursor.seek(&anchor_key, anchor.bias);
2306 if let Some(insertion) = insertion_cursor.item() {
2307 let comparison = sum_tree::KeyedItem::key(insertion).cmp(&anchor_key);
2308 if comparison == Ordering::Greater
2309 || (anchor.bias == Bias::Left
2310 && comparison == Ordering::Equal
2311 && anchor.offset > 0)
2312 {
2313 insertion_cursor.prev();
2314 }
2315 } else {
2316 insertion_cursor.prev();
2317 }
2318
2319 let Some(insertion) = insertion_cursor
2320 .item()
2321 .filter(|insertion| insertion.timestamp == anchor.timestamp)
2322 else {
2323 panic!(
2324 "invalid anchor {:?}. buffer id: {}, version: {:?}",
2325 anchor, self.remote_id, self.version
2326 );
2327 };
2328
2329 let mut fragment_cursor = self
2330 .fragments
2331 .cursor::<Dimensions<Option<&Locator>, usize>>(&None);
2332 fragment_cursor.seek(&Some(&insertion.fragment_id), Bias::Left);
2333 let fragment = fragment_cursor.item().unwrap();
2334 let mut fragment_offset = fragment_cursor.start().1;
2335 if fragment.visible {
2336 fragment_offset += anchor.offset - insertion.split_offset;
2337 }
2338 fragment_offset
2339 }
2340 }
2341
2342 fn fragment_id_for_anchor(&self, anchor: &Anchor) -> &Locator {
2343 self.try_fragment_id_for_anchor(anchor).unwrap_or_else(|| {
2344 panic!(
2345 "invalid anchor {:?}. buffer id: {}, version: {:?}",
2346 anchor, self.remote_id, self.version,
2347 )
2348 })
2349 }
2350
2351 fn try_fragment_id_for_anchor(&self, anchor: &Anchor) -> Option<&Locator> {
2352 if *anchor == Anchor::MIN {
2353 Some(Locator::min_ref())
2354 } else if *anchor == Anchor::MAX {
2355 Some(Locator::max_ref())
2356 } else {
2357 let anchor_key = InsertionFragmentKey {
2358 timestamp: anchor.timestamp,
2359 split_offset: anchor.offset,
2360 };
2361 let mut insertion_cursor = self.insertions.cursor::<InsertionFragmentKey>(());
2362 insertion_cursor.seek(&anchor_key, anchor.bias);
2363 if let Some(insertion) = insertion_cursor.item() {
2364 let comparison = sum_tree::KeyedItem::key(insertion).cmp(&anchor_key);
2365 if comparison == Ordering::Greater
2366 || (anchor.bias == Bias::Left
2367 && comparison == Ordering::Equal
2368 && anchor.offset > 0)
2369 {
2370 insertion_cursor.prev();
2371 }
2372 } else {
2373 insertion_cursor.prev();
2374 }
2375
2376 insertion_cursor
2377 .item()
2378 .filter(|insertion| {
2379 !cfg!(debug_assertions) || insertion.timestamp == anchor.timestamp
2380 })
2381 .map(|insertion| &insertion.fragment_id)
2382 }
2383 }
2384
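    /// Returns an anchor just before the given position (equivalent to
    /// `anchor_at(position, Bias::Left)`).
    ///
    /// A minimal sketch (not compiled as a doctest) of creating an anchor and resolving it
    /// back to an offset, assuming `snapshot` is a `BufferSnapshot`:
    ///
    /// ```ignore
    /// let anchor = snapshot.anchor_before(Point::new(1, 0));
    /// // Resolve the anchor back to an offset; after edits, resolving against a newer
    /// // snapshot yields the anchor's updated position.
    /// let offset = anchor.to_offset(&snapshot);
    /// ```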
2385 pub fn anchor_before<T: ToOffset>(&self, position: T) -> Anchor {
2386 self.anchor_at(position, Bias::Left)
2387 }
2388
2389 pub fn anchor_after<T: ToOffset>(&self, position: T) -> Anchor {
2390 self.anchor_at(position, Bias::Right)
2391 }
2392
2393 pub fn anchor_at<T: ToOffset>(&self, position: T, bias: Bias) -> Anchor {
2394 self.anchor_at_offset(position.to_offset(self), bias)
2395 }
2396
2397 fn anchor_at_offset(&self, offset: usize, bias: Bias) -> Anchor {
2398 if bias == Bias::Left && offset == 0 {
2399 Anchor::MIN
2400 } else if bias == Bias::Right && offset == self.len() {
2401 Anchor::MAX
2402 } else {
2403 if !self.visible_text.is_char_boundary(offset) {
                // Find the character containing this byte offset so it can be reported in the panic message.
2405 let char_start = self.visible_text.floor_char_boundary(offset);
2406 // `char_start` must be less than len and a char boundary
2407 let ch = self.visible_text.chars_at(char_start).next().unwrap();
2408 let char_range = char_start..char_start + ch.len_utf8();
2409 panic!(
2410 "byte index {} is not a char boundary; it is inside {:?} (bytes {:?})",
2411 offset, ch, char_range,
2412 );
2413 }
2414 let mut fragment_cursor = self.fragments.cursor::<usize>(&None);
2415 fragment_cursor.seek(&offset, bias);
2416 let fragment = fragment_cursor.item().unwrap();
2417 let overshoot = offset - *fragment_cursor.start();
2418 Anchor {
2419 timestamp: fragment.timestamp,
2420 offset: fragment.insertion_offset + overshoot,
2421 bias,
2422 buffer_id: Some(self.remote_id),
2423 }
2424 }
2425 }
2426
2427 pub fn can_resolve(&self, anchor: &Anchor) -> bool {
2428 *anchor == Anchor::MIN
2429 || *anchor == Anchor::MAX
2430 || (Some(self.remote_id) == anchor.buffer_id && self.version.observed(anchor.timestamp))
2431 }
2432
2433 pub fn clip_offset(&self, offset: usize, bias: Bias) -> usize {
2434 self.visible_text.clip_offset(offset, bias)
2435 }
2436
2437 pub fn clip_point(&self, point: Point, bias: Bias) -> Point {
2438 self.visible_text.clip_point(point, bias)
2439 }
2440
2441 pub fn clip_offset_utf16(&self, offset: OffsetUtf16, bias: Bias) -> OffsetUtf16 {
2442 self.visible_text.clip_offset_utf16(offset, bias)
2443 }
2444
2445 pub fn clip_point_utf16(&self, point: Unclipped<PointUtf16>, bias: Bias) -> PointUtf16 {
2446 self.visible_text.clip_point_utf16(point, bias)
2447 }
2448
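    /// Returns the edits that have been applied since the given version, expressed in the
    /// requested text dimension.
    ///
    /// A minimal sketch (not compiled as a doctest); it assumes a mutable `Buffer` named
    /// `buffer` and that `usize` implements `TextDimension`:
    ///
    /// ```ignore
    /// let before = buffer.version().clone();
    /// buffer.edit([(2..4, "xy")]);
    /// for edit in buffer.edits_since::<usize>(&before) {
    ///     // `edit.old` is the replaced range in the old text, `edit.new` the
    ///     // corresponding range in the current text.
    /// }
    /// ```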
2449 pub fn edits_since<'a, D>(
2450 &'a self,
2451 since: &'a clock::Global,
2452 ) -> impl 'a + Iterator<Item = Edit<D>>
2453 where
2454 D: TextDimension + Ord,
2455 {
2456 self.edits_since_in_range(since, Anchor::MIN..Anchor::MAX)
2457 }
2458
2459 pub fn anchored_edits_since<'a, D>(
2460 &'a self,
2461 since: &'a clock::Global,
2462 ) -> impl 'a + Iterator<Item = (Edit<D>, Range<Anchor>)>
2463 where
2464 D: TextDimension + Ord,
2465 {
2466 self.anchored_edits_since_in_range(since, Anchor::MIN..Anchor::MAX)
2467 }
2468
2469 pub fn edits_since_in_range<'a, D>(
2470 &'a self,
2471 since: &'a clock::Global,
2472 range: Range<Anchor>,
2473 ) -> impl 'a + Iterator<Item = Edit<D>>
2474 where
2475 D: TextDimension + Ord,
2476 {
2477 self.anchored_edits_since_in_range(since, range)
2478 .map(|item| item.0)
2479 }
2480
2481 pub fn anchored_edits_since_in_range<'a, D>(
2482 &'a self,
2483 since: &'a clock::Global,
2484 range: Range<Anchor>,
2485 ) -> impl 'a + Iterator<Item = (Edit<D>, Range<Anchor>)>
2486 where
2487 D: TextDimension + Ord,
2488 {
2489 let fragments_cursor = if *since == self.version {
2490 None
2491 } else {
2492 let mut cursor = self.fragments.filter(&None, move |summary| {
2493 !since.observed_all(&summary.max_version)
2494 });
2495 cursor.next();
2496 Some(cursor)
2497 };
2498 let mut cursor = self
2499 .fragments
2500 .cursor::<Dimensions<Option<&Locator>, FragmentTextSummary>>(&None);
2501
2502 let start_fragment_id = self.fragment_id_for_anchor(&range.start);
2503 cursor.seek(&Some(start_fragment_id), Bias::Left);
2504 let mut visible_start = cursor.start().1.visible;
2505 let mut deleted_start = cursor.start().1.deleted;
2506 if let Some(fragment) = cursor.item() {
2507 let overshoot = range.start.offset - fragment.insertion_offset;
2508 if fragment.visible {
2509 visible_start += overshoot;
2510 } else {
2511 deleted_start += overshoot;
2512 }
2513 }
2514 let end_fragment_id = self.fragment_id_for_anchor(&range.end);
2515
2516 Edits {
2517 visible_cursor: self.visible_text.cursor(visible_start),
2518 deleted_cursor: self.deleted_text.cursor(deleted_start),
2519 fragments_cursor,
2520 undos: &self.undo_map,
2521 since,
2522 old_end: D::zero(()),
2523 new_end: D::zero(()),
2524 range: (start_fragment_id, range.start.offset)..(end_fragment_id, range.end.offset),
2525 buffer_id: self.remote_id,
2526 }
2527 }
2528
2529 pub fn has_edits_since_in_range(&self, since: &clock::Global, range: Range<Anchor>) -> bool {
2530 if *since != self.version {
2531 let start_fragment_id = self.fragment_id_for_anchor(&range.start);
2532 let end_fragment_id = self.fragment_id_for_anchor(&range.end);
2533 let mut cursor = self.fragments.filter::<_, usize>(&None, move |summary| {
2534 !since.observed_all(&summary.max_version)
2535 });
2536 cursor.next();
2537 while let Some(fragment) = cursor.item() {
2538 if fragment.id > *end_fragment_id {
2539 break;
2540 }
2541 if fragment.id > *start_fragment_id {
2542 let was_visible = fragment.was_visible(since, &self.undo_map);
2543 let is_visible = fragment.visible;
2544 if was_visible != is_visible {
2545 return true;
2546 }
2547 }
2548 cursor.next();
2549 }
2550 }
2551 false
2552 }
2553
2554 pub fn has_edits_since(&self, since: &clock::Global) -> bool {
2555 if *since != self.version {
2556 let mut cursor = self.fragments.filter::<_, usize>(&None, move |summary| {
2557 !since.observed_all(&summary.max_version)
2558 });
2559 cursor.next();
2560 while let Some(fragment) = cursor.item() {
2561 let was_visible = fragment.was_visible(since, &self.undo_map);
2562 let is_visible = fragment.visible;
2563 if was_visible != is_visible {
2564 return true;
2565 }
2566 cursor.next();
2567 }
2568 }
2569 false
2570 }
2571
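    /// Converts a range of offsets in the current buffer into the corresponding range at a
    /// prior version of this buffer (see [`Self::offsets_to_version`]).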
2572 pub fn range_to_version(&self, range: Range<usize>, version: &clock::Global) -> Range<usize> {
2573 let mut offsets = self.offsets_to_version([range.start, range.end], version);
2574 offsets.next().unwrap()..offsets.next().unwrap()
2575 }
2576
2577 /// Converts the given sequence of offsets into their corresponding offsets
2578 /// at a prior version of this buffer.
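    ///
    /// A minimal sketch (not compiled as a doctest); it assumes a mutable `Buffer` named
    /// `buffer`:
    ///
    /// ```ignore
    /// let old_version = buffer.version().clone();
    /// buffer.edit([(0..0, "abc")]);
    /// // Offsets 0 and 5 in the current text map back to 0 and 2 in the old text,
    /// // because three bytes were inserted at the start.
    /// let old: Vec<usize> = buffer.offsets_to_version([0, 5], &old_version).collect();
    /// assert_eq!(old, vec![0, 2]);
    /// ```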
2579 pub fn offsets_to_version<'a>(
2580 &'a self,
2581 offsets: impl 'a + IntoIterator<Item = usize>,
2582 version: &'a clock::Global,
2583 ) -> impl 'a + Iterator<Item = usize> {
2584 let mut edits = self.edits_since(version).peekable();
2585 let mut last_old_end = 0;
2586 let mut last_new_end = 0;
2587 offsets.into_iter().map(move |new_offset| {
2588 while let Some(edit) = edits.peek() {
2589 if edit.new.start > new_offset {
2590 break;
2591 }
2592
2593 if edit.new.end <= new_offset {
2594 last_new_end = edit.new.end;
2595 last_old_end = edit.old.end;
2596 edits.next();
2597 continue;
2598 }
2599
2600 let overshoot = new_offset - edit.new.start;
2601 return (edit.old.start + overshoot).min(edit.old.end);
2602 }
2603
2604 last_old_end + new_offset.saturating_sub(last_new_end)
2605 })
2606 }
2607
    /// Visually annotates a position or range with the `Debug` representation of a value.
    /// The callsite of this function is used as the key; previous annotations from the same
    /// callsite are removed.
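    ///
    /// A minimal usage sketch (not compiled as a doctest, and only available in debug
    /// builds), assuming `snapshot` is a `BufferSnapshot`:
    ///
    /// ```ignore
    /// // Highlight the first five bytes with a label while debugging.
    /// snapshot.debug(&(0..5), "prefix under inspection");
    /// ```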
2610 #[cfg(debug_assertions)]
2611 #[track_caller]
2612 pub fn debug<R, V>(&self, ranges: &R, value: V)
2613 where
2614 R: debug::ToDebugRanges,
2615 V: std::fmt::Debug,
2616 {
2617 self.debug_with_key(std::panic::Location::caller(), ranges, value);
2618 }
2619
2620 /// Visually annotates a position or range with the `Debug` representation of a value. Previous
2621 /// debug annotations with the same key will be removed. The key is also used to determine the
2622 /// annotation's color.
2623 #[cfg(debug_assertions)]
2624 pub fn debug_with_key<K, R, V>(&self, key: &K, ranges: &R, value: V)
2625 where
2626 K: std::hash::Hash + 'static,
2627 R: debug::ToDebugRanges,
2628 V: std::fmt::Debug,
2629 {
2630 let ranges = ranges
2631 .to_debug_ranges(self)
2632 .into_iter()
2633 .map(|range| self.anchor_after(range.start)..self.anchor_before(range.end))
2634 .collect();
2635 debug::GlobalDebugRanges::with_locked(|debug_ranges| {
2636 debug_ranges.insert(key, ranges, format!("{value:?}").into());
2637 });
2638 }
2639}
2640
2641struct RopeBuilder<'a> {
2642 old_visible_cursor: rope::Cursor<'a>,
2643 old_deleted_cursor: rope::Cursor<'a>,
2644 new_visible: Rope,
2645 new_deleted: Rope,
2646}
2647
2648impl<'a> RopeBuilder<'a> {
2649 fn new(old_visible_cursor: rope::Cursor<'a>, old_deleted_cursor: rope::Cursor<'a>) -> Self {
2650 Self {
2651 old_visible_cursor,
2652 old_deleted_cursor,
2653 new_visible: Rope::new(),
2654 new_deleted: Rope::new(),
2655 }
2656 }
2657
2658 fn append(&mut self, len: FragmentTextSummary) {
2659 self.push(len.visible, true, true);
2660 self.push(len.deleted, false, false);
2661 }
2662
2663 fn push_fragment(&mut self, fragment: &Fragment, was_visible: bool) {
2664 debug_assert!(fragment.len > 0);
2665 self.push(fragment.len, was_visible, fragment.visible)
2666 }
2667
2668 fn push(&mut self, len: usize, was_visible: bool, is_visible: bool) {
2669 let text = if was_visible {
2670 self.old_visible_cursor
2671 .slice(self.old_visible_cursor.offset() + len)
2672 } else {
2673 self.old_deleted_cursor
2674 .slice(self.old_deleted_cursor.offset() + len)
2675 };
2676 if is_visible {
2677 self.new_visible.append(text);
2678 } else {
2679 self.new_deleted.append(text);
2680 }
2681 }
2682
2683 fn push_str(&mut self, text: &str) {
2684 self.new_visible.push(text);
2685 }
2686
2687 fn finish(mut self) -> (Rope, Rope) {
2688 self.new_visible.append(self.old_visible_cursor.suffix());
2689 self.new_deleted.append(self.old_deleted_cursor.suffix());
2690 (self.new_visible, self.new_deleted)
2691 }
2692}
2693
2694impl<D: TextDimension + Ord, F: FnMut(&FragmentSummary) -> bool> Iterator for Edits<'_, D, F> {
2695 type Item = (Edit<D>, Range<Anchor>);
2696
2697 fn next(&mut self) -> Option<Self::Item> {
2698 let mut pending_edit: Option<Self::Item> = None;
2699 let cursor = self.fragments_cursor.as_mut()?;
2700
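        // Walk the fragments that changed relative to `since`, coalescing adjacent
        // insertions and deletions into a single pending edit and emitting it once an
        // unchanged region (or the end of the requested range) is reached.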
2701 while let Some(fragment) = cursor.item() {
2702 if fragment.id < *self.range.start.0 {
2703 cursor.next();
2704 continue;
2705 } else if fragment.id > *self.range.end.0 {
2706 break;
2707 }
2708
2709 if cursor.start().visible > self.visible_cursor.offset() {
2710 let summary = self.visible_cursor.summary(cursor.start().visible);
2711 self.old_end.add_assign(&summary);
2712 self.new_end.add_assign(&summary);
2713 }
2714
2715 if pending_edit
2716 .as_ref()
2717 .is_some_and(|(change, _)| change.new.end < self.new_end)
2718 {
2719 break;
2720 }
2721
2722 let start_anchor = Anchor {
2723 timestamp: fragment.timestamp,
2724 offset: fragment.insertion_offset,
2725 bias: Bias::Right,
2726 buffer_id: Some(self.buffer_id),
2727 };
2728 let end_anchor = Anchor {
2729 timestamp: fragment.timestamp,
2730 offset: fragment.insertion_offset + fragment.len,
2731 bias: Bias::Left,
2732 buffer_id: Some(self.buffer_id),
2733 };
2734
2735 if !fragment.was_visible(self.since, self.undos) && fragment.visible {
2736 let mut visible_end = cursor.end().visible;
2737 if fragment.id == *self.range.end.0 {
2738 visible_end = cmp::min(
2739 visible_end,
2740 cursor.start().visible + (self.range.end.1 - fragment.insertion_offset),
2741 );
2742 }
2743
2744 let fragment_summary = self.visible_cursor.summary(visible_end);
2745 let mut new_end = self.new_end;
2746 new_end.add_assign(&fragment_summary);
2747 if let Some((edit, range)) = pending_edit.as_mut() {
2748 edit.new.end = new_end;
2749 range.end = end_anchor;
2750 } else {
2751 pending_edit = Some((
2752 Edit {
2753 old: self.old_end..self.old_end,
2754 new: self.new_end..new_end,
2755 },
2756 start_anchor..end_anchor,
2757 ));
2758 }
2759
2760 self.new_end = new_end;
2761 } else if fragment.was_visible(self.since, self.undos) && !fragment.visible {
2762 let mut deleted_end = cursor.end().deleted;
2763 if fragment.id == *self.range.end.0 {
2764 deleted_end = cmp::min(
2765 deleted_end,
2766 cursor.start().deleted + (self.range.end.1 - fragment.insertion_offset),
2767 );
2768 }
2769
2770 if cursor.start().deleted > self.deleted_cursor.offset() {
2771 self.deleted_cursor.seek_forward(cursor.start().deleted);
2772 }
2773 let fragment_summary = self.deleted_cursor.summary(deleted_end);
2774 let mut old_end = self.old_end;
2775 old_end.add_assign(&fragment_summary);
2776 if let Some((edit, range)) = pending_edit.as_mut() {
2777 edit.old.end = old_end;
2778 range.end = end_anchor;
2779 } else {
2780 pending_edit = Some((
2781 Edit {
2782 old: self.old_end..old_end,
2783 new: self.new_end..self.new_end,
2784 },
2785 start_anchor..end_anchor,
2786 ));
2787 }
2788
2789 self.old_end = old_end;
2790 }
2791
2792 cursor.next();
2793 }
2794
2795 pending_edit
2796 }
2797}
2798
2799impl Fragment {
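    /// Whether this fragment is currently visible: its insertion has not been undone and
    /// every deletion that touches it has been undone.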
2800 fn is_visible(&self, undos: &UndoMap) -> bool {
2801 !undos.is_undone(self.timestamp) && self.deletions.iter().all(|d| undos.is_undone(*d))
2802 }
2803
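    /// Whether this fragment was visible at `version`: as of that version its insertion had
    /// been observed and not undone, and every deletion of it either had not yet been
    /// observed or had been undone.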
2804 fn was_visible(&self, version: &clock::Global, undos: &UndoMap) -> bool {
2805 (version.observed(self.timestamp) && !undos.was_undone(self.timestamp, version))
2806 && self
2807 .deletions
2808 .iter()
2809 .all(|d| !version.observed(*d) || undos.was_undone(*d, version))
2810 }
2811}
2812
2813impl sum_tree::Item for Fragment {
2814 type Summary = FragmentSummary;
2815
2816 fn summary(&self, _cx: &Option<clock::Global>) -> Self::Summary {
2817 let mut max_version = clock::Global::new();
2818 max_version.observe(self.timestamp);
2819 for deletion in &self.deletions {
2820 max_version.observe(*deletion);
2821 }
2822 max_version.join(&self.max_undos);
2823
2824 let mut min_insertion_version = clock::Global::new();
2825 min_insertion_version.observe(self.timestamp);
2826 let max_insertion_version = min_insertion_version.clone();
2827 if self.visible {
2828 FragmentSummary {
2829 max_id: self.id.clone(),
2830 text: FragmentTextSummary {
2831 visible: self.len,
2832 deleted: 0,
2833 },
2834 max_version,
2835 min_insertion_version,
2836 max_insertion_version,
2837 }
2838 } else {
2839 FragmentSummary {
2840 max_id: self.id.clone(),
2841 text: FragmentTextSummary {
2842 visible: 0,
2843 deleted: self.len,
2844 },
2845 max_version,
2846 min_insertion_version,
2847 max_insertion_version,
2848 }
2849 }
2850 }
2851}
2852
2853impl sum_tree::Summary for FragmentSummary {
2854 type Context<'a> = &'a Option<clock::Global>;
2855
2856 fn zero(_cx: Self::Context<'_>) -> Self {
2857 Default::default()
2858 }
2859
2860 fn add_summary(&mut self, other: &Self, _: Self::Context<'_>) {
2861 self.max_id.assign(&other.max_id);
2862 self.text.visible += &other.text.visible;
2863 self.text.deleted += &other.text.deleted;
2864 self.max_version.join(&other.max_version);
2865 self.min_insertion_version
2866 .meet(&other.min_insertion_version);
2867 self.max_insertion_version
2868 .join(&other.max_insertion_version);
2869 }
2870}
2871
2872impl Default for FragmentSummary {
2873 fn default() -> Self {
2874 FragmentSummary {
2875 max_id: Locator::min(),
2876 text: FragmentTextSummary::default(),
2877 max_version: clock::Global::new(),
2878 min_insertion_version: clock::Global::new(),
2879 max_insertion_version: clock::Global::new(),
2880 }
2881 }
2882}
2883
2884impl sum_tree::Item for InsertionFragment {
2885 type Summary = InsertionFragmentKey;
2886
2887 fn summary(&self, _cx: ()) -> Self::Summary {
2888 InsertionFragmentKey {
2889 timestamp: self.timestamp,
2890 split_offset: self.split_offset,
2891 }
2892 }
2893}
2894
2895impl sum_tree::KeyedItem for InsertionFragment {
2896 type Key = InsertionFragmentKey;
2897
2898 fn key(&self) -> Self::Key {
2899 sum_tree::Item::summary(self, ())
2900 }
2901}
2902
2903impl InsertionFragment {
2904 fn new(fragment: &Fragment) -> Self {
2905 Self {
2906 timestamp: fragment.timestamp,
2907 split_offset: fragment.insertion_offset,
2908 fragment_id: fragment.id.clone(),
2909 }
2910 }
2911
2912 fn insert_new(fragment: &Fragment) -> sum_tree::Edit<Self> {
2913 sum_tree::Edit::Insert(Self::new(fragment))
2914 }
2915}
2916
2917impl sum_tree::ContextLessSummary for InsertionFragmentKey {
2918 fn zero() -> Self {
2919 Default::default()
2920 }
2921
2922 fn add_summary(&mut self, summary: &Self) {
2923 *self = *summary;
2924 }
2925}
2926
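/// An offset into the buffer's full text, counting both visible text and text that has
/// been deleted but is still tracked in fragments.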
2927#[derive(Copy, Clone, Debug, Default, PartialEq, Eq, PartialOrd, Ord, Hash)]
2928pub struct FullOffset(pub usize);
2929
2930impl ops::AddAssign<usize> for FullOffset {
2931 fn add_assign(&mut self, rhs: usize) {
2932 self.0 += rhs;
2933 }
2934}
2935
2936impl ops::Add<usize> for FullOffset {
2937 type Output = Self;
2938
2939 fn add(mut self, rhs: usize) -> Self::Output {
2940 self += rhs;
2941 self
2942 }
2943}
2944
2945impl ops::Sub for FullOffset {
2946 type Output = usize;
2947
2948 fn sub(self, rhs: Self) -> Self::Output {
2949 self.0 - rhs.0
2950 }
2951}
2952
2953impl sum_tree::Dimension<'_, FragmentSummary> for usize {
2954 fn zero(_: &Option<clock::Global>) -> Self {
2955 Default::default()
2956 }
2957
2958 fn add_summary(&mut self, summary: &FragmentSummary, _: &Option<clock::Global>) {
2959 *self += summary.text.visible;
2960 }
2961}
2962
2963impl sum_tree::Dimension<'_, FragmentSummary> for FullOffset {
2964 fn zero(_: &Option<clock::Global>) -> Self {
2965 Default::default()
2966 }
2967
2968 fn add_summary(&mut self, summary: &FragmentSummary, _: &Option<clock::Global>) {
2969 self.0 += summary.text.visible + summary.text.deleted;
2970 }
2971}
2972
2973impl<'a> sum_tree::Dimension<'a, FragmentSummary> for Option<&'a Locator> {
2974 fn zero(_: &Option<clock::Global>) -> Self {
2975 Default::default()
2976 }
2977
2978 fn add_summary(&mut self, summary: &'a FragmentSummary, _: &Option<clock::Global>) {
2979 *self = Some(&summary.max_id);
2980 }
2981}
2982
2983impl sum_tree::SeekTarget<'_, FragmentSummary, FragmentTextSummary> for usize {
2984 fn cmp(
2985 &self,
2986 cursor_location: &FragmentTextSummary,
2987 _: &Option<clock::Global>,
2988 ) -> cmp::Ordering {
2989 Ord::cmp(self, &cursor_location.visible)
2990 }
2991}
2992
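/// A [`FullOffset`] interpreted relative to the version supplied as the cursor context.
/// While seeking, fragments whose insertions that version has fully observed contribute
/// their length; the value becomes `Invalid` when the cursor crosses a region whose
/// insertions the version has only partially observed.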
2993#[derive(Copy, Clone, Debug, Eq, PartialEq)]
2994enum VersionedFullOffset {
2995 Offset(FullOffset),
2996 Invalid,
2997}
2998
2999impl VersionedFullOffset {
3000 fn full_offset(&self) -> FullOffset {
3001 if let Self::Offset(position) = self {
3002 *position
3003 } else {
3004 panic!("invalid version")
3005 }
3006 }
3007}
3008
3009impl Default for VersionedFullOffset {
3010 fn default() -> Self {
3011 Self::Offset(Default::default())
3012 }
3013}
3014
3015impl<'a> sum_tree::Dimension<'a, FragmentSummary> for VersionedFullOffset {
3016 fn zero(_cx: &Option<clock::Global>) -> Self {
3017 Default::default()
3018 }
3019
3020 fn add_summary(&mut self, summary: &'a FragmentSummary, cx: &Option<clock::Global>) {
3021 if let Self::Offset(offset) = self {
3022 let version = cx.as_ref().unwrap();
3023 if version.observed_all(&summary.max_insertion_version) {
3024 *offset += summary.text.visible + summary.text.deleted;
3025 } else if version.observed_any(&summary.min_insertion_version) {
3026 *self = Self::Invalid;
3027 }
3028 }
3029 }
3030}
3031
3032impl sum_tree::SeekTarget<'_, FragmentSummary, Self> for VersionedFullOffset {
3033 fn cmp(&self, cursor_position: &Self, _: &Option<clock::Global>) -> cmp::Ordering {
3034 match (self, cursor_position) {
3035 (Self::Offset(a), Self::Offset(b)) => Ord::cmp(a, b),
3036 (Self::Offset(_), Self::Invalid) => cmp::Ordering::Less,
3037 (Self::Invalid, _) => unreachable!(),
3038 }
3039 }
3040}
3041
3042impl Operation {
3043 fn replica_id(&self) -> ReplicaId {
3044 operation_queue::Operation::lamport_timestamp(self).replica_id
3045 }
3046
3047 pub fn timestamp(&self) -> clock::Lamport {
3048 match self {
3049 Operation::Edit(edit) => edit.timestamp,
3050 Operation::Undo(undo) => undo.timestamp,
3051 }
3052 }
3053
3054 pub fn as_edit(&self) -> Option<&EditOperation> {
3055 match self {
3056 Operation::Edit(edit) => Some(edit),
3057 _ => None,
3058 }
3059 }
3060
3061 pub fn is_edit(&self) -> bool {
3062 matches!(self, Operation::Edit { .. })
3063 }
3064}
3065
3066impl operation_queue::Operation for Operation {
3067 fn lamport_timestamp(&self) -> clock::Lamport {
3068 match self {
3069 Operation::Edit(edit) => edit.timestamp,
3070 Operation::Undo(undo) => undo.timestamp,
3071 }
3072 }
3073}
3074
3075pub trait ToOffset {
3076 fn to_offset(&self, snapshot: &BufferSnapshot) -> usize;
    /// Returns the next offset in the buffer after this position, rounded up to a UTF-8 character boundary.
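    ///
    /// For example (illustrative only), with the text `"héllo"` the offset `1` points at the
    /// start of the two-byte character `'é'`, so `1.to_next_offset(&snapshot)` returns `3`
    /// rather than landing inside that character.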
3078 fn to_next_offset(&self, snapshot: &BufferSnapshot) -> usize {
3079 snapshot
3080 .visible_text
3081 .ceil_char_boundary(self.to_offset(snapshot) + 1)
3082 }
    /// Returns the previous offset in the buffer before this position, rounded down to a UTF-8 character boundary.
3084 fn to_previous_offset(&self, snapshot: &BufferSnapshot) -> usize {
3085 snapshot
3086 .visible_text
3087 .floor_char_boundary(self.to_offset(snapshot).saturating_sub(1))
3088 }
3089}
3090
3091impl ToOffset for Point {
3092 fn to_offset(&self, snapshot: &BufferSnapshot) -> usize {
3093 snapshot.point_to_offset(*self)
3094 }
3095}
3096
3097impl ToOffset for usize {
3098 #[track_caller]
3099 fn to_offset(&self, snapshot: &BufferSnapshot) -> usize {
3100 assert!(
3101 *self <= snapshot.len(),
3102 "offset {} is out of range, snapshot length is {}",
3103 self,
3104 snapshot.len()
3105 );
3106 *self
3107 }
3108}
3109
3110impl ToOffset for Anchor {
3111 fn to_offset(&self, snapshot: &BufferSnapshot) -> usize {
3112 snapshot.summary_for_anchor(self)
3113 }
3114}
3115
3116impl<T: ToOffset> ToOffset for &T {
3117 fn to_offset(&self, content: &BufferSnapshot) -> usize {
3118 (*self).to_offset(content)
3119 }
3120}
3121
3122impl ToOffset for PointUtf16 {
3123 fn to_offset(&self, snapshot: &BufferSnapshot) -> usize {
3124 snapshot.point_utf16_to_offset(*self)
3125 }
3126}
3127
3128impl ToOffset for Unclipped<PointUtf16> {
3129 fn to_offset(&self, snapshot: &BufferSnapshot) -> usize {
3130 snapshot.unclipped_point_utf16_to_offset(*self)
3131 }
3132}
3133
3134pub trait ToPoint {
3135 fn to_point(&self, snapshot: &BufferSnapshot) -> Point;
3136}
3137
3138impl ToPoint for Anchor {
3139 fn to_point(&self, snapshot: &BufferSnapshot) -> Point {
3140 snapshot.summary_for_anchor(self)
3141 }
3142}
3143
3144impl ToPoint for usize {
3145 fn to_point(&self, snapshot: &BufferSnapshot) -> Point {
3146 snapshot.offset_to_point(*self)
3147 }
3148}
3149
3150impl ToPoint for Point {
3151 fn to_point(&self, _: &BufferSnapshot) -> Point {
3152 *self
3153 }
3154}
3155
3156impl ToPoint for Unclipped<PointUtf16> {
3157 fn to_point(&self, snapshot: &BufferSnapshot) -> Point {
3158 snapshot.unclipped_point_utf16_to_point(*self)
3159 }
3160}
3161
3162pub trait ToPointUtf16 {
3163 fn to_point_utf16(&self, snapshot: &BufferSnapshot) -> PointUtf16;
3164}
3165
3166impl ToPointUtf16 for Anchor {
3167 fn to_point_utf16(&self, snapshot: &BufferSnapshot) -> PointUtf16 {
3168 snapshot.summary_for_anchor(self)
3169 }
3170}
3171
3172impl ToPointUtf16 for usize {
3173 fn to_point_utf16(&self, snapshot: &BufferSnapshot) -> PointUtf16 {
3174 snapshot.offset_to_point_utf16(*self)
3175 }
3176}
3177
3178impl ToPointUtf16 for PointUtf16 {
3179 fn to_point_utf16(&self, _: &BufferSnapshot) -> PointUtf16 {
3180 *self
3181 }
3182}
3183
3184impl ToPointUtf16 for Point {
3185 fn to_point_utf16(&self, snapshot: &BufferSnapshot) -> PointUtf16 {
3186 snapshot.point_to_point_utf16(*self)
3187 }
3188}
3189
3190pub trait ToOffsetUtf16 {
3191 fn to_offset_utf16(&self, snapshot: &BufferSnapshot) -> OffsetUtf16;
3192}
3193
3194impl ToOffsetUtf16 for Anchor {
3195 fn to_offset_utf16(&self, snapshot: &BufferSnapshot) -> OffsetUtf16 {
3196 snapshot.summary_for_anchor(self)
3197 }
3198}
3199
3200impl ToOffsetUtf16 for usize {
3201 fn to_offset_utf16(&self, snapshot: &BufferSnapshot) -> OffsetUtf16 {
3202 snapshot.offset_to_offset_utf16(*self)
3203 }
3204}
3205
3206impl ToOffsetUtf16 for OffsetUtf16 {
3207 fn to_offset_utf16(&self, _snapshot: &BufferSnapshot) -> OffsetUtf16 {
3208 *self
3209 }
3210}
3211
3212pub trait FromAnchor {
3213 fn from_anchor(anchor: &Anchor, snapshot: &BufferSnapshot) -> Self;
3214}
3215
3216impl FromAnchor for Anchor {
3217 fn from_anchor(anchor: &Anchor, _snapshot: &BufferSnapshot) -> Self {
3218 *anchor
3219 }
3220}
3221
3222impl FromAnchor for Point {
3223 fn from_anchor(anchor: &Anchor, snapshot: &BufferSnapshot) -> Self {
3224 snapshot.summary_for_anchor(anchor)
3225 }
3226}
3227
3228impl FromAnchor for PointUtf16 {
3229 fn from_anchor(anchor: &Anchor, snapshot: &BufferSnapshot) -> Self {
3230 snapshot.summary_for_anchor(anchor)
3231 }
3232}
3233
3234impl FromAnchor for usize {
3235 fn from_anchor(anchor: &Anchor, snapshot: &BufferSnapshot) -> Self {
3236 snapshot.summary_for_anchor(anchor)
3237 }
3238}
3239
3240#[derive(Clone, Copy, Debug, PartialEq)]
3241pub enum LineEnding {
3242 Unix,
3243 Windows,
3244}
3245
3246impl Default for LineEnding {
3247 fn default() -> Self {
3248 #[cfg(unix)]
3249 return Self::Unix;
3250
3251 #[cfg(not(unix))]
3252 return Self::Windows;
3253 }
3254}
3255
3256impl LineEnding {
3257 pub fn as_str(&self) -> &'static str {
3258 match self {
3259 LineEnding::Unix => "\n",
3260 LineEnding::Windows => "\r\n",
3261 }
3262 }
3263
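    /// Guesses the line ending style from the first occurrence of `'\n'` within (roughly)
    /// the first kilobyte of `text`, falling back to the platform default.
    ///
    /// A minimal sketch (not compiled as a doctest):
    ///
    /// ```ignore
    /// assert_eq!(LineEnding::detect("a\r\nb"), LineEnding::Windows);
    /// assert_eq!(LineEnding::detect("a\nb"), LineEnding::Unix);
    /// ```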
3264 pub fn detect(text: &str) -> Self {
3265 let mut max_ix = cmp::min(text.len(), 1000);
3266 while !text.is_char_boundary(max_ix) {
3267 max_ix -= 1;
3268 }
3269
3270 if let Some(ix) = text[..max_ix].find(['\n']) {
3271 if ix > 0 && text.as_bytes()[ix - 1] == b'\r' {
3272 Self::Windows
3273 } else {
3274 Self::Unix
3275 }
3276 } else {
3277 Self::default()
3278 }
3279 }
3280
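    /// Replaces every `\r\n` and lone `\r` in `text` with `\n`, in place.
    ///
    /// A minimal sketch (not compiled as a doctest):
    ///
    /// ```ignore
    /// let mut text = String::from("a\r\nb\rc");
    /// LineEnding::normalize(&mut text);
    /// assert_eq!(text, "a\nb\nc");
    /// ```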
3281 pub fn normalize(text: &mut String) {
3282 if let Cow::Owned(replaced) = LINE_SEPARATORS_REGEX.replace_all(text, "\n") {
3283 *text = replaced;
3284 }
3285 }
3286
3287 pub fn normalize_arc(text: Arc<str>) -> Arc<str> {
3288 if let Cow::Owned(replaced) = LINE_SEPARATORS_REGEX.replace_all(&text, "\n") {
3289 replaced.into()
3290 } else {
3291 text
3292 }
3293 }
3294
3295 pub fn normalize_cow(text: Cow<str>) -> Cow<str> {
3296 if let Cow::Owned(replaced) = LINE_SEPARATORS_REGEX.replace_all(&text, "\n") {
3297 replaced.into()
3298 } else {
3299 text
3300 }
3301 }
3302}
3303
3304#[cfg(debug_assertions)]
3305pub mod debug {
3306 use super::*;
3307 use parking_lot::Mutex;
3308 use std::any::TypeId;
3309 use std::hash::{Hash, Hasher};
3310
3311 static GLOBAL_DEBUG_RANGES: Mutex<Option<GlobalDebugRanges>> = Mutex::new(None);
3312
3313 pub struct GlobalDebugRanges {
3314 pub ranges: Vec<DebugRange>,
3315 key_to_occurrence_index: HashMap<Key, usize>,
3316 next_occurrence_index: usize,
3317 }
3318
3319 pub struct DebugRange {
3320 key: Key,
3321 pub ranges: Vec<Range<Anchor>>,
3322 pub value: Arc<str>,
3323 pub occurrence_index: usize,
3324 }
3325
3326 #[derive(Debug, Clone, PartialEq, Eq, Hash)]
3327 struct Key {
3328 type_id: TypeId,
3329 hash: u64,
3330 }
3331
3332 impl GlobalDebugRanges {
3333 pub fn with_locked<R>(f: impl FnOnce(&mut Self) -> R) -> R {
3334 let mut state = GLOBAL_DEBUG_RANGES.lock();
3335 if state.is_none() {
3336 *state = Some(GlobalDebugRanges {
3337 ranges: Vec::new(),
3338 key_to_occurrence_index: HashMap::default(),
3339 next_occurrence_index: 0,
3340 });
3341 }
3342 if let Some(global_debug_ranges) = state.as_mut() {
3343 f(global_debug_ranges)
3344 } else {
3345 unreachable!()
3346 }
3347 }
3348
3349 pub fn insert<K: Hash + 'static>(
3350 &mut self,
3351 key: &K,
3352 ranges: Vec<Range<Anchor>>,
3353 value: Arc<str>,
3354 ) {
3355 let occurrence_index = *self
3356 .key_to_occurrence_index
3357 .entry(Key::new(key))
3358 .or_insert_with(|| {
3359 let occurrence_index = self.next_occurrence_index;
3360 self.next_occurrence_index += 1;
3361 occurrence_index
3362 });
3363 let key = Key::new(key);
3364 let existing = self
3365 .ranges
3366 .iter()
3367 .enumerate()
3368 .rfind(|(_, existing)| existing.key == key);
3369 if let Some((existing_ix, _)) = existing {
3370 self.ranges.remove(existing_ix);
3371 }
3372 self.ranges.push(DebugRange {
3373 ranges,
3374 key,
3375 value,
3376 occurrence_index,
3377 });
3378 }
3379
3380 pub fn remove<K: Hash + 'static>(&mut self, key: &K) {
3381 self.remove_impl(&Key::new(key));
3382 }
3383
3384 fn remove_impl(&mut self, key: &Key) {
3385 let existing = self
3386 .ranges
3387 .iter()
3388 .enumerate()
3389 .rfind(|(_, existing)| &existing.key == key);
3390 if let Some((existing_ix, _)) = existing {
3391 self.ranges.remove(existing_ix);
3392 }
3393 }
3394
3395 pub fn remove_all_with_key_type<K: 'static>(&mut self) {
3396 self.ranges
3397 .retain(|item| item.key.type_id != TypeId::of::<K>());
3398 }
3399 }
3400
3401 impl Key {
3402 fn new<K: Hash + 'static>(key: &K) -> Self {
3403 let type_id = TypeId::of::<K>();
3404 let mut hasher = collections::FxHasher::default();
3405 key.hash(&mut hasher);
3406 Key {
3407 type_id,
3408 hash: hasher.finish(),
3409 }
3410 }
3411 }
3412
3413 pub trait ToDebugRanges {
3414 fn to_debug_ranges(&self, snapshot: &BufferSnapshot) -> Vec<Range<usize>>;
3415 }
3416
3417 impl<T: ToOffset> ToDebugRanges for T {
3418 fn to_debug_ranges(&self, snapshot: &BufferSnapshot) -> Vec<Range<usize>> {
3419 [self.to_offset(snapshot)].to_debug_ranges(snapshot)
3420 }
3421 }
3422
3423 impl<T: ToOffset + Clone> ToDebugRanges for Range<T> {
3424 fn to_debug_ranges(&self, snapshot: &BufferSnapshot) -> Vec<Range<usize>> {
3425 [self.clone()].to_debug_ranges(snapshot)
3426 }
3427 }
3428
3429 impl<T: ToOffset> ToDebugRanges for Vec<T> {
3430 fn to_debug_ranges(&self, snapshot: &BufferSnapshot) -> Vec<Range<usize>> {
3431 self.as_slice().to_debug_ranges(snapshot)
3432 }
3433 }
3434
3435 impl<T: ToOffset> ToDebugRanges for Vec<Range<T>> {
3436 fn to_debug_ranges(&self, snapshot: &BufferSnapshot) -> Vec<Range<usize>> {
3437 self.as_slice().to_debug_ranges(snapshot)
3438 }
3439 }
3440
3441 impl<T: ToOffset> ToDebugRanges for [T] {
3442 fn to_debug_ranges(&self, snapshot: &BufferSnapshot) -> Vec<Range<usize>> {
3443 self.iter()
3444 .map(|item| {
3445 let offset = item.to_offset(snapshot);
3446 offset..offset
3447 })
3448 .collect()
3449 }
3450 }
3451
3452 impl<T: ToOffset> ToDebugRanges for [Range<T>] {
3453 fn to_debug_ranges(&self, snapshot: &BufferSnapshot) -> Vec<Range<usize>> {
3454 self.iter()
3455 .map(|range| range.start.to_offset(snapshot)..range.end.to_offset(snapshot))
3456 .collect()
3457 }
3458 }
3459}