1mod anchor;
2pub mod locator;
3#[cfg(any(test, feature = "test-support"))]
4pub mod network;
5pub mod operation_queue;
6mod patch;
7mod selection;
8pub mod subscription;
9#[cfg(test)]
10mod tests;
11mod undo_map;
12
13pub use anchor::*;
14use anyhow::{Context as _, Result};
15use clock::Lamport;
16pub use clock::ReplicaId;
17use collections::{HashMap, HashSet};
18use locator::Locator;
19use operation_queue::OperationQueue;
20pub use patch::Patch;
21use postage::{oneshot, prelude::*};
22
23pub use rope::*;
24pub use selection::*;
25use std::{
26 cmp::{self, Ordering, Reverse},
27 fmt::Display,
28 future::Future,
29 iter::Iterator,
30 num::NonZeroU64,
31 ops::{self, Deref, Range, Sub},
32 str,
33 sync::Arc,
34 time::{Duration, Instant},
35};
36pub use subscription::*;
37pub use sum_tree::Bias;
38use sum_tree::{Dimensions, FilterCursor, SumTree, TreeMap, TreeSet};
39use undo_map::UndoMap;
40
41#[cfg(any(test, feature = "test-support"))]
42use util::RandomCharIter;
43
44pub type TransactionId = clock::Lamport;
45
46pub struct Buffer {
47 snapshot: BufferSnapshot,
48 history: History,
49 deferred_ops: OperationQueue<Operation>,
50 deferred_replicas: HashSet<ReplicaId>,
51 pub lamport_clock: clock::Lamport,
52 subscriptions: Topic,
53 edit_id_resolvers: HashMap<clock::Lamport, Vec<oneshot::Sender<()>>>,
54 wait_for_version_txs: Vec<(clock::Global, oneshot::Sender<()>)>,
55}
56
57#[repr(transparent)]
58#[derive(Clone, Copy, Debug, Hash, PartialEq, PartialOrd, Ord, Eq)]
59pub struct BufferId(NonZeroU64);
60
61impl Display for BufferId {
62 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
63 write!(f, "{}", self.0)
64 }
65}
66
67impl From<NonZeroU64> for BufferId {
68 fn from(id: NonZeroU64) -> Self {
69 BufferId(id)
70 }
71}
72
73impl BufferId {
    /// Returns `Err` if `id` is outside of the `BufferId` domain, i.e. if it is zero.
75 pub fn new(id: u64) -> anyhow::Result<Self> {
76 let id = NonZeroU64::new(id).context("Buffer id cannot be 0.")?;
77 Ok(Self(id))
78 }
79
80 /// Increments this buffer id, returning the old value.
81 /// So that's a post-increment operator in disguise.
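    ///
    /// A minimal sketch of the post-increment behavior (illustrative values only):
    ///
    /// ```ignore
    /// let mut id = BufferId::new(1).unwrap();
    /// let old = id.next();
    /// assert_eq!(u64::from(old), 1); // the value before the increment
    /// assert_eq!(u64::from(id), 2); // the id now refers to the next buffer
    /// ```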
82 pub fn next(&mut self) -> Self {
83 let old = *self;
84 self.0 = self.0.saturating_add(1);
85 old
86 }
87
88 pub fn to_proto(self) -> u64 {
89 self.into()
90 }
91}
92
93impl From<BufferId> for u64 {
94 fn from(id: BufferId) -> Self {
95 id.0.get()
96 }
97}
98
99#[derive(Clone)]
100pub struct BufferSnapshot {
101 replica_id: ReplicaId,
102 remote_id: BufferId,
103 visible_text: Rope,
104 deleted_text: Rope,
105 line_ending: LineEnding,
106 undo_map: UndoMap,
107 fragments: SumTree<Fragment>,
108 insertions: SumTree<InsertionFragment>,
109 insertion_slices: TreeSet<InsertionSlice>,
110 pub version: clock::Global,
111}
112
113#[derive(Clone, Debug)]
114pub struct HistoryEntry {
115 transaction: Transaction,
116 first_edit_at: Instant,
117 last_edit_at: Instant,
118 suppress_grouping: bool,
119}
120
121#[derive(Clone, Debug)]
122pub struct Transaction {
123 pub id: TransactionId,
124 pub edit_ids: Vec<clock::Lamport>,
125 pub start: clock::Global,
126}
127
128impl Transaction {
129 pub fn merge_in(&mut self, other: Transaction) {
130 self.edit_ids.extend(other.edit_ids);
131 }
132}
133
134impl HistoryEntry {
135 pub fn transaction_id(&self) -> TransactionId {
136 self.transaction.id
137 }
138}
139
140struct History {
141 base_text: Rope,
142 operations: TreeMap<clock::Lamport, Operation>,
143 undo_stack: Vec<HistoryEntry>,
144 redo_stack: Vec<HistoryEntry>,
145 transaction_depth: usize,
146 group_interval: Duration,
147}
148
149#[derive(Clone, Debug, Eq, PartialEq)]
150struct InsertionSlice {
151 edit_id: clock::Lamport,
152 insertion_id: clock::Lamport,
153 range: Range<usize>,
154}
155
156impl Ord for InsertionSlice {
157 fn cmp(&self, other: &Self) -> Ordering {
158 self.edit_id
159 .cmp(&other.edit_id)
160 .then_with(|| self.insertion_id.cmp(&other.insertion_id))
161 .then_with(|| self.range.start.cmp(&other.range.start))
162 .then_with(|| self.range.end.cmp(&other.range.end))
163 }
164}
165
166impl PartialOrd for InsertionSlice {
167 fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
168 Some(self.cmp(other))
169 }
170}
171
172impl InsertionSlice {
173 fn from_fragment(edit_id: clock::Lamport, fragment: &Fragment) -> Self {
174 Self {
175 edit_id,
176 insertion_id: fragment.timestamp,
177 range: fragment.insertion_offset..fragment.insertion_offset + fragment.len,
178 }
179 }
180}
181
182impl History {
183 pub fn new(base_text: Rope) -> Self {
184 Self {
185 base_text,
186 operations: Default::default(),
187 undo_stack: Vec::new(),
188 redo_stack: Vec::new(),
189 transaction_depth: 0,
190 // Don't group transactions in tests unless we opt in, because it's a footgun.
191 #[cfg(any(test, feature = "test-support"))]
192 group_interval: Duration::ZERO,
193 #[cfg(not(any(test, feature = "test-support")))]
194 group_interval: Duration::from_millis(300),
195 }
196 }
197
198 fn push(&mut self, op: Operation) {
199 self.operations.insert(op.timestamp(), op);
200 }
201
202 fn start_transaction(
203 &mut self,
204 start: clock::Global,
205 now: Instant,
206 clock: &mut clock::Lamport,
207 ) -> Option<TransactionId> {
208 self.transaction_depth += 1;
209 if self.transaction_depth == 1 {
210 let id = clock.tick();
211 self.undo_stack.push(HistoryEntry {
212 transaction: Transaction {
213 id,
214 start,
215 edit_ids: Default::default(),
216 },
217 first_edit_at: now,
218 last_edit_at: now,
219 suppress_grouping: false,
220 });
221 Some(id)
222 } else {
223 None
224 }
225 }
226
227 fn end_transaction(&mut self, now: Instant) -> Option<&HistoryEntry> {
228 assert_ne!(self.transaction_depth, 0);
229 self.transaction_depth -= 1;
230 if self.transaction_depth == 0 {
231 if self
232 .undo_stack
233 .last()
234 .unwrap()
235 .transaction
236 .edit_ids
237 .is_empty()
238 {
239 self.undo_stack.pop();
240 None
241 } else {
242 self.redo_stack.clear();
243 let entry = self.undo_stack.last_mut().unwrap();
244 entry.last_edit_at = now;
245 Some(entry)
246 }
247 } else {
248 None
249 }
250 }
251
252 fn group(&mut self) -> Option<TransactionId> {
253 let mut count = 0;
254 let mut entries = self.undo_stack.iter();
255 if let Some(mut entry) = entries.next_back() {
256 while let Some(prev_entry) = entries.next_back() {
257 if !prev_entry.suppress_grouping
258 && entry.first_edit_at - prev_entry.last_edit_at < self.group_interval
259 {
260 entry = prev_entry;
261 count += 1;
262 } else {
263 break;
264 }
265 }
266 }
267 self.group_trailing(count)
268 }
269
270 fn group_until(&mut self, transaction_id: TransactionId) {
271 let mut count = 0;
272 for entry in self.undo_stack.iter().rev() {
273 if entry.transaction_id() == transaction_id {
274 self.group_trailing(count);
275 break;
276 } else if entry.suppress_grouping {
277 break;
278 } else {
279 count += 1;
280 }
281 }
282 }
283
284 fn group_trailing(&mut self, n: usize) -> Option<TransactionId> {
285 let new_len = self.undo_stack.len() - n;
286 let (entries_to_keep, entries_to_merge) = self.undo_stack.split_at_mut(new_len);
287 if let Some(last_entry) = entries_to_keep.last_mut() {
288 for entry in &*entries_to_merge {
289 for edit_id in &entry.transaction.edit_ids {
290 last_entry.transaction.edit_ids.push(*edit_id);
291 }
292 }
293
294 if let Some(entry) = entries_to_merge.last_mut() {
295 last_entry.last_edit_at = entry.last_edit_at;
296 }
297 }
298
299 self.undo_stack.truncate(new_len);
300 self.undo_stack.last().map(|e| e.transaction.id)
301 }
302
303 fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
304 self.undo_stack.last_mut().map(|entry| {
305 entry.suppress_grouping = true;
306 &entry.transaction
307 })
308 }
309
310 fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
311 assert_eq!(self.transaction_depth, 0);
312 self.undo_stack.push(HistoryEntry {
313 transaction,
314 first_edit_at: now,
315 last_edit_at: now,
316 suppress_grouping: false,
317 });
318 }
319
320 /// Differs from `push_transaction` in that it does not clear the redo
321 /// stack. Intended to be used to create a parent transaction to merge
322 /// potential child transactions into.
323 ///
324 /// The caller is responsible for removing it from the undo history using
325 /// `forget_transaction` if no edits are merged into it. Otherwise, if edits
326 /// are merged into this transaction, the caller is responsible for ensuring
    /// the redo stack is cleared. The easiest way to ensure the redo stack is
    /// cleared is to create transactions with the usual `start_transaction` and
    /// `end_transaction` methods and merge the resulting transactions into
    /// the transaction created by this method.
331 fn push_empty_transaction(
332 &mut self,
333 start: clock::Global,
334 now: Instant,
335 clock: &mut clock::Lamport,
336 ) -> TransactionId {
337 assert_eq!(self.transaction_depth, 0);
338 let id = clock.tick();
339 let transaction = Transaction {
340 id,
341 start,
342 edit_ids: Vec::new(),
343 };
344 self.undo_stack.push(HistoryEntry {
345 transaction,
346 first_edit_at: now,
347 last_edit_at: now,
348 suppress_grouping: false,
349 });
350 id
351 }
352
353 fn push_undo(&mut self, op_id: clock::Lamport) {
354 assert_ne!(self.transaction_depth, 0);
355 if let Some(Operation::Edit(_)) = self.operations.get(&op_id) {
356 let last_transaction = self.undo_stack.last_mut().unwrap();
357 last_transaction.transaction.edit_ids.push(op_id);
358 }
359 }
360
361 fn pop_undo(&mut self) -> Option<&HistoryEntry> {
362 assert_eq!(self.transaction_depth, 0);
363 if let Some(entry) = self.undo_stack.pop() {
364 self.redo_stack.push(entry);
365 self.redo_stack.last()
366 } else {
367 None
368 }
369 }
370
371 fn remove_from_undo(&mut self, transaction_id: TransactionId) -> Option<&HistoryEntry> {
372 assert_eq!(self.transaction_depth, 0);
373
374 let entry_ix = self
375 .undo_stack
376 .iter()
377 .rposition(|entry| entry.transaction.id == transaction_id)?;
378 let entry = self.undo_stack.remove(entry_ix);
379 self.redo_stack.push(entry);
380 self.redo_stack.last()
381 }
382
383 fn remove_from_undo_until(&mut self, transaction_id: TransactionId) -> &[HistoryEntry] {
384 assert_eq!(self.transaction_depth, 0);
385
386 let redo_stack_start_len = self.redo_stack.len();
387 if let Some(entry_ix) = self
388 .undo_stack
389 .iter()
390 .rposition(|entry| entry.transaction.id == transaction_id)
391 {
392 self.redo_stack
393 .extend(self.undo_stack.drain(entry_ix..).rev());
394 }
395 &self.redo_stack[redo_stack_start_len..]
396 }
397
398 fn forget(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
399 assert_eq!(self.transaction_depth, 0);
400 if let Some(entry_ix) = self
401 .undo_stack
402 .iter()
403 .rposition(|entry| entry.transaction.id == transaction_id)
404 {
405 Some(self.undo_stack.remove(entry_ix).transaction)
406 } else if let Some(entry_ix) = self
407 .redo_stack
408 .iter()
409 .rposition(|entry| entry.transaction.id == transaction_id)
410 {
411 Some(self.redo_stack.remove(entry_ix).transaction)
412 } else {
413 None
414 }
415 }
416
417 fn transaction(&self, transaction_id: TransactionId) -> Option<&Transaction> {
418 let entry = self
419 .undo_stack
420 .iter()
421 .rfind(|entry| entry.transaction.id == transaction_id)
422 .or_else(|| {
423 self.redo_stack
424 .iter()
425 .rfind(|entry| entry.transaction.id == transaction_id)
426 })?;
427 Some(&entry.transaction)
428 }
429
430 fn transaction_mut(&mut self, transaction_id: TransactionId) -> Option<&mut Transaction> {
431 let entry = self
432 .undo_stack
433 .iter_mut()
434 .rfind(|entry| entry.transaction.id == transaction_id)
435 .or_else(|| {
436 self.redo_stack
437 .iter_mut()
438 .rfind(|entry| entry.transaction.id == transaction_id)
439 })?;
440 Some(&mut entry.transaction)
441 }
442
443 fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
444 if let Some(transaction) = self.forget(transaction)
445 && let Some(destination) = self.transaction_mut(destination)
446 {
447 destination.edit_ids.extend(transaction.edit_ids);
448 }
449 }
450
451 fn pop_redo(&mut self) -> Option<&HistoryEntry> {
452 assert_eq!(self.transaction_depth, 0);
453 if let Some(entry) = self.redo_stack.pop() {
454 self.undo_stack.push(entry);
455 self.undo_stack.last()
456 } else {
457 None
458 }
459 }
460
461 fn remove_from_redo(&mut self, transaction_id: TransactionId) -> &[HistoryEntry] {
462 assert_eq!(self.transaction_depth, 0);
463
464 let undo_stack_start_len = self.undo_stack.len();
465 if let Some(entry_ix) = self
466 .redo_stack
467 .iter()
468 .rposition(|entry| entry.transaction.id == transaction_id)
469 {
470 self.undo_stack
471 .extend(self.redo_stack.drain(entry_ix..).rev());
472 }
473 &self.undo_stack[undo_stack_start_len..]
474 }
475}
476
477struct Edits<'a, D: TextDimension, F: FnMut(&FragmentSummary) -> bool> {
478 visible_cursor: rope::Cursor<'a>,
479 deleted_cursor: rope::Cursor<'a>,
480 fragments_cursor: Option<FilterCursor<'a, 'static, F, Fragment, FragmentTextSummary>>,
481 undos: &'a UndoMap,
482 since: &'a clock::Global,
483 old_end: D,
484 new_end: D,
485 range: Range<(&'a Locator, usize)>,
486 buffer_id: BufferId,
487}
488
489#[derive(Clone, Debug, Default, Eq, PartialEq)]
490pub struct Edit<D> {
491 pub old: Range<D>,
492 pub new: Range<D>,
493}
494
495impl<D> Edit<D>
496where
497 D: Sub<D, Output = D> + PartialEq + Copy,
498{
499 pub fn old_len(&self) -> D {
500 self.old.end - self.old.start
501 }
502
503 pub fn new_len(&self) -> D {
504 self.new.end - self.new.start
505 }
506
507 pub fn is_empty(&self) -> bool {
508 self.old.start == self.old.end && self.new.start == self.new.end
509 }
510}
511
512impl<D1, D2> Edit<(D1, D2)> {
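    /// Splits an edit measured in a pair of dimensions into one edit per
    /// dimension. A minimal sketch, using plain `usize` offsets for both
    /// dimensions (illustrative values only):
    ///
    /// ```ignore
    /// let edit = Edit {
    ///     old: (2usize, 10usize)..(4, 12),
    ///     new: (2, 10)..(7, 15),
    /// };
    /// let (first, second) = edit.flatten();
    /// assert_eq!(first.old, 2..4);
    /// assert_eq!(first.new, 2..7);
    /// assert_eq!(second.old, 10..12);
    /// assert_eq!(second.new, 10..15);
    /// ```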
513 pub fn flatten(self) -> (Edit<D1>, Edit<D2>) {
514 (
515 Edit {
516 old: self.old.start.0..self.old.end.0,
517 new: self.new.start.0..self.new.end.0,
518 },
519 Edit {
520 old: self.old.start.1..self.old.end.1,
521 new: self.new.start.1..self.new.end.1,
522 },
523 )
524 }
525}
526
527#[derive(Eq, PartialEq, Clone, Debug)]
528pub struct Fragment {
529 pub id: Locator,
530 pub timestamp: clock::Lamport,
531 pub insertion_offset: usize,
532 pub len: usize,
533 pub visible: bool,
534 pub deletions: HashSet<clock::Lamport>,
535 pub max_undos: clock::Global,
536}
537
538#[derive(Eq, PartialEq, Clone, Debug)]
539pub struct FragmentSummary {
540 text: FragmentTextSummary,
541 max_id: Locator,
542 max_version: clock::Global,
543 min_insertion_version: clock::Global,
544 max_insertion_version: clock::Global,
545}
546
547#[derive(Copy, Default, Clone, Debug, PartialEq, Eq)]
548struct FragmentTextSummary {
549 visible: usize,
550 deleted: usize,
551}
552
553impl<'a> sum_tree::Dimension<'a, FragmentSummary> for FragmentTextSummary {
554 fn zero(_: &Option<clock::Global>) -> Self {
555 Default::default()
556 }
557
558 fn add_summary(&mut self, summary: &'a FragmentSummary, _: &Option<clock::Global>) {
559 self.visible += summary.text.visible;
560 self.deleted += summary.text.deleted;
561 }
562}
563
564#[derive(Eq, PartialEq, Clone, Debug)]
565struct InsertionFragment {
566 timestamp: clock::Lamport,
567 split_offset: usize,
568 fragment_id: Locator,
569}
570
571#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)]
572struct InsertionFragmentKey {
573 timestamp: clock::Lamport,
574 split_offset: usize,
575}
576
577#[derive(Clone, Debug, Eq, PartialEq)]
578pub enum Operation {
579 Edit(EditOperation),
580 Undo(UndoOperation),
581}
582
583#[derive(Clone, Debug, Eq, PartialEq)]
584pub struct EditOperation {
585 pub timestamp: clock::Lamport,
586 pub version: clock::Global,
587 pub ranges: Vec<Range<FullOffset>>,
588 pub new_text: Vec<Arc<str>>,
589}
590
591#[derive(Clone, Debug, Eq, PartialEq)]
592pub struct UndoOperation {
593 pub timestamp: clock::Lamport,
594 pub version: clock::Global,
595 pub counts: HashMap<clock::Lamport, u32>,
596}
597
/// Stores information about the indentation of a line (tabs and spaces),
/// as well as whether the line consists solely of whitespace.
599#[derive(Clone, Copy, Debug, Eq, PartialEq)]
600pub struct LineIndent {
601 pub tabs: u32,
602 pub spaces: u32,
603 pub line_blank: bool,
604}
605
606impl LineIndent {
607 pub fn from_chunks(chunks: &mut Chunks) -> Self {
608 let mut tabs = 0;
609 let mut spaces = 0;
610 let mut line_blank = true;
611
612 'outer: while let Some(chunk) = chunks.peek() {
613 for ch in chunk.chars() {
614 if ch == '\t' {
615 tabs += 1;
616 } else if ch == ' ' {
617 spaces += 1;
618 } else {
619 if ch != '\n' {
620 line_blank = false;
621 }
622 break 'outer;
623 }
624 }
625
626 chunks.next();
627 }
628
629 Self {
630 tabs,
631 spaces,
632 line_blank,
633 }
634 }
635
636 /// Constructs a new `LineIndent` which only contains spaces.
637 pub fn spaces(spaces: u32) -> Self {
638 Self {
639 tabs: 0,
640 spaces,
641 line_blank: true,
642 }
643 }
644
645 /// Constructs a new `LineIndent` which only contains tabs.
646 pub fn tabs(tabs: u32) -> Self {
647 Self {
648 tabs,
649 spaces: 0,
650 line_blank: true,
651 }
652 }
653
654 /// Indicates whether the line is empty.
655 pub fn is_line_empty(&self) -> bool {
656 self.tabs == 0 && self.spaces == 0 && self.line_blank
657 }
658
659 /// Indicates whether the line is blank (contains only whitespace).
660 pub fn is_line_blank(&self) -> bool {
661 self.line_blank
662 }
663
664 /// Returns the number of indentation characters (tabs or spaces).
665 pub fn raw_len(&self) -> u32 {
666 self.tabs + self.spaces
667 }
668
669 /// Returns the number of indentation characters (tabs or spaces), taking tab size into account.
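    ///
    /// A small sketch of the arithmetic (illustrative values only):
    ///
    /// ```ignore
    /// let indent = LineIndent::from("\t  code");
    /// assert_eq!(indent.raw_len(), 3); // one tab + two spaces
    /// assert_eq!(indent.len(4), 6); // the tab expands to 4 columns
    /// ```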
670 pub fn len(&self, tab_size: u32) -> u32 {
671 self.tabs * tab_size + self.spaces
672 }
673}
674
675impl From<&str> for LineIndent {
676 fn from(value: &str) -> Self {
677 Self::from_iter(value.chars())
678 }
679}
680
681impl FromIterator<char> for LineIndent {
682 fn from_iter<T: IntoIterator<Item = char>>(chars: T) -> Self {
683 let mut tabs = 0;
684 let mut spaces = 0;
685 let mut line_blank = true;
686 for c in chars {
687 if c == '\t' {
688 tabs += 1;
689 } else if c == ' ' {
690 spaces += 1;
691 } else {
692 if c != '\n' {
693 line_blank = false;
694 }
695 break;
696 }
697 }
698 Self {
699 tabs,
700 spaces,
701 line_blank,
702 }
703 }
704}
705
706impl Buffer {
707 pub fn new(replica_id: ReplicaId, remote_id: BufferId, base_text: impl Into<String>) -> Buffer {
708 let mut base_text = base_text.into();
709 let line_ending = LineEnding::detect(&base_text);
710 LineEnding::normalize(&mut base_text);
711 Self::new_normalized(replica_id, remote_id, line_ending, Rope::from(&*base_text))
712 }
713
714 pub fn new_normalized(
715 replica_id: ReplicaId,
716 remote_id: BufferId,
717 line_ending: LineEnding,
718 normalized: Rope,
719 ) -> Buffer {
720 let history = History::new(normalized);
721 let mut fragments = SumTree::new(&None);
722 let mut insertions = SumTree::default();
723
724 let mut lamport_clock = clock::Lamport::new(replica_id);
725 let mut version = clock::Global::new();
726
727 let visible_text = history.base_text.clone();
728 if !visible_text.is_empty() {
729 let insertion_timestamp = clock::Lamport::new(ReplicaId::LOCAL);
730 lamport_clock.observe(insertion_timestamp);
731 version.observe(insertion_timestamp);
732 let fragment_id = Locator::between(&Locator::min(), &Locator::max());
733 let fragment = Fragment {
734 id: fragment_id,
735 timestamp: insertion_timestamp,
736 insertion_offset: 0,
737 len: visible_text.len(),
738 visible: true,
739 deletions: Default::default(),
740 max_undos: Default::default(),
741 };
742 insertions.push(InsertionFragment::new(&fragment), ());
743 fragments.push(fragment, &None);
744 }
745
746 Buffer {
747 snapshot: BufferSnapshot {
748 replica_id,
749 remote_id,
750 visible_text,
751 deleted_text: Rope::new(),
752 line_ending,
753 fragments,
754 insertions,
755 version,
756 undo_map: Default::default(),
757 insertion_slices: Default::default(),
758 },
759 history,
760 deferred_ops: OperationQueue::new(),
761 deferred_replicas: HashSet::default(),
762 lamport_clock,
763 subscriptions: Default::default(),
764 edit_id_resolvers: Default::default(),
765 wait_for_version_txs: Default::default(),
766 }
767 }
768
769 pub fn version(&self) -> clock::Global {
770 self.version.clone()
771 }
772
773 pub fn snapshot(&self) -> BufferSnapshot {
774 self.snapshot.clone()
775 }
776
777 pub fn branch(&self) -> Self {
778 Self {
779 snapshot: self.snapshot.clone(),
780 history: History::new(self.base_text().clone()),
781 deferred_ops: OperationQueue::new(),
782 deferred_replicas: HashSet::default(),
783 lamport_clock: clock::Lamport::new(ReplicaId::LOCAL_BRANCH),
784 subscriptions: Default::default(),
785 edit_id_resolvers: Default::default(),
786 wait_for_version_txs: Default::default(),
787 }
788 }
789
790 pub fn replica_id(&self) -> ReplicaId {
791 self.lamport_clock.replica_id
792 }
793
794 pub fn remote_id(&self) -> BufferId {
795 self.remote_id
796 }
797
798 pub fn deferred_ops_len(&self) -> usize {
799 self.deferred_ops.len()
800 }
801
802 pub fn transaction_group_interval(&self) -> Duration {
803 self.history.group_interval
804 }
805
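    /// Applies a batch of edits to this replica and returns the resulting
    /// `Operation`, which can be replicated to other replicas via `apply_ops`.
    ///
    /// A minimal sketch (illustrative ids and text):
    ///
    /// ```ignore
    /// let mut buffer = Buffer::new(ReplicaId::LOCAL, BufferId::new(1).unwrap(), "hello world");
    /// let op = buffer.edit([(0..5, "goodbye")]);
    /// assert_eq!(buffer.text(), "goodbye world");
    /// // `op` is what a host would broadcast to collaborators.
    /// ```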
806 pub fn edit<R, I, S, T>(&mut self, edits: R) -> Operation
807 where
808 R: IntoIterator<IntoIter = I>,
809 I: ExactSizeIterator<Item = (Range<S>, T)>,
810 S: ToOffset,
811 T: Into<Arc<str>>,
812 {
813 let edits = edits
814 .into_iter()
815 .map(|(range, new_text)| (range, new_text.into()));
816
817 self.start_transaction();
818 let timestamp = self.lamport_clock.tick();
819 let operation = Operation::Edit(self.apply_local_edit(edits, timestamp));
820
821 self.history.push(operation.clone());
822 self.history.push_undo(operation.timestamp());
823 self.snapshot.version.observe(operation.timestamp());
824 self.end_transaction();
825 operation
826 }
827
828 fn apply_local_edit<S: ToOffset, T: Into<Arc<str>>>(
829 &mut self,
830 edits: impl ExactSizeIterator<Item = (Range<S>, T)>,
831 timestamp: clock::Lamport,
832 ) -> EditOperation {
833 let mut edits_patch = Patch::default();
834 let mut edit_op = EditOperation {
835 timestamp,
836 version: self.version(),
837 ranges: Vec::with_capacity(edits.len()),
838 new_text: Vec::with_capacity(edits.len()),
839 };
840 let mut new_insertions = Vec::new();
841 let mut insertion_offset = 0;
842 let mut insertion_slices = Vec::new();
843
844 let mut edits = edits
845 .map(|(range, new_text)| (range.to_offset(&*self), new_text))
846 .peekable();
847
848 let mut new_ropes =
849 RopeBuilder::new(self.visible_text.cursor(0), self.deleted_text.cursor(0));
850 let mut old_fragments = self.fragments.cursor::<FragmentTextSummary>(&None);
851 let mut new_fragments = old_fragments.slice(&edits.peek().unwrap().0.start, Bias::Right);
852 new_ropes.append(new_fragments.summary().text);
853
854 let mut fragment_start = old_fragments.start().visible;
855 for (range, new_text) in edits {
856 let new_text = LineEnding::normalize_arc(new_text.into());
857 let fragment_end = old_fragments.end().visible;
858
859 // If the current fragment ends before this range, then jump ahead to the first fragment
860 // that extends past the start of this range, reusing any intervening fragments.
861 if fragment_end < range.start {
862 // If the current fragment has been partially consumed, then consume the rest of it
863 // and advance to the next fragment before slicing.
864 if fragment_start > old_fragments.start().visible {
865 if fragment_end > fragment_start {
866 let mut suffix = old_fragments.item().unwrap().clone();
867 suffix.len = fragment_end - fragment_start;
868 suffix.insertion_offset += fragment_start - old_fragments.start().visible;
869 new_insertions.push(InsertionFragment::insert_new(&suffix));
870 new_ropes.push_fragment(&suffix, suffix.visible);
871 new_fragments.push(suffix, &None);
872 }
873 old_fragments.next();
874 }
875
876 let slice = old_fragments.slice(&range.start, Bias::Right);
877 new_ropes.append(slice.summary().text);
878 new_fragments.append(slice, &None);
879 fragment_start = old_fragments.start().visible;
880 }
881
882 let full_range_start = FullOffset(range.start + old_fragments.start().deleted);
883
884 // Preserve any portion of the current fragment that precedes this range.
885 if fragment_start < range.start {
886 let mut prefix = old_fragments.item().unwrap().clone();
887 prefix.len = range.start - fragment_start;
888 prefix.insertion_offset += fragment_start - old_fragments.start().visible;
889 prefix.id = Locator::between(&new_fragments.summary().max_id, &prefix.id);
890 new_insertions.push(InsertionFragment::insert_new(&prefix));
891 new_ropes.push_fragment(&prefix, prefix.visible);
892 new_fragments.push(prefix, &None);
893 fragment_start = range.start;
894 }
895
896 // Insert the new text before any existing fragments within the range.
897 if !new_text.is_empty() {
898 let new_start = new_fragments.summary().text.visible;
899
900 let fragment = Fragment {
901 id: Locator::between(
902 &new_fragments.summary().max_id,
903 old_fragments
904 .item()
905 .map_or(&Locator::max(), |old_fragment| &old_fragment.id),
906 ),
907 timestamp,
908 insertion_offset,
909 len: new_text.len(),
910 deletions: Default::default(),
911 max_undos: Default::default(),
912 visible: true,
913 };
914 edits_patch.push(Edit {
915 old: fragment_start..fragment_start,
916 new: new_start..new_start + new_text.len(),
917 });
918 insertion_slices.push(InsertionSlice::from_fragment(timestamp, &fragment));
919 new_insertions.push(InsertionFragment::insert_new(&fragment));
920 new_ropes.push_str(new_text.as_ref());
921 new_fragments.push(fragment, &None);
922 insertion_offset += new_text.len();
923 }
924
925 // Advance through every fragment that intersects this range, marking the intersecting
926 // portions as deleted.
927 while fragment_start < range.end {
928 let fragment = old_fragments.item().unwrap();
929 let fragment_end = old_fragments.end().visible;
930 let mut intersection = fragment.clone();
931 let intersection_end = cmp::min(range.end, fragment_end);
932 if fragment.visible {
933 intersection.len = intersection_end - fragment_start;
934 intersection.insertion_offset += fragment_start - old_fragments.start().visible;
935 intersection.id =
936 Locator::between(&new_fragments.summary().max_id, &intersection.id);
937 intersection.deletions.insert(timestamp);
938 intersection.visible = false;
939 }
940 if intersection.len > 0 {
941 if fragment.visible && !intersection.visible {
942 let new_start = new_fragments.summary().text.visible;
943 edits_patch.push(Edit {
944 old: fragment_start..intersection_end,
945 new: new_start..new_start,
946 });
947 insertion_slices
948 .push(InsertionSlice::from_fragment(timestamp, &intersection));
949 }
950 new_insertions.push(InsertionFragment::insert_new(&intersection));
951 new_ropes.push_fragment(&intersection, fragment.visible);
952 new_fragments.push(intersection, &None);
953 fragment_start = intersection_end;
954 }
955 if fragment_end <= range.end {
956 old_fragments.next();
957 }
958 }
959
960 let full_range_end = FullOffset(range.end + old_fragments.start().deleted);
961 edit_op.ranges.push(full_range_start..full_range_end);
962 edit_op.new_text.push(new_text);
963 }
964
965 // If the current fragment has been partially consumed, then consume the rest of it
966 // and advance to the next fragment before slicing.
967 if fragment_start > old_fragments.start().visible {
968 let fragment_end = old_fragments.end().visible;
969 if fragment_end > fragment_start {
970 let mut suffix = old_fragments.item().unwrap().clone();
971 suffix.len = fragment_end - fragment_start;
972 suffix.insertion_offset += fragment_start - old_fragments.start().visible;
973 new_insertions.push(InsertionFragment::insert_new(&suffix));
974 new_ropes.push_fragment(&suffix, suffix.visible);
975 new_fragments.push(suffix, &None);
976 }
977 old_fragments.next();
978 }
979
980 let suffix = old_fragments.suffix();
981 new_ropes.append(suffix.summary().text);
982 new_fragments.append(suffix, &None);
983 let (visible_text, deleted_text) = new_ropes.finish();
984 drop(old_fragments);
985
986 self.snapshot.fragments = new_fragments;
987 self.snapshot.insertions.edit(new_insertions, ());
988 self.snapshot.visible_text = visible_text;
989 self.snapshot.deleted_text = deleted_text;
990 self.subscriptions.publish_mut(&edits_patch);
991 self.snapshot.insertion_slices.extend(insertion_slices);
992 edit_op
993 }
994
995 pub fn set_line_ending(&mut self, line_ending: LineEnding) {
996 self.snapshot.line_ending = line_ending;
997 }
998
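    /// Applies operations received from other replicas. Operations whose causal
    /// dependencies (their `version`) have not been observed yet are deferred
    /// and retried automatically once the missing operations arrive.
    ///
    /// A minimal round-trip sketch (illustrative; any two distinct replica ids
    /// would do, the constants below are just ones defined in this file):
    ///
    /// ```ignore
    /// let id = BufferId::new(1).unwrap();
    /// let mut host = Buffer::new(ReplicaId::LOCAL, id, "abc");
    /// let mut guest = Buffer::new(ReplicaId::LOCAL_BRANCH, id, "abc");
    /// let op = host.edit([(3..3, "!")]);
    /// guest.apply_ops([op]);
    /// assert_eq!(guest.text(), host.text());
    /// ```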
999 pub fn apply_ops<I: IntoIterator<Item = Operation>>(&mut self, ops: I) {
1000 let mut deferred_ops = Vec::new();
1001 for op in ops {
1002 self.history.push(op.clone());
1003 if self.can_apply_op(&op) {
1004 self.apply_op(op);
1005 } else {
1006 self.deferred_replicas.insert(op.replica_id());
1007 deferred_ops.push(op);
1008 }
1009 }
1010 self.deferred_ops.insert(deferred_ops);
1011 self.flush_deferred_ops();
1012 }
1013
1014 fn apply_op(&mut self, op: Operation) {
1015 match op {
1016 Operation::Edit(edit) => {
1017 if !self.version.observed(edit.timestamp) {
1018 self.apply_remote_edit(
1019 &edit.version,
1020 &edit.ranges,
1021 &edit.new_text,
1022 edit.timestamp,
1023 );
1024 self.snapshot.version.observe(edit.timestamp);
1025 self.lamport_clock.observe(edit.timestamp);
1026 self.resolve_edit(edit.timestamp);
1027 }
1028 }
1029 Operation::Undo(undo) => {
1030 if !self.version.observed(undo.timestamp) {
1031 self.apply_undo(&undo);
1032 self.snapshot.version.observe(undo.timestamp);
1033 self.lamport_clock.observe(undo.timestamp);
1034 }
1035 }
1036 }
1037 self.wait_for_version_txs.retain_mut(|(version, tx)| {
1038 if self.snapshot.version().observed_all(version) {
1039 tx.try_send(()).ok();
1040 false
1041 } else {
1042 true
1043 }
1044 });
1045 }
1046
1047 fn apply_remote_edit(
1048 &mut self,
1049 version: &clock::Global,
1050 ranges: &[Range<FullOffset>],
1051 new_text: &[Arc<str>],
1052 timestamp: clock::Lamport,
1053 ) {
1054 if ranges.is_empty() {
1055 return;
1056 }
1057
1058 let edits = ranges.iter().zip(new_text.iter());
1059 let mut edits_patch = Patch::default();
1060 let mut insertion_slices = Vec::new();
1061 let cx = Some(version.clone());
1062 let mut new_insertions = Vec::new();
1063 let mut insertion_offset = 0;
1064 let mut new_ropes =
1065 RopeBuilder::new(self.visible_text.cursor(0), self.deleted_text.cursor(0));
1066 let mut old_fragments = self
1067 .fragments
1068 .cursor::<Dimensions<VersionedFullOffset, usize>>(&cx);
1069 let mut new_fragments =
1070 old_fragments.slice(&VersionedFullOffset::Offset(ranges[0].start), Bias::Left);
1071 new_ropes.append(new_fragments.summary().text);
1072
1073 let mut fragment_start = old_fragments.start().0.full_offset();
1074 for (range, new_text) in edits {
1075 let fragment_end = old_fragments.end().0.full_offset();
1076
1077 // If the current fragment ends before this range, then jump ahead to the first fragment
1078 // that extends past the start of this range, reusing any intervening fragments.
1079 if fragment_end < range.start {
1080 // If the current fragment has been partially consumed, then consume the rest of it
1081 // and advance to the next fragment before slicing.
1082 if fragment_start > old_fragments.start().0.full_offset() {
1083 if fragment_end > fragment_start {
1084 let mut suffix = old_fragments.item().unwrap().clone();
1085 suffix.len = fragment_end.0 - fragment_start.0;
1086 suffix.insertion_offset +=
1087 fragment_start - old_fragments.start().0.full_offset();
1088 new_insertions.push(InsertionFragment::insert_new(&suffix));
1089 new_ropes.push_fragment(&suffix, suffix.visible);
1090 new_fragments.push(suffix, &None);
1091 }
1092 old_fragments.next();
1093 }
1094
1095 let slice =
1096 old_fragments.slice(&VersionedFullOffset::Offset(range.start), Bias::Left);
1097 new_ropes.append(slice.summary().text);
1098 new_fragments.append(slice, &None);
1099 fragment_start = old_fragments.start().0.full_offset();
1100 }
1101
1102 // If we are at the end of a non-concurrent fragment, advance to the next one.
1103 let fragment_end = old_fragments.end().0.full_offset();
1104 if fragment_end == range.start && fragment_end > fragment_start {
1105 let mut fragment = old_fragments.item().unwrap().clone();
1106 fragment.len = fragment_end.0 - fragment_start.0;
1107 fragment.insertion_offset += fragment_start - old_fragments.start().0.full_offset();
1108 new_insertions.push(InsertionFragment::insert_new(&fragment));
1109 new_ropes.push_fragment(&fragment, fragment.visible);
1110 new_fragments.push(fragment, &None);
1111 old_fragments.next();
1112 fragment_start = old_fragments.start().0.full_offset();
1113 }
1114
1115 // Skip over insertions that are concurrent to this edit, but have a lower lamport
1116 // timestamp.
1117 while let Some(fragment) = old_fragments.item() {
1118 if fragment_start == range.start && fragment.timestamp > timestamp {
1119 new_ropes.push_fragment(fragment, fragment.visible);
1120 new_fragments.push(fragment.clone(), &None);
1121 old_fragments.next();
1122 debug_assert_eq!(fragment_start, range.start);
1123 } else {
1124 break;
1125 }
1126 }
1127 debug_assert!(fragment_start <= range.start);
1128
1129 // Preserve any portion of the current fragment that precedes this range.
1130 if fragment_start < range.start {
1131 let mut prefix = old_fragments.item().unwrap().clone();
1132 prefix.len = range.start.0 - fragment_start.0;
1133 prefix.insertion_offset += fragment_start - old_fragments.start().0.full_offset();
1134 prefix.id = Locator::between(&new_fragments.summary().max_id, &prefix.id);
1135 new_insertions.push(InsertionFragment::insert_new(&prefix));
1136 fragment_start = range.start;
1137 new_ropes.push_fragment(&prefix, prefix.visible);
1138 new_fragments.push(prefix, &None);
1139 }
1140
1141 // Insert the new text before any existing fragments within the range.
1142 if !new_text.is_empty() {
1143 let mut old_start = old_fragments.start().1;
1144 if old_fragments.item().is_some_and(|f| f.visible) {
1145 old_start += fragment_start.0 - old_fragments.start().0.full_offset().0;
1146 }
1147 let new_start = new_fragments.summary().text.visible;
1148 let fragment = Fragment {
1149 id: Locator::between(
1150 &new_fragments.summary().max_id,
1151 old_fragments
1152 .item()
1153 .map_or(&Locator::max(), |old_fragment| &old_fragment.id),
1154 ),
1155 timestamp,
1156 insertion_offset,
1157 len: new_text.len(),
1158 deletions: Default::default(),
1159 max_undos: Default::default(),
1160 visible: true,
1161 };
1162 edits_patch.push(Edit {
1163 old: old_start..old_start,
1164 new: new_start..new_start + new_text.len(),
1165 });
1166 insertion_slices.push(InsertionSlice::from_fragment(timestamp, &fragment));
1167 new_insertions.push(InsertionFragment::insert_new(&fragment));
1168 new_ropes.push_str(new_text);
1169 new_fragments.push(fragment, &None);
1170 insertion_offset += new_text.len();
1171 }
1172
1173 // Advance through every fragment that intersects this range, marking the intersecting
1174 // portions as deleted.
1175 while fragment_start < range.end {
1176 let fragment = old_fragments.item().unwrap();
1177 let fragment_end = old_fragments.end().0.full_offset();
1178 let mut intersection = fragment.clone();
1179 let intersection_end = cmp::min(range.end, fragment_end);
1180 if fragment.was_visible(version, &self.undo_map) {
1181 intersection.len = intersection_end.0 - fragment_start.0;
1182 intersection.insertion_offset +=
1183 fragment_start - old_fragments.start().0.full_offset();
1184 intersection.id =
1185 Locator::between(&new_fragments.summary().max_id, &intersection.id);
1186 intersection.deletions.insert(timestamp);
1187 intersection.visible = false;
1188 insertion_slices.push(InsertionSlice::from_fragment(timestamp, &intersection));
1189 }
1190 if intersection.len > 0 {
1191 if fragment.visible && !intersection.visible {
1192 let old_start = old_fragments.start().1
1193 + (fragment_start.0 - old_fragments.start().0.full_offset().0);
1194 let new_start = new_fragments.summary().text.visible;
1195 edits_patch.push(Edit {
1196 old: old_start..old_start + intersection.len,
1197 new: new_start..new_start,
1198 });
1199 }
1200 new_insertions.push(InsertionFragment::insert_new(&intersection));
1201 new_ropes.push_fragment(&intersection, fragment.visible);
1202 new_fragments.push(intersection, &None);
1203 fragment_start = intersection_end;
1204 }
1205 if fragment_end <= range.end {
1206 old_fragments.next();
1207 }
1208 }
1209 }
1210
1211 // If the current fragment has been partially consumed, then consume the rest of it
1212 // and advance to the next fragment before slicing.
1213 if fragment_start > old_fragments.start().0.full_offset() {
1214 let fragment_end = old_fragments.end().0.full_offset();
1215 if fragment_end > fragment_start {
1216 let mut suffix = old_fragments.item().unwrap().clone();
1217 suffix.len = fragment_end.0 - fragment_start.0;
1218 suffix.insertion_offset += fragment_start - old_fragments.start().0.full_offset();
1219 new_insertions.push(InsertionFragment::insert_new(&suffix));
1220 new_ropes.push_fragment(&suffix, suffix.visible);
1221 new_fragments.push(suffix, &None);
1222 }
1223 old_fragments.next();
1224 }
1225
1226 let suffix = old_fragments.suffix();
1227 new_ropes.append(suffix.summary().text);
1228 new_fragments.append(suffix, &None);
1229 let (visible_text, deleted_text) = new_ropes.finish();
1230 drop(old_fragments);
1231
1232 self.snapshot.fragments = new_fragments;
1233 self.snapshot.visible_text = visible_text;
1234 self.snapshot.deleted_text = deleted_text;
1235 self.snapshot.insertions.edit(new_insertions, ());
1236 self.snapshot.insertion_slices.extend(insertion_slices);
1237 self.subscriptions.publish_mut(&edits_patch)
1238 }
1239
1240 fn fragment_ids_for_edits<'a>(
1241 &'a self,
1242 edit_ids: impl Iterator<Item = &'a clock::Lamport>,
1243 ) -> Vec<&'a Locator> {
1244 // Get all of the insertion slices changed by the given edits.
1245 let mut insertion_slices = Vec::new();
1246 for edit_id in edit_ids {
1247 let insertion_slice = InsertionSlice {
1248 edit_id: *edit_id,
1249 insertion_id: clock::Lamport::MIN,
1250 range: 0..0,
1251 };
1252 let slices = self
1253 .snapshot
1254 .insertion_slices
1255 .iter_from(&insertion_slice)
1256 .take_while(|slice| slice.edit_id == *edit_id);
1257 insertion_slices.extend(slices)
1258 }
1259 insertion_slices
1260 .sort_unstable_by_key(|s| (s.insertion_id, s.range.start, Reverse(s.range.end)));
1261
1262 // Get all of the fragments corresponding to these insertion slices.
1263 let mut fragment_ids = Vec::new();
1264 let mut insertions_cursor = self.insertions.cursor::<InsertionFragmentKey>(());
1265 for insertion_slice in &insertion_slices {
1266 if insertion_slice.insertion_id != insertions_cursor.start().timestamp
1267 || insertion_slice.range.start > insertions_cursor.start().split_offset
1268 {
1269 insertions_cursor.seek_forward(
1270 &InsertionFragmentKey {
1271 timestamp: insertion_slice.insertion_id,
1272 split_offset: insertion_slice.range.start,
1273 },
1274 Bias::Left,
1275 );
1276 }
1277 while let Some(item) = insertions_cursor.item() {
1278 if item.timestamp != insertion_slice.insertion_id
1279 || item.split_offset >= insertion_slice.range.end
1280 {
1281 break;
1282 }
1283 fragment_ids.push(&item.fragment_id);
1284 insertions_cursor.next();
1285 }
1286 }
1287 fragment_ids.sort_unstable();
1288 fragment_ids
1289 }
1290
1291 fn apply_undo(&mut self, undo: &UndoOperation) {
1292 self.snapshot.undo_map.insert(undo);
1293
1294 let mut edits = Patch::default();
1295 let mut old_fragments = self
1296 .fragments
1297 .cursor::<Dimensions<Option<&Locator>, usize>>(&None);
1298 let mut new_fragments = SumTree::new(&None);
1299 let mut new_ropes =
1300 RopeBuilder::new(self.visible_text.cursor(0), self.deleted_text.cursor(0));
1301
1302 for fragment_id in self.fragment_ids_for_edits(undo.counts.keys()) {
1303 let preceding_fragments = old_fragments.slice(&Some(fragment_id), Bias::Left);
1304 new_ropes.append(preceding_fragments.summary().text);
1305 new_fragments.append(preceding_fragments, &None);
1306
1307 if let Some(fragment) = old_fragments.item() {
1308 let mut fragment = fragment.clone();
1309 let fragment_was_visible = fragment.visible;
1310
1311 fragment.visible = fragment.is_visible(&self.undo_map);
1312 fragment.max_undos.observe(undo.timestamp);
1313
1314 let old_start = old_fragments.start().1;
1315 let new_start = new_fragments.summary().text.visible;
1316 if fragment_was_visible && !fragment.visible {
1317 edits.push(Edit {
1318 old: old_start..old_start + fragment.len,
1319 new: new_start..new_start,
1320 });
1321 } else if !fragment_was_visible && fragment.visible {
1322 edits.push(Edit {
1323 old: old_start..old_start,
1324 new: new_start..new_start + fragment.len,
1325 });
1326 }
1327 new_ropes.push_fragment(&fragment, fragment_was_visible);
1328 new_fragments.push(fragment, &None);
1329
1330 old_fragments.next();
1331 }
1332 }
1333
1334 let suffix = old_fragments.suffix();
1335 new_ropes.append(suffix.summary().text);
1336 new_fragments.append(suffix, &None);
1337
1338 drop(old_fragments);
1339 let (visible_text, deleted_text) = new_ropes.finish();
1340 self.snapshot.fragments = new_fragments;
1341 self.snapshot.visible_text = visible_text;
1342 self.snapshot.deleted_text = deleted_text;
1343 self.subscriptions.publish_mut(&edits);
1344 }
1345
1346 fn flush_deferred_ops(&mut self) {
1347 self.deferred_replicas.clear();
1348 let mut deferred_ops = Vec::new();
1349 for op in self.deferred_ops.drain().iter().cloned() {
1350 if self.can_apply_op(&op) {
1351 self.apply_op(op);
1352 } else {
1353 self.deferred_replicas.insert(op.replica_id());
1354 deferred_ops.push(op);
1355 }
1356 }
1357 self.deferred_ops.insert(deferred_ops);
1358 }
1359
1360 fn can_apply_op(&self, op: &Operation) -> bool {
1361 if self.deferred_replicas.contains(&op.replica_id()) {
1362 false
1363 } else {
1364 self.version.observed_all(match op {
1365 Operation::Edit(edit) => &edit.version,
1366 Operation::Undo(undo) => &undo.version,
1367 })
1368 }
1369 }
1370
1371 pub fn has_deferred_ops(&self) -> bool {
1372 !self.deferred_ops.is_empty()
1373 }
1374
1375 pub fn peek_undo_stack(&self) -> Option<&HistoryEntry> {
1376 self.history.undo_stack.last()
1377 }
1378
1379 pub fn peek_redo_stack(&self) -> Option<&HistoryEntry> {
1380 self.history.redo_stack.last()
1381 }
1382
1383 pub fn start_transaction(&mut self) -> Option<TransactionId> {
1384 self.start_transaction_at(Instant::now())
1385 }
1386
1387 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
1388 self.history
1389 .start_transaction(self.version.clone(), now, &mut self.lamport_clock)
1390 }
1391
1392 pub fn end_transaction(&mut self) -> Option<(TransactionId, clock::Global)> {
1393 self.end_transaction_at(Instant::now())
1394 }
1395
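    /// Ends the transaction at the given time. Consecutive transactions whose
    /// edits fall within `group_interval` of each other are grouped into a
    /// single undo entry.
    ///
    /// A minimal sketch of the grouping behavior (illustrative timings; the
    /// non-test default interval is 300ms):
    ///
    /// ```ignore
    /// let now = Instant::now();
    /// buffer.start_transaction_at(now);
    /// buffer.edit([(0..0, "a")]);
    /// buffer.end_transaction_at(now);
    ///
    /// buffer.start_transaction_at(now + Duration::from_millis(100));
    /// buffer.edit([(1..1, "b")]);
    /// buffer.end_transaction_at(now + Duration::from_millis(100));
    ///
    /// buffer.undo(); // reverts both edits, since the transactions were grouped
    /// ```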
1396 pub fn end_transaction_at(&mut self, now: Instant) -> Option<(TransactionId, clock::Global)> {
1397 if let Some(entry) = self.history.end_transaction(now) {
1398 let since = entry.transaction.start.clone();
1399 let id = self.history.group().unwrap();
1400 Some((id, since))
1401 } else {
1402 None
1403 }
1404 }
1405
1406 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
1407 self.history.finalize_last_transaction()
1408 }
1409
1410 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
1411 self.history.group_until(transaction_id);
1412 }
1413
1414 pub fn base_text(&self) -> &Rope {
1415 &self.history.base_text
1416 }
1417
1418 pub fn operations(&self) -> &TreeMap<clock::Lamport, Operation> {
1419 &self.history.operations
1420 }
1421
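    /// Undoes the most recent transaction on the undo stack, returning its id
    /// together with the `Operation` that can be replicated to other replicas.
    ///
    /// A minimal sketch (illustrative):
    ///
    /// ```ignore
    /// let mut buffer = Buffer::new(ReplicaId::LOCAL, BufferId::new(1).unwrap(), "abc");
    /// buffer.edit([(3..3, "def")]);
    /// assert_eq!(buffer.text(), "abcdef");
    /// let (transaction_id, undo_op) = buffer.undo().unwrap();
    /// assert_eq!(buffer.text(), "abc");
    /// let _ = (transaction_id, undo_op); // replicate `undo_op`; keep the id for redo
    /// ```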
1422 pub fn undo(&mut self) -> Option<(TransactionId, Operation)> {
1423 if let Some(entry) = self.history.pop_undo() {
1424 let transaction = entry.transaction.clone();
1425 let transaction_id = transaction.id;
1426 let op = self.undo_or_redo(transaction);
1427 Some((transaction_id, op))
1428 } else {
1429 None
1430 }
1431 }
1432
1433 pub fn undo_transaction(&mut self, transaction_id: TransactionId) -> Option<Operation> {
1434 let transaction = self
1435 .history
1436 .remove_from_undo(transaction_id)?
1437 .transaction
1438 .clone();
1439 Some(self.undo_or_redo(transaction))
1440 }
1441
1442 pub fn undo_to_transaction(&mut self, transaction_id: TransactionId) -> Vec<Operation> {
1443 let transactions = self
1444 .history
1445 .remove_from_undo_until(transaction_id)
1446 .iter()
1447 .map(|entry| entry.transaction.clone())
1448 .collect::<Vec<_>>();
1449
1450 transactions
1451 .into_iter()
1452 .map(|transaction| self.undo_or_redo(transaction))
1453 .collect()
1454 }
1455
1456 pub fn forget_transaction(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
1457 self.history.forget(transaction_id)
1458 }
1459
1460 pub fn get_transaction(&self, transaction_id: TransactionId) -> Option<&Transaction> {
1461 self.history.transaction(transaction_id)
1462 }
1463
1464 pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
1465 self.history.merge_transactions(transaction, destination);
1466 }
1467
1468 pub fn redo(&mut self) -> Option<(TransactionId, Operation)> {
1469 if let Some(entry) = self.history.pop_redo() {
1470 let transaction = entry.transaction.clone();
1471 let transaction_id = transaction.id;
1472 let op = self.undo_or_redo(transaction);
1473 Some((transaction_id, op))
1474 } else {
1475 None
1476 }
1477 }
1478
1479 pub fn redo_to_transaction(&mut self, transaction_id: TransactionId) -> Vec<Operation> {
1480 let transactions = self
1481 .history
1482 .remove_from_redo(transaction_id)
1483 .iter()
1484 .map(|entry| entry.transaction.clone())
1485 .collect::<Vec<_>>();
1486
1487 transactions
1488 .into_iter()
1489 .map(|transaction| self.undo_or_redo(transaction))
1490 .collect()
1491 }
1492
1493 fn undo_or_redo(&mut self, transaction: Transaction) -> Operation {
1494 let mut counts = HashMap::default();
1495 for edit_id in transaction.edit_ids {
1496 counts.insert(edit_id, self.undo_map.undo_count(edit_id).saturating_add(1));
1497 }
1498
1499 let operation = self.undo_operations(counts);
1500 self.history.push(operation.clone());
1501 operation
1502 }
1503
1504 pub fn undo_operations(&mut self, counts: HashMap<clock::Lamport, u32>) -> Operation {
1505 let timestamp = self.lamport_clock.tick();
1506 let version = self.version();
1507 self.snapshot.version.observe(timestamp);
1508 let undo = UndoOperation {
1509 timestamp,
1510 version,
1511 counts,
1512 };
1513 self.apply_undo(&undo);
1514 Operation::Undo(undo)
1515 }
1516
1517 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
1518 self.history.push_transaction(transaction, now);
1519 }
1520
    /// Differs from `push_transaction` in that it does not clear the redo
    /// stack. Intended to be used to create a parent transaction to merge
    /// potential child transactions into.
    ///
    /// The caller is responsible for removing it from the undo history using
    /// `forget_transaction` if no edits are merged into it. Otherwise, if edits
    /// are merged into this transaction, the caller is responsible for ensuring
    /// the redo stack is cleared. The easiest way to ensure the redo stack is
    /// cleared is to create transactions with the usual `start_transaction` and
    /// `end_transaction` methods and merge the resulting transactions into
    /// the transaction created by this method.
1534 pub fn push_empty_transaction(&mut self, now: Instant) -> TransactionId {
1535 self.history
1536 .push_empty_transaction(self.version.clone(), now, &mut self.lamport_clock)
1537 }
1538
1539 pub fn edited_ranges_for_transaction_id<D>(
1540 &self,
1541 transaction_id: TransactionId,
1542 ) -> impl '_ + Iterator<Item = Range<D>>
1543 where
1544 D: TextDimension,
1545 {
1546 self.history
1547 .transaction(transaction_id)
1548 .into_iter()
1549 .flat_map(|transaction| self.edited_ranges_for_transaction(transaction))
1550 }
1551
1552 pub fn edited_ranges_for_edit_ids<'a, D>(
1553 &'a self,
1554 edit_ids: impl IntoIterator<Item = &'a clock::Lamport>,
1555 ) -> impl 'a + Iterator<Item = Range<D>>
1556 where
1557 D: TextDimension,
1558 {
1559 // get fragment ranges
1560 let mut cursor = self
1561 .fragments
1562 .cursor::<Dimensions<Option<&Locator>, usize>>(&None);
1563 let offset_ranges = self
1564 .fragment_ids_for_edits(edit_ids.into_iter())
1565 .into_iter()
1566 .filter_map(move |fragment_id| {
1567 cursor.seek_forward(&Some(fragment_id), Bias::Left);
1568 let fragment = cursor.item()?;
1569 let start_offset = cursor.start().1;
1570 let end_offset = start_offset + if fragment.visible { fragment.len } else { 0 };
1571 Some(start_offset..end_offset)
1572 });
1573
1574 // combine adjacent ranges
1575 let mut prev_range: Option<Range<usize>> = None;
1576 let disjoint_ranges = offset_ranges
1577 .map(Some)
1578 .chain([None])
1579 .filter_map(move |range| {
1580 if let Some((range, prev_range)) = range.as_ref().zip(prev_range.as_mut())
1581 && prev_range.end == range.start
1582 {
1583 prev_range.end = range.end;
1584 return None;
1585 }
1586 let result = prev_range.clone();
1587 prev_range = range;
1588 result
1589 });
1590
1591 // convert to the desired text dimension.
1592 let mut position = D::zero(());
1593 let mut rope_cursor = self.visible_text.cursor(0);
1594 disjoint_ranges.map(move |range| {
1595 position.add_assign(&rope_cursor.summary(range.start));
1596 let start = position;
1597 position.add_assign(&rope_cursor.summary(range.end));
1598 let end = position;
1599 start..end
1600 })
1601 }
1602
1603 pub fn edited_ranges_for_transaction<'a, D>(
1604 &'a self,
1605 transaction: &'a Transaction,
1606 ) -> impl 'a + Iterator<Item = Range<D>>
1607 where
1608 D: TextDimension,
1609 {
1610 self.edited_ranges_for_edit_ids(&transaction.edit_ids)
1611 }
1612
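    /// A minimal sketch of observing edits through a subscription. This assumes
    /// the `Subscription` type from this crate's `subscription` module exposes a
    /// `consume` method returning the accumulated `Patch` (an assumption; see the
    /// `subscription` module for the actual API):
    ///
    /// ```ignore
    /// let subscription = buffer.subscribe();
    /// buffer.edit([(0..0, "hi ")]);
    /// let patch = subscription.consume();
    /// // `patch` describes the old and new offset ranges touched by the edit.
    /// ```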
1613 pub fn subscribe(&mut self) -> Subscription {
1614 self.subscriptions.subscribe()
1615 }
1616
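    /// Returns a future that resolves once every one of the given edit ids has
    /// been observed by this buffer, or fails if the buffer gives up waiting
    /// (see `give_up_waiting`).
    ///
    /// A minimal sketch (illustrative; `remote_op` stands in for an operation
    /// produced on another replica):
    ///
    /// ```ignore
    /// let edit_id = remote_op.timestamp();
    /// let wait = buffer.wait_for_edits([edit_id]);
    /// buffer.apply_ops([remote_op]); // resolves the waiter
    /// wait.await?;
    /// ```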
1617 pub fn wait_for_edits<It: IntoIterator<Item = clock::Lamport>>(
1618 &mut self,
1619 edit_ids: It,
1620 ) -> impl 'static + Future<Output = Result<()>> + use<It> {
1621 let mut futures = Vec::new();
1622 for edit_id in edit_ids {
1623 if !self.version.observed(edit_id) {
1624 let (tx, rx) = oneshot::channel();
1625 self.edit_id_resolvers.entry(edit_id).or_default().push(tx);
1626 futures.push(rx);
1627 }
1628 }
1629
1630 async move {
1631 for mut future in futures {
1632 if future.recv().await.is_none() {
1633 anyhow::bail!("gave up waiting for edits");
1634 }
1635 }
1636 Ok(())
1637 }
1638 }
1639
1640 pub fn wait_for_anchors<It: IntoIterator<Item = Anchor>>(
1641 &mut self,
1642 anchors: It,
1643 ) -> impl 'static + Future<Output = Result<()>> + use<It> {
1644 let mut futures = Vec::new();
1645 for anchor in anchors {
1646 if !self.version.observed(anchor.timestamp)
1647 && anchor != Anchor::MAX
1648 && anchor != Anchor::MIN
1649 {
1650 let (tx, rx) = oneshot::channel();
1651 self.edit_id_resolvers
1652 .entry(anchor.timestamp)
1653 .or_default()
1654 .push(tx);
1655 futures.push(rx);
1656 }
1657 }
1658
1659 async move {
1660 for mut future in futures {
1661 if future.recv().await.is_none() {
1662 anyhow::bail!("gave up waiting for anchors");
1663 }
1664 }
1665 Ok(())
1666 }
1667 }
1668
1669 pub fn wait_for_version(
1670 &mut self,
1671 version: clock::Global,
1672 ) -> impl Future<Output = Result<()>> + use<> {
1673 let mut rx = None;
1674 if !self.snapshot.version.observed_all(&version) {
1675 let channel = oneshot::channel();
1676 self.wait_for_version_txs.push((version, channel.0));
1677 rx = Some(channel.1);
1678 }
1679 async move {
1680 if let Some(mut rx) = rx
1681 && rx.recv().await.is_none()
1682 {
1683 anyhow::bail!("gave up waiting for version");
1684 }
1685 Ok(())
1686 }
1687 }
1688
1689 pub fn give_up_waiting(&mut self) {
1690 self.edit_id_resolvers.clear();
1691 self.wait_for_version_txs.clear();
1692 }
1693
1694 fn resolve_edit(&mut self, edit_id: clock::Lamport) {
1695 for mut tx in self
1696 .edit_id_resolvers
1697 .remove(&edit_id)
1698 .into_iter()
1699 .flatten()
1700 {
1701 tx.try_send(()).ok();
1702 }
1703 }
1704}
1705
1706#[cfg(any(test, feature = "test-support"))]
1707impl Buffer {
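    /// Applies the edits needed to turn the buffer's current text into
    /// `marked_string`, where the marked ranges denote newly inserted text.
    ///
    /// A minimal sketch (assuming the `«»` range markers understood by
    /// `util::test::marked_text_ranges`):
    ///
    /// ```ignore
    /// let mut buffer = Buffer::new(ReplicaId::LOCAL, BufferId::new(1).unwrap(), "one three");
    /// buffer.edit_via_marked_text("one «two »three");
    /// assert_eq!(buffer.text(), "one two three");
    /// ```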
1708 #[track_caller]
1709 pub fn edit_via_marked_text(&mut self, marked_string: &str) {
1710 let edits = self.edits_for_marked_text(marked_string);
1711 self.edit(edits);
1712 }
1713
1714 #[track_caller]
1715 pub fn edits_for_marked_text(&self, marked_string: &str) -> Vec<(Range<usize>, String)> {
1716 let old_text = self.text();
1717 let (new_text, mut ranges) = util::test::marked_text_ranges(marked_string, false);
1718 if ranges.is_empty() {
1719 ranges.push(0..new_text.len());
1720 }
1721
1722 assert_eq!(
1723 old_text[..ranges[0].start],
1724 new_text[..ranges[0].start],
1725 "invalid edit"
1726 );
1727
1728 let mut delta = 0;
1729 let mut edits = Vec::new();
1730 let mut ranges = ranges.into_iter().peekable();
1731
1732 while let Some(inserted_range) = ranges.next() {
1733 let new_start = inserted_range.start;
1734 let old_start = (new_start as isize - delta) as usize;
1735
1736 let following_text = if let Some(next_range) = ranges.peek() {
1737 &new_text[inserted_range.end..next_range.start]
1738 } else {
1739 &new_text[inserted_range.end..]
1740 };
1741
1742 let inserted_len = inserted_range.len();
1743 let deleted_len = old_text[old_start..]
1744 .find(following_text)
1745 .expect("invalid edit");
1746
1747 let old_range = old_start..old_start + deleted_len;
1748 edits.push((old_range, new_text[inserted_range].to_string()));
1749 delta += inserted_len as isize - deleted_len as isize;
1750 }
1751
1752 assert_eq!(
1753 old_text.len() as isize + delta,
1754 new_text.len() as isize,
1755 "invalid edit"
1756 );
1757
1758 edits
1759 }
1760
1761 pub fn check_invariants(&self) {
1762 // Ensure every fragment is ordered by locator in the fragment tree and corresponds
1763 // to an insertion fragment in the insertions tree.
1764 let mut prev_fragment_id = Locator::min();
1765 for fragment in self.snapshot.fragments.items(&None) {
1766 assert!(fragment.id > prev_fragment_id);
1767 prev_fragment_id = fragment.id.clone();
1768
1769 let insertion_fragment = self
1770 .snapshot
1771 .insertions
1772 .get(
1773 &InsertionFragmentKey {
1774 timestamp: fragment.timestamp,
1775 split_offset: fragment.insertion_offset,
1776 },
1777 (),
1778 )
1779 .unwrap();
1780 assert_eq!(
1781 insertion_fragment.fragment_id, fragment.id,
1782 "fragment: {:?}\ninsertion: {:?}",
1783 fragment, insertion_fragment
1784 );
1785 }
1786
1787 let mut cursor = self.snapshot.fragments.cursor::<Option<&Locator>>(&None);
1788 for insertion_fragment in self.snapshot.insertions.cursor::<()>(()) {
1789 cursor.seek(&Some(&insertion_fragment.fragment_id), Bias::Left);
1790 let fragment = cursor.item().unwrap();
1791 assert_eq!(insertion_fragment.fragment_id, fragment.id);
1792 assert_eq!(insertion_fragment.split_offset, fragment.insertion_offset);
1793 }
1794
1795 let fragment_summary = self.snapshot.fragments.summary();
1796 assert_eq!(
1797 fragment_summary.text.visible,
1798 self.snapshot.visible_text.len()
1799 );
1800 assert_eq!(
1801 fragment_summary.text.deleted,
1802 self.snapshot.deleted_text.len()
1803 );
1804
1805 assert!(!self.text().contains("\r\n"));
1806 }
1807
1808 pub fn set_group_interval(&mut self, group_interval: Duration) {
1809 self.history.group_interval = group_interval;
1810 }
1811
1812 pub fn random_byte_range(&self, start_offset: usize, rng: &mut impl rand::Rng) -> Range<usize> {
1813 let end = self.clip_offset(rng.random_range(start_offset..=self.len()), Bias::Right);
1814 let start = self.clip_offset(rng.random_range(start_offset..=end), Bias::Right);
1815 start..end
1816 }
1817
1818 pub fn get_random_edits<T>(
1819 &self,
1820 rng: &mut T,
1821 edit_count: usize,
1822 ) -> Vec<(Range<usize>, Arc<str>)>
1823 where
1824 T: rand::Rng,
1825 {
1826 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
1827 let mut last_end = None;
1828 for _ in 0..edit_count {
1829 if last_end.is_some_and(|last_end| last_end >= self.len()) {
1830 break;
1831 }
1832 let new_start = last_end.map_or(0, |last_end| last_end + 1);
1833 let range = self.random_byte_range(new_start, rng);
1834 last_end = Some(range.end);
1835
1836 let new_text_len = rng.random_range(0..10);
1837 let new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
1838
1839 edits.push((range, new_text.into()));
1840 }
1841 edits
1842 }
1843
1844 pub fn randomly_edit<T>(
1845 &mut self,
1846 rng: &mut T,
1847 edit_count: usize,
1848 ) -> (Vec<(Range<usize>, Arc<str>)>, Operation)
1849 where
1850 T: rand::Rng,
1851 {
1852 let mut edits = self.get_random_edits(rng, edit_count);
1853 log::info!("mutating buffer {:?} with {:?}", self.replica_id, edits);
1854
1855 let op = self.edit(edits.iter().cloned());
1856 if let Operation::Edit(edit) = &op {
1857 assert_eq!(edits.len(), edit.new_text.len());
1858 for (edit, new_text) in edits.iter_mut().zip(&edit.new_text) {
1859 edit.1 = new_text.clone();
1860 }
1861 } else {
1862 unreachable!()
1863 }
1864
1865 (edits, op)
1866 }
1867
1868 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng) -> Vec<Operation> {
1869 use rand::prelude::*;
1870
1871 let mut ops = Vec::new();
1872 for _ in 0..rng.random_range(1..=5) {
1873 if let Some(entry) = self.history.undo_stack.choose(rng) {
1874 let transaction = entry.transaction.clone();
1875 log::info!(
1876 "undoing buffer {:?} transaction {:?}",
1877 self.replica_id,
1878 transaction
1879 );
1880 ops.push(self.undo_or_redo(transaction));
1881 }
1882 }
1883 ops
1884 }
1885}
1886
1887impl Deref for Buffer {
1888 type Target = BufferSnapshot;
1889
1890 fn deref(&self) -> &Self::Target {
1891 &self.snapshot
1892 }
1893}
1894
1895impl BufferSnapshot {
1896 pub fn as_rope(&self) -> &Rope {
1897 &self.visible_text
1898 }
1899
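    /// Reconstructs the buffer's visible text as it was at the given `version`,
    /// copying unchanged spans from the current visible rope and, for fragments whose
    /// visibility differs at that version, splicing text in from the visible or
    /// deleted rope as appropriate.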
1900 pub fn rope_for_version(&self, version: &clock::Global) -> Rope {
1901 let mut rope = Rope::new();
1902
1903 let mut cursor = self
1904 .fragments
1905 .filter::<_, FragmentTextSummary>(&None, move |summary| {
1906 !version.observed_all(&summary.max_version)
1907 });
1908 cursor.next();
1909
1910 let mut visible_cursor = self.visible_text.cursor(0);
1911 let mut deleted_cursor = self.deleted_text.cursor(0);
1912
1913 while let Some(fragment) = cursor.item() {
1914 if cursor.start().visible > visible_cursor.offset() {
1915 let text = visible_cursor.slice(cursor.start().visible);
1916 rope.append(text);
1917 }
1918
1919 if fragment.was_visible(version, &self.undo_map) {
1920 if fragment.visible {
1921 let text = visible_cursor.slice(cursor.end().visible);
1922 rope.append(text);
1923 } else {
1924 deleted_cursor.seek_forward(cursor.start().deleted);
1925 let text = deleted_cursor.slice(cursor.end().deleted);
1926 rope.append(text);
1927 }
1928 } else if fragment.visible {
1929 visible_cursor.seek_forward(cursor.end().visible);
1930 }
1931
1932 cursor.next();
1933 }
1934
1935 if cursor.start().visible > visible_cursor.offset() {
1936 let text = visible_cursor.slice(cursor.start().visible);
1937 rope.append(text);
1938 }
1939
1940 rope
1941 }
1942
1943 pub fn remote_id(&self) -> BufferId {
1944 self.remote_id
1945 }
1946
1947 pub fn replica_id(&self) -> ReplicaId {
1948 self.replica_id
1949 }
1950
1951 pub fn row_count(&self) -> u32 {
1952 self.max_point().row + 1
1953 }
1954
1955 pub fn len(&self) -> usize {
1956 self.visible_text.len()
1957 }
1958
1959 pub fn is_empty(&self) -> bool {
1960 self.len() == 0
1961 }
1962
1963 pub fn chars(&self) -> impl Iterator<Item = char> + '_ {
1964 self.chars_at(0)
1965 }
1966
1967 pub fn chars_for_range<T: ToOffset>(&self, range: Range<T>) -> impl Iterator<Item = char> + '_ {
1968 self.text_for_range(range).flat_map(str::chars)
1969 }
1970
1971 pub fn reversed_chars_for_range<T: ToOffset>(
1972 &self,
1973 range: Range<T>,
1974 ) -> impl Iterator<Item = char> + '_ {
1975 self.reversed_chunks_in_range(range)
1976 .flat_map(|chunk| chunk.chars().rev())
1977 }
1978
1979 pub fn contains_str_at<T>(&self, position: T, needle: &str) -> bool
1980 where
1981 T: ToOffset,
1982 {
1983 let position = position.to_offset(self);
1984 position == self.clip_offset(position, Bias::Left)
1985 && self
1986 .bytes_in_range(position..self.len())
1987 .flatten()
1988 .copied()
1989 .take(needle.len())
1990 .eq(needle.bytes())
1991 }
1992
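    /// Returns the range of text immediately preceding `position` that case-insensitively
    /// matches a prefix of `needle`. For example, with the text `"let pri"` and the
    /// needle `"println"`, the returned range covers the trailing `"pri"`.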
1993 pub fn common_prefix_at<T>(&self, position: T, needle: &str) -> Range<T>
1994 where
1995 T: ToOffset + TextDimension,
1996 {
1997 let offset = position.to_offset(self);
1998 let common_prefix_len = needle
1999 .char_indices()
2000 .map(|(index, _)| index)
2001 .chain([needle.len()])
2002 .take_while(|&len| len <= offset)
2003 .filter(|&len| {
2004 let left = self
2005 .chars_for_range(offset - len..offset)
2006 .flat_map(char::to_lowercase);
2007 let right = needle[..len].chars().flat_map(char::to_lowercase);
2008 left.eq(right)
2009 })
2010 .last()
2011 .unwrap_or(0);
2012 let start_offset = offset - common_prefix_len;
2013 let start = self.text_summary_for_range(0..start_offset);
2014 start..position
2015 }
2016
2017 /// Returns the buffer's text as a String.
2018 ///
2019 /// Note: This always uses `\n` as the line separator, regardless of the buffer's
2020 /// actual line ending setting. For LSP communication or other cases where you need
2021 /// to preserve the original line endings, use [`Self::text_with_original_line_endings`] instead.
2022 pub fn text(&self) -> String {
2023 self.visible_text.to_string()
2024 }
2025
2026 /// Returns the buffer's text using the same line endings as the buffer's file.
2027 ///
2028 /// Unlike [`Self::text`] which always uses `\n`, this method formats the text using
2029 /// the buffer's actual line ending setting (Unix `\n` or Windows `\r\n`).
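    ///
    /// A minimal sketch of the difference, marked `ignore` because the construction of
    /// the buffer here is assumed rather than shown:
    ///
    /// ```ignore
    /// // Hypothetical: a buffer whose underlying file uses Windows (CRLF) line endings.
    /// assert_eq!(buffer.text(), "a\nb");
    /// assert_eq!(buffer.text_with_original_line_endings(), "a\r\nb");
    /// ```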
2030 pub fn text_with_original_line_endings(&self) -> String {
2031 self.visible_text
2032 .to_string_with_line_ending(self.line_ending)
2033 }
2034
2035 pub fn line_ending(&self) -> LineEnding {
2036 self.line_ending
2037 }
2038
2039 pub fn deleted_text(&self) -> String {
2040 self.deleted_text.to_string()
2041 }
2042
2043 pub fn fragments(&self) -> impl Iterator<Item = &Fragment> {
2044 self.fragments.iter()
2045 }
2046
2047 pub fn text_summary(&self) -> TextSummary {
2048 self.visible_text.summary()
2049 }
2050
2051 pub fn max_point(&self) -> Point {
2052 self.visible_text.max_point()
2053 }
2054
2055 pub fn max_point_utf16(&self) -> PointUtf16 {
2056 self.visible_text.max_point_utf16()
2057 }
2058
2059 pub fn point_to_offset(&self, point: Point) -> usize {
2060 self.visible_text.point_to_offset(point)
2061 }
2062
2063 pub fn point_to_offset_utf16(&self, point: Point) -> OffsetUtf16 {
2064 self.visible_text.point_to_offset_utf16(point)
2065 }
2066
2067 pub fn point_utf16_to_offset_utf16(&self, point: PointUtf16) -> OffsetUtf16 {
2068 self.visible_text.point_utf16_to_offset_utf16(point)
2069 }
2070
2071 pub fn point_utf16_to_offset(&self, point: PointUtf16) -> usize {
2072 self.visible_text.point_utf16_to_offset(point)
2073 }
2074
2075 pub fn unclipped_point_utf16_to_offset(&self, point: Unclipped<PointUtf16>) -> usize {
2076 self.visible_text.unclipped_point_utf16_to_offset(point)
2077 }
2078
2079 pub fn unclipped_point_utf16_to_point(&self, point: Unclipped<PointUtf16>) -> Point {
2080 self.visible_text.unclipped_point_utf16_to_point(point)
2081 }
2082
2083 pub fn offset_utf16_to_offset(&self, offset: OffsetUtf16) -> usize {
2084 self.visible_text.offset_utf16_to_offset(offset)
2085 }
2086
2087 pub fn offset_to_offset_utf16(&self, offset: usize) -> OffsetUtf16 {
2088 self.visible_text.offset_to_offset_utf16(offset)
2089 }
2090
2091 pub fn offset_to_point(&self, offset: usize) -> Point {
2092 self.visible_text.offset_to_point(offset)
2093 }
2094
2095 pub fn offset_to_point_utf16(&self, offset: usize) -> PointUtf16 {
2096 self.visible_text.offset_to_point_utf16(offset)
2097 }
2098
2099 pub fn point_to_point_utf16(&self, point: Point) -> PointUtf16 {
2100 self.visible_text.point_to_point_utf16(point)
2101 }
2102
2103 pub fn point_utf16_to_point(&self, point: PointUtf16) -> Point {
2104 self.visible_text.point_utf16_to_point(point)
2105 }
2106
2107 pub fn version(&self) -> &clock::Global {
2108 &self.version
2109 }
2110
2111 pub fn chars_at<T: ToOffset>(&self, position: T) -> impl Iterator<Item = char> + '_ {
2112 let offset = position.to_offset(self);
2113 self.visible_text.chars_at(offset)
2114 }
2115
2116 pub fn reversed_chars_at<T: ToOffset>(&self, position: T) -> impl Iterator<Item = char> + '_ {
2117 let offset = position.to_offset(self);
2118 self.visible_text.reversed_chars_at(offset)
2119 }
2120
2121 pub fn reversed_chunks_in_range<T: ToOffset>(&self, range: Range<T>) -> rope::Chunks<'_> {
2122 let range = range.start.to_offset(self)..range.end.to_offset(self);
2123 self.visible_text.reversed_chunks_in_range(range)
2124 }
2125
2126 pub fn bytes_in_range<T: ToOffset>(&self, range: Range<T>) -> rope::Bytes<'_> {
2127 let start = range.start.to_offset(self);
2128 let end = range.end.to_offset(self);
2129 self.visible_text.bytes_in_range(start..end)
2130 }
2131
2132 pub fn reversed_bytes_in_range<T: ToOffset>(&self, range: Range<T>) -> rope::Bytes<'_> {
2133 let start = range.start.to_offset(self);
2134 let end = range.end.to_offset(self);
2135 self.visible_text.reversed_bytes_in_range(start..end)
2136 }
2137
2138 /// Returns the text in the given range.
2139 ///
2140 /// Note: This always uses `\n` as the line separator, regardless of the buffer's
2141 /// actual line ending setting.
2142 pub fn text_for_range<T: ToOffset>(&self, range: Range<T>) -> Chunks<'_> {
2143 let start = range.start.to_offset(self);
2144 let end = range.end.to_offset(self);
2145 self.visible_text.chunks_in_range(start..end)
2146 }
2147
2148 pub fn line_len(&self, row: u32) -> u32 {
2149 let row_start_offset = Point::new(row, 0).to_offset(self);
2150 let row_end_offset = if row >= self.max_point().row {
2151 self.len()
2152 } else {
2153 Point::new(row + 1, 0).to_previous_offset(self)
2154 };
2155 (row_end_offset - row_start_offset) as u32
2156 }
2157
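    /// Returns the line indents for each row in the given range, inclusive of the end row.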
2158 pub fn line_indents_in_row_range(
2159 &self,
2160 row_range: Range<u32>,
2161 ) -> impl Iterator<Item = (u32, LineIndent)> + '_ {
2162 let start = Point::new(row_range.start, 0).to_offset(self);
2163 let end = Point::new(row_range.end, self.line_len(row_range.end)).to_offset(self);
2164
2165 let mut chunks = self.as_rope().chunks_in_range(start..end);
2166 let mut row = row_range.start;
2167 let mut done = false;
2168 std::iter::from_fn(move || {
2169 if done {
2170 None
2171 } else {
2172 let indent = (row, LineIndent::from_chunks(&mut chunks));
2173 done = !chunks.next_line();
2174 row += 1;
2175 Some(indent)
2176 }
2177 })
2178 }
2179
2180 /// Returns the line indents in the given row range, exclusive of end row, in reversed order.
2181 pub fn reversed_line_indents_in_row_range(
2182 &self,
2183 row_range: Range<u32>,
2184 ) -> impl Iterator<Item = (u32, LineIndent)> + '_ {
2185 let start = Point::new(row_range.start, 0).to_offset(self);
2186
2187 let end_point;
2188 let end;
2189 if row_range.end > row_range.start {
2190 end_point = Point::new(row_range.end - 1, self.line_len(row_range.end - 1));
2191 end = end_point.to_offset(self);
2192 } else {
2193 end_point = Point::new(row_range.start, 0);
2194 end = start;
2195 };
2196
2197 let mut chunks = self.as_rope().chunks_in_range(start..end);
2198 // Move the cursor to the start of the last line if it's not empty.
2199 chunks.seek(end);
2200 if end_point.column > 0 {
2201 chunks.prev_line();
2202 }
2203
2204 let mut row = end_point.row;
2205 let mut done = false;
2206 std::iter::from_fn(move || {
2207 if done {
2208 None
2209 } else {
2210 let initial_offset = chunks.offset();
2211 let indent = (row, LineIndent::from_chunks(&mut chunks));
2212 if chunks.offset() > initial_offset {
2213 chunks.prev_line();
2214 }
2215 done = !chunks.prev_line();
2216 if !done {
2217 row -= 1;
2218 }
2219
2220 Some(indent)
2221 }
2222 })
2223 }
2224
2225 pub fn line_indent_for_row(&self, row: u32) -> LineIndent {
2226 LineIndent::from_iter(self.chars_at(Point::new(row, 0)))
2227 }
2228
2229 pub fn is_line_blank(&self, row: u32) -> bool {
2230 self.text_for_range(Point::new(row, 0)..Point::new(row, self.line_len(row)))
2231 .all(|chunk| chunk.matches(|c: char| !c.is_whitespace()).next().is_none())
2232 }
2233
2234 pub fn text_summary_for_range<D, O: ToOffset>(&self, range: Range<O>) -> D
2235 where
2236 D: TextDimension,
2237 {
2238 self.visible_text
2239 .cursor(range.start.to_offset(self))
2240 .summary(range.end.to_offset(self))
2241 }
2242
2243 pub fn summaries_for_anchors<'a, D, A>(&'a self, anchors: A) -> impl 'a + Iterator<Item = D>
2244 where
2245 D: 'a + TextDimension,
2246 A: 'a + IntoIterator<Item = &'a Anchor>,
2247 {
2248 let anchors = anchors.into_iter();
2249 self.summaries_for_anchors_with_payload::<D, _, ()>(anchors.map(|a| (a, ())))
2250 .map(|d| d.0)
2251 }
2252
2253 pub fn summaries_for_anchors_with_payload<'a, D, A, T>(
2254 &'a self,
2255 anchors: A,
2256 ) -> impl 'a + Iterator<Item = (D, T)>
2257 where
2258 D: 'a + TextDimension,
2259 A: 'a + IntoIterator<Item = (&'a Anchor, T)>,
2260 {
2261 let anchors = anchors.into_iter();
2262 let mut insertion_cursor = self.insertions.cursor::<InsertionFragmentKey>(());
2263 let mut fragment_cursor = self
2264 .fragments
2265 .cursor::<Dimensions<Option<&Locator>, usize>>(&None);
2266 let mut text_cursor = self.visible_text.cursor(0);
2267 let mut position = D::zero(());
2268
2269 anchors.map(move |(anchor, payload)| {
2270 if *anchor == Anchor::MIN {
2271 return (D::zero(()), payload);
2272 } else if *anchor == Anchor::MAX {
2273 return (D::from_text_summary(&self.visible_text.summary()), payload);
2274 }
2275
2276 let anchor_key = InsertionFragmentKey {
2277 timestamp: anchor.timestamp,
2278 split_offset: anchor.offset,
2279 };
2280 insertion_cursor.seek(&anchor_key, anchor.bias);
2281 if let Some(insertion) = insertion_cursor.item() {
2282 let comparison = sum_tree::KeyedItem::key(insertion).cmp(&anchor_key);
2283 if comparison == Ordering::Greater
2284 || (anchor.bias == Bias::Left
2285 && comparison == Ordering::Equal
2286 && anchor.offset > 0)
2287 {
2288 insertion_cursor.prev();
2289 }
2290 } else {
2291 insertion_cursor.prev();
2292 }
2293 let insertion = insertion_cursor.item().expect("invalid insertion");
2294 assert_eq!(insertion.timestamp, anchor.timestamp, "invalid insertion");
2295
2296 fragment_cursor.seek_forward(&Some(&insertion.fragment_id), Bias::Left);
2297 let fragment = fragment_cursor.item().unwrap();
2298 let mut fragment_offset = fragment_cursor.start().1;
2299 if fragment.visible {
2300 fragment_offset += anchor.offset - insertion.split_offset;
2301 }
2302
2303 position.add_assign(&text_cursor.summary(fragment_offset));
2304 (position, payload)
2305 })
2306 }
2307
2308 pub fn summary_for_anchor<D>(&self, anchor: &Anchor) -> D
2309 where
2310 D: TextDimension,
2311 {
2312 self.text_summary_for_range(0..self.offset_for_anchor(anchor))
2313 }
2314
2315 pub fn offset_for_anchor(&self, anchor: &Anchor) -> usize {
2316 if *anchor == Anchor::MIN {
2317 0
2318 } else if *anchor == Anchor::MAX {
2319 self.visible_text.len()
2320 } else {
2321 debug_assert!(anchor.buffer_id == Some(self.remote_id));
2322 let anchor_key = InsertionFragmentKey {
2323 timestamp: anchor.timestamp,
2324 split_offset: anchor.offset,
2325 };
2326 let mut insertion_cursor = self.insertions.cursor::<InsertionFragmentKey>(());
2327 insertion_cursor.seek(&anchor_key, anchor.bias);
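            // The seek can land on the insertion fragment just after the anchor's
            // split point (or exactly at it, for a left-biased anchor with a nonzero
            // offset); in that case the anchor belongs to the previous fragment, so
            // step back.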
2328 if let Some(insertion) = insertion_cursor.item() {
2329 let comparison = sum_tree::KeyedItem::key(insertion).cmp(&anchor_key);
2330 if comparison == Ordering::Greater
2331 || (anchor.bias == Bias::Left
2332 && comparison == Ordering::Equal
2333 && anchor.offset > 0)
2334 {
2335 insertion_cursor.prev();
2336 }
2337 } else {
2338 insertion_cursor.prev();
2339 }
2340
2341 let Some(insertion) = insertion_cursor
2342 .item()
2343 .filter(|insertion| insertion.timestamp == anchor.timestamp)
2344 else {
2345 panic!(
2346 "invalid anchor {:?}. buffer id: {}, version: {:?}",
2347 anchor, self.remote_id, self.version
2348 );
2349 };
2350
2351 let (start, _, item) = self
2352 .fragments
2353 .find::<Dimensions<Option<&Locator>, usize>, _>(
2354 &None,
2355 &Some(&insertion.fragment_id),
2356 Bias::Left,
2357 );
2358 let fragment = item.unwrap();
2359 let mut fragment_offset = start.1;
2360 if fragment.visible {
2361 fragment_offset += anchor.offset - insertion.split_offset;
2362 }
2363 fragment_offset
2364 }
2365 }
2366
2367 fn fragment_id_for_anchor(&self, anchor: &Anchor) -> &Locator {
2368 self.try_fragment_id_for_anchor(anchor).unwrap_or_else(|| {
2369 panic!(
2370 "invalid anchor {:?}. buffer id: {}, version: {:?}",
2371 anchor, self.remote_id, self.version,
2372 )
2373 })
2374 }
2375
2376 fn try_fragment_id_for_anchor(&self, anchor: &Anchor) -> Option<&Locator> {
2377 if *anchor == Anchor::MIN {
2378 Some(Locator::min_ref())
2379 } else if *anchor == Anchor::MAX {
2380 Some(Locator::max_ref())
2381 } else {
2382 let anchor_key = InsertionFragmentKey {
2383 timestamp: anchor.timestamp,
2384 split_offset: anchor.offset,
2385 };
2386 let mut insertion_cursor = self.insertions.cursor::<InsertionFragmentKey>(());
2387 insertion_cursor.seek(&anchor_key, anchor.bias);
2388 if let Some(insertion) = insertion_cursor.item() {
2389 let comparison = sum_tree::KeyedItem::key(insertion).cmp(&anchor_key);
2390 if comparison == Ordering::Greater
2391 || (anchor.bias == Bias::Left
2392 && comparison == Ordering::Equal
2393 && anchor.offset > 0)
2394 {
2395 insertion_cursor.prev();
2396 }
2397 } else {
2398 insertion_cursor.prev();
2399 }
2400
2401 insertion_cursor
2402 .item()
2403 .filter(|insertion| {
2404 !cfg!(debug_assertions) || insertion.timestamp == anchor.timestamp
2405 })
2406 .map(|insertion| &insertion.fragment_id)
2407 }
2408 }
2409
2410 pub fn anchor_before<T: ToOffset>(&self, position: T) -> Anchor {
2411 self.anchor_at(position, Bias::Left)
2412 }
2413
2414 pub fn anchor_after<T: ToOffset>(&self, position: T) -> Anchor {
2415 self.anchor_at(position, Bias::Right)
2416 }
2417
2418 pub fn anchor_at<T: ToOffset>(&self, position: T, bias: Bias) -> Anchor {
2419 self.anchor_at_offset(position.to_offset(self), bias)
2420 }
2421
2422 fn anchor_at_offset(&self, offset: usize, bias: Bias) -> Anchor {
2423 if bias == Bias::Left && offset == 0 {
2424 Anchor::MIN
2425 } else if bias == Bias::Right && offset == self.len() {
2426 Anchor::MAX
2427 } else {
2428 if offset > self.visible_text.len() {
2429 panic!("offset {} is out of bounds", offset)
2430 }
2431 self.visible_text.assert_char_boundary(offset);
2432 let (start, _, item) = self.fragments.find::<usize, _>(&None, &offset, bias);
2433 let fragment = item.unwrap();
2434 let overshoot = offset - start;
2435 Anchor {
2436 timestamp: fragment.timestamp,
2437 offset: fragment.insertion_offset + overshoot,
2438 bias,
2439 buffer_id: Some(self.remote_id),
2440 }
2441 }
2442 }
2443
2444 pub fn can_resolve(&self, anchor: &Anchor) -> bool {
2445 *anchor == Anchor::MIN
2446 || *anchor == Anchor::MAX
2447 || (Some(self.remote_id) == anchor.buffer_id && self.version.observed(anchor.timestamp))
2448 }
2449
2450 pub fn clip_offset(&self, offset: usize, bias: Bias) -> usize {
2451 self.visible_text.clip_offset(offset, bias)
2452 }
2453
2454 pub fn clip_point(&self, point: Point, bias: Bias) -> Point {
2455 self.visible_text.clip_point(point, bias)
2456 }
2457
2458 pub fn clip_offset_utf16(&self, offset: OffsetUtf16, bias: Bias) -> OffsetUtf16 {
2459 self.visible_text.clip_offset_utf16(offset, bias)
2460 }
2461
2462 pub fn clip_point_utf16(&self, point: Unclipped<PointUtf16>, bias: Bias) -> PointUtf16 {
2463 self.visible_text.clip_point_utf16(point, bias)
2464 }
2465
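    /// Returns the edits made since the given version, expressed in dimension `D`
    /// (e.g. byte offsets or points): each edit's old range is relative to `since`
    /// and its new range is relative to this snapshot.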
2466 pub fn edits_since<'a, D>(
2467 &'a self,
2468 since: &'a clock::Global,
2469 ) -> impl 'a + Iterator<Item = Edit<D>>
2470 where
2471 D: TextDimension + Ord,
2472 {
2473 self.edits_since_in_range(since, Anchor::MIN..Anchor::MAX)
2474 }
2475
2476 pub fn anchored_edits_since<'a, D>(
2477 &'a self,
2478 since: &'a clock::Global,
2479 ) -> impl 'a + Iterator<Item = (Edit<D>, Range<Anchor>)>
2480 where
2481 D: TextDimension + Ord,
2482 {
2483 self.anchored_edits_since_in_range(since, Anchor::MIN..Anchor::MAX)
2484 }
2485
2486 pub fn edits_since_in_range<'a, D>(
2487 &'a self,
2488 since: &'a clock::Global,
2489 range: Range<Anchor>,
2490 ) -> impl 'a + Iterator<Item = Edit<D>>
2491 where
2492 D: TextDimension + Ord,
2493 {
2494 self.anchored_edits_since_in_range(since, range)
2495 .map(|item| item.0)
2496 }
2497
2498 pub fn anchored_edits_since_in_range<'a, D>(
2499 &'a self,
2500 since: &'a clock::Global,
2501 range: Range<Anchor>,
2502 ) -> impl 'a + Iterator<Item = (Edit<D>, Range<Anchor>)>
2503 where
2504 D: TextDimension + Ord,
2505 {
2506 let fragments_cursor = if *since == self.version {
2507 None
2508 } else {
2509 let mut cursor = self.fragments.filter(&None, move |summary| {
2510 !since.observed_all(&summary.max_version)
2511 });
2512 cursor.next();
2513 Some(cursor)
2514 };
2515 let start_fragment_id = self.fragment_id_for_anchor(&range.start);
2516 let (start, _, item) = self
2517 .fragments
2518 .find::<Dimensions<Option<&Locator>, FragmentTextSummary>, _>(
2519 &None,
2520 &Some(start_fragment_id),
2521 Bias::Left,
2522 );
2523 let mut visible_start = start.1.visible;
2524 let mut deleted_start = start.1.deleted;
2525 if let Some(fragment) = item {
2526 let overshoot = range.start.offset - fragment.insertion_offset;
2527 if fragment.visible {
2528 visible_start += overshoot;
2529 } else {
2530 deleted_start += overshoot;
2531 }
2532 }
2533 let end_fragment_id = self.fragment_id_for_anchor(&range.end);
2534
2535 Edits {
2536 visible_cursor: self.visible_text.cursor(visible_start),
2537 deleted_cursor: self.deleted_text.cursor(deleted_start),
2538 fragments_cursor,
2539 undos: &self.undo_map,
2540 since,
2541 old_end: D::zero(()),
2542 new_end: D::zero(()),
2543 range: (start_fragment_id, range.start.offset)..(end_fragment_id, range.end.offset),
2544 buffer_id: self.remote_id,
2545 }
2546 }
2547
2548 pub fn has_edits_since_in_range(&self, since: &clock::Global, range: Range<Anchor>) -> bool {
2549 if *since != self.version {
2550 let start_fragment_id = self.fragment_id_for_anchor(&range.start);
2551 let end_fragment_id = self.fragment_id_for_anchor(&range.end);
2552 let mut cursor = self.fragments.filter::<_, usize>(&None, move |summary| {
2553 !since.observed_all(&summary.max_version)
2554 });
2555 cursor.next();
2556 while let Some(fragment) = cursor.item() {
2557 if fragment.id > *end_fragment_id {
2558 break;
2559 }
2560 if fragment.id > *start_fragment_id {
2561 let was_visible = fragment.was_visible(since, &self.undo_map);
2562 let is_visible = fragment.visible;
2563 if was_visible != is_visible {
2564 return true;
2565 }
2566 }
2567 cursor.next();
2568 }
2569 }
2570 false
2571 }
2572
2573 pub fn has_edits_since(&self, since: &clock::Global) -> bool {
2574 if *since != self.version {
2575 let mut cursor = self.fragments.filter::<_, usize>(&None, move |summary| {
2576 !since.observed_all(&summary.max_version)
2577 });
2578 cursor.next();
2579 while let Some(fragment) = cursor.item() {
2580 let was_visible = fragment.was_visible(since, &self.undo_map);
2581 let is_visible = fragment.visible;
2582 if was_visible != is_visible {
2583 return true;
2584 }
2585 cursor.next();
2586 }
2587 }
2588 false
2589 }
2590
2591 pub fn range_to_version(&self, range: Range<usize>, version: &clock::Global) -> Range<usize> {
2592 let mut offsets = self.offsets_to_version([range.start, range.end], version);
2593 offsets.next().unwrap()..offsets.next().unwrap()
2594 }
2595
2596 /// Converts the given sequence of offsets into their corresponding offsets
2597 /// at a prior version of this buffer.
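    ///
    /// The offsets are mapped in a single forward pass over the edits made since
    /// `version`, so they are expected in non-decreasing order. A hedged sketch of
    /// the intended behavior (the surrounding setup is assumed, not shown):
    ///
    /// ```ignore
    /// // Suppose the buffer was "hello" at `old_version` and is "hello, world" now.
    /// let offsets: Vec<usize> = snapshot
    ///     .offsets_to_version([0, 5, 12], &old_version)
    ///     .collect();
    /// // Offsets at or beyond the insertion clamp back to the old text's bounds.
    /// assert_eq!(offsets, vec![0, 5, 5]);
    /// ```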
2598 pub fn offsets_to_version<'a>(
2599 &'a self,
2600 offsets: impl 'a + IntoIterator<Item = usize>,
2601 version: &'a clock::Global,
2602 ) -> impl 'a + Iterator<Item = usize> {
2603 let mut edits = self.edits_since(version).peekable();
2604 let mut last_old_end = 0;
2605 let mut last_new_end = 0;
2606 offsets.into_iter().map(move |new_offset| {
2607 while let Some(edit) = edits.peek() {
2608 if edit.new.start > new_offset {
2609 break;
2610 }
2611
2612 if edit.new.end <= new_offset {
2613 last_new_end = edit.new.end;
2614 last_old_end = edit.old.end;
2615 edits.next();
2616 continue;
2617 }
2618
2619 let overshoot = new_offset - edit.new.start;
2620 return (edit.old.start + overshoot).min(edit.old.end);
2621 }
2622
2623 last_old_end + new_offset.saturating_sub(last_new_end)
2624 })
2625 }
2626
2627 /// Visually annotates a position or range with the `Debug` representation of a value. The
2628 /// call site of this function is used as the key; previous annotations with the same key are removed.
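    ///
    /// Only available in debug builds. A hedged usage sketch (the range and label are
    /// arbitrary):
    ///
    /// ```ignore
    /// // Highlight byte range 4..10 with a label; a later call from the same call
    /// // site replaces this annotation.
    /// snapshot.debug(&(4usize..10), "candidate range");
    /// ```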
2629 #[cfg(debug_assertions)]
2630 #[track_caller]
2631 pub fn debug<R, V>(&self, ranges: &R, value: V)
2632 where
2633 R: debug::ToDebugRanges,
2634 V: std::fmt::Debug,
2635 {
2636 self.debug_with_key(std::panic::Location::caller(), ranges, value);
2637 }
2638
2639 /// Visually annotates a position or range with the `Debug` representation of a value. Previous
2640 /// debug annotations with the same key will be removed. The key is also used to determine the
2641 /// annotation's color.
2642 #[cfg(debug_assertions)]
2643 pub fn debug_with_key<K, R, V>(&self, key: &K, ranges: &R, value: V)
2644 where
2645 K: std::hash::Hash + 'static,
2646 R: debug::ToDebugRanges,
2647 V: std::fmt::Debug,
2648 {
2649 let ranges = ranges
2650 .to_debug_ranges(self)
2651 .into_iter()
2652 .map(|range| self.anchor_after(range.start)..self.anchor_before(range.end))
2653 .collect();
2654 debug::GlobalDebugRanges::with_locked(|debug_ranges| {
2655 debug_ranges.insert(key, ranges, format!("{value:?}").into());
2656 });
2657 }
2658}
2659
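/// Incrementally rebuilds the visible and deleted ropes while an edit is applied:
/// unchanged spans are copied forward from the old ropes, and each fragment's text is
/// routed into the new visible or deleted rope depending on whether it remains visible.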
2660struct RopeBuilder<'a> {
2661 old_visible_cursor: rope::Cursor<'a>,
2662 old_deleted_cursor: rope::Cursor<'a>,
2663 new_visible: Rope,
2664 new_deleted: Rope,
2665}
2666
2667impl<'a> RopeBuilder<'a> {
2668 fn new(old_visible_cursor: rope::Cursor<'a>, old_deleted_cursor: rope::Cursor<'a>) -> Self {
2669 Self {
2670 old_visible_cursor,
2671 old_deleted_cursor,
2672 new_visible: Rope::new(),
2673 new_deleted: Rope::new(),
2674 }
2675 }
2676
2677 fn append(&mut self, len: FragmentTextSummary) {
2678 self.push(len.visible, true, true);
2679 self.push(len.deleted, false, false);
2680 }
2681
2682 fn push_fragment(&mut self, fragment: &Fragment, was_visible: bool) {
2683 debug_assert!(fragment.len > 0);
2684 self.push(fragment.len, was_visible, fragment.visible)
2685 }
2686
2687 fn push(&mut self, len: usize, was_visible: bool, is_visible: bool) {
2688 let text = if was_visible {
2689 self.old_visible_cursor
2690 .slice(self.old_visible_cursor.offset() + len)
2691 } else {
2692 self.old_deleted_cursor
2693 .slice(self.old_deleted_cursor.offset() + len)
2694 };
2695 if is_visible {
2696 self.new_visible.append(text);
2697 } else {
2698 self.new_deleted.append(text);
2699 }
2700 }
2701
2702 fn push_str(&mut self, text: &str) {
2703 self.new_visible.push(text);
2704 }
2705
2706 fn finish(mut self) -> (Rope, Rope) {
2707 self.new_visible.append(self.old_visible_cursor.suffix());
2708 self.new_deleted.append(self.old_deleted_cursor.suffix());
2709 (self.new_visible, self.new_deleted)
2710 }
2711}
2712
2713impl<D: TextDimension + Ord, F: FnMut(&FragmentSummary) -> bool> Iterator for Edits<'_, D, F> {
2714 type Item = (Edit<D>, Range<Anchor>);
2715
2716 fn next(&mut self) -> Option<Self::Item> {
2717 let mut pending_edit: Option<Self::Item> = None;
2718 let cursor = self.fragments_cursor.as_mut()?;
2719
2720 while let Some(fragment) = cursor.item() {
2721 if fragment.id < *self.range.start.0 {
2722 cursor.next();
2723 continue;
2724 } else if fragment.id > *self.range.end.0 {
2725 break;
2726 }
2727
2728 if cursor.start().visible > self.visible_cursor.offset() {
2729 let summary = self.visible_cursor.summary(cursor.start().visible);
2730 self.old_end.add_assign(&summary);
2731 self.new_end.add_assign(&summary);
2732 }
2733
2734 if pending_edit
2735 .as_ref()
2736 .is_some_and(|(change, _)| change.new.end < self.new_end)
2737 {
2738 break;
2739 }
2740
2741 let start_anchor = Anchor {
2742 timestamp: fragment.timestamp,
2743 offset: fragment.insertion_offset,
2744 bias: Bias::Right,
2745 buffer_id: Some(self.buffer_id),
2746 };
2747 let end_anchor = Anchor {
2748 timestamp: fragment.timestamp,
2749 offset: fragment.insertion_offset + fragment.len,
2750 bias: Bias::Left,
2751 buffer_id: Some(self.buffer_id),
2752 };
2753
2754 if !fragment.was_visible(self.since, self.undos) && fragment.visible {
2755 let mut visible_end = cursor.end().visible;
2756 if fragment.id == *self.range.end.0 {
2757 visible_end = cmp::min(
2758 visible_end,
2759 cursor.start().visible + (self.range.end.1 - fragment.insertion_offset),
2760 );
2761 }
2762
2763 let fragment_summary = self.visible_cursor.summary(visible_end);
2764 let mut new_end = self.new_end;
2765 new_end.add_assign(&fragment_summary);
2766 if let Some((edit, range)) = pending_edit.as_mut() {
2767 edit.new.end = new_end;
2768 range.end = end_anchor;
2769 } else {
2770 pending_edit = Some((
2771 Edit {
2772 old: self.old_end..self.old_end,
2773 new: self.new_end..new_end,
2774 },
2775 start_anchor..end_anchor,
2776 ));
2777 }
2778
2779 self.new_end = new_end;
2780 } else if fragment.was_visible(self.since, self.undos) && !fragment.visible {
2781 let mut deleted_end = cursor.end().deleted;
2782 if fragment.id == *self.range.end.0 {
2783 deleted_end = cmp::min(
2784 deleted_end,
2785 cursor.start().deleted + (self.range.end.1 - fragment.insertion_offset),
2786 );
2787 }
2788
2789 if cursor.start().deleted > self.deleted_cursor.offset() {
2790 self.deleted_cursor.seek_forward(cursor.start().deleted);
2791 }
2792 let fragment_summary = self.deleted_cursor.summary(deleted_end);
2793 let mut old_end = self.old_end;
2794 old_end.add_assign(&fragment_summary);
2795 if let Some((edit, range)) = pending_edit.as_mut() {
2796 edit.old.end = old_end;
2797 range.end = end_anchor;
2798 } else {
2799 pending_edit = Some((
2800 Edit {
2801 old: self.old_end..old_end,
2802 new: self.new_end..self.new_end,
2803 },
2804 start_anchor..end_anchor,
2805 ));
2806 }
2807
2808 self.old_end = old_end;
2809 }
2810
2811 cursor.next();
2812 }
2813
2814 pending_edit
2815 }
2816}
2817
2818impl Fragment {
2819 fn is_visible(&self, undos: &UndoMap) -> bool {
2820 !undos.is_undone(self.timestamp) && self.deletions.iter().all(|d| undos.is_undone(*d))
2821 }
2822
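    /// Whether this fragment was visible at `version`: its insertion must have been
    /// observed and not undone as of that version, and every deletion observed by
    /// that version must itself have been undone.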
2823 fn was_visible(&self, version: &clock::Global, undos: &UndoMap) -> bool {
2824 (version.observed(self.timestamp) && !undos.was_undone(self.timestamp, version))
2825 && self
2826 .deletions
2827 .iter()
2828 .all(|d| !version.observed(*d) || undos.was_undone(*d, version))
2829 }
2830}
2831
2832impl sum_tree::Item for Fragment {
2833 type Summary = FragmentSummary;
2834
2835 fn summary(&self, _cx: &Option<clock::Global>) -> Self::Summary {
2836 let mut max_version = clock::Global::new();
2837 max_version.observe(self.timestamp);
2838 for deletion in &self.deletions {
2839 max_version.observe(*deletion);
2840 }
2841 max_version.join(&self.max_undos);
2842
2843 let mut min_insertion_version = clock::Global::new();
2844 min_insertion_version.observe(self.timestamp);
2845 let max_insertion_version = min_insertion_version.clone();
2846 if self.visible {
2847 FragmentSummary {
2848 max_id: self.id.clone(),
2849 text: FragmentTextSummary {
2850 visible: self.len,
2851 deleted: 0,
2852 },
2853 max_version,
2854 min_insertion_version,
2855 max_insertion_version,
2856 }
2857 } else {
2858 FragmentSummary {
2859 max_id: self.id.clone(),
2860 text: FragmentTextSummary {
2861 visible: 0,
2862 deleted: self.len,
2863 },
2864 max_version,
2865 min_insertion_version,
2866 max_insertion_version,
2867 }
2868 }
2869 }
2870}
2871
2872impl sum_tree::Summary for FragmentSummary {
2873 type Context<'a> = &'a Option<clock::Global>;
2874
2875 fn zero(_cx: Self::Context<'_>) -> Self {
2876 Default::default()
2877 }
2878
2879 fn add_summary(&mut self, other: &Self, _: Self::Context<'_>) {
2880 self.max_id.assign(&other.max_id);
2881 self.text.visible += &other.text.visible;
2882 self.text.deleted += &other.text.deleted;
2883 self.max_version.join(&other.max_version);
2884 self.min_insertion_version
2885 .meet(&other.min_insertion_version);
2886 self.max_insertion_version
2887 .join(&other.max_insertion_version);
2888 }
2889}
2890
2891impl Default for FragmentSummary {
2892 fn default() -> Self {
2893 FragmentSummary {
2894 max_id: Locator::min(),
2895 text: FragmentTextSummary::default(),
2896 max_version: clock::Global::new(),
2897 min_insertion_version: clock::Global::new(),
2898 max_insertion_version: clock::Global::new(),
2899 }
2900 }
2901}
2902
2903impl sum_tree::Item for InsertionFragment {
2904 type Summary = InsertionFragmentKey;
2905
2906 fn summary(&self, _cx: ()) -> Self::Summary {
2907 InsertionFragmentKey {
2908 timestamp: self.timestamp,
2909 split_offset: self.split_offset,
2910 }
2911 }
2912}
2913
2914impl sum_tree::KeyedItem for InsertionFragment {
2915 type Key = InsertionFragmentKey;
2916
2917 fn key(&self) -> Self::Key {
2918 sum_tree::Item::summary(self, ())
2919 }
2920}
2921
2922impl InsertionFragment {
2923 fn new(fragment: &Fragment) -> Self {
2924 Self {
2925 timestamp: fragment.timestamp,
2926 split_offset: fragment.insertion_offset,
2927 fragment_id: fragment.id.clone(),
2928 }
2929 }
2930
2931 fn insert_new(fragment: &Fragment) -> sum_tree::Edit<Self> {
2932 sum_tree::Edit::Insert(Self::new(fragment))
2933 }
2934}
2935
2936impl sum_tree::ContextLessSummary for InsertionFragmentKey {
2937 fn zero() -> Self {
2938 InsertionFragmentKey {
2939 timestamp: Lamport::MIN,
2940 split_offset: 0,
2941 }
2942 }
2943
2944 fn add_summary(&mut self, summary: &Self) {
2945 *self = *summary;
2946 }
2947}
2948
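/// An offset into the buffer's full content, counting deleted text as well as visible text.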
2949#[derive(Copy, Clone, Debug, Default, PartialEq, Eq, PartialOrd, Ord, Hash)]
2950pub struct FullOffset(pub usize);
2951
2952impl ops::AddAssign<usize> for FullOffset {
2953 fn add_assign(&mut self, rhs: usize) {
2954 self.0 += rhs;
2955 }
2956}
2957
2958impl ops::Add<usize> for FullOffset {
2959 type Output = Self;
2960
2961 fn add(mut self, rhs: usize) -> Self::Output {
2962 self += rhs;
2963 self
2964 }
2965}
2966
2967impl ops::Sub for FullOffset {
2968 type Output = usize;
2969
2970 fn sub(self, rhs: Self) -> Self::Output {
2971 self.0 - rhs.0
2972 }
2973}
2974
2975impl sum_tree::Dimension<'_, FragmentSummary> for usize {
2976 fn zero(_: &Option<clock::Global>) -> Self {
2977 Default::default()
2978 }
2979
2980 fn add_summary(&mut self, summary: &FragmentSummary, _: &Option<clock::Global>) {
2981 *self += summary.text.visible;
2982 }
2983}
2984
2985impl sum_tree::Dimension<'_, FragmentSummary> for FullOffset {
2986 fn zero(_: &Option<clock::Global>) -> Self {
2987 Default::default()
2988 }
2989
2990 fn add_summary(&mut self, summary: &FragmentSummary, _: &Option<clock::Global>) {
2991 self.0 += summary.text.visible + summary.text.deleted;
2992 }
2993}
2994
2995impl<'a> sum_tree::Dimension<'a, FragmentSummary> for Option<&'a Locator> {
2996 fn zero(_: &Option<clock::Global>) -> Self {
2997 Default::default()
2998 }
2999
3000 fn add_summary(&mut self, summary: &'a FragmentSummary, _: &Option<clock::Global>) {
3001 *self = Some(&summary.max_id);
3002 }
3003}
3004
3005impl sum_tree::SeekTarget<'_, FragmentSummary, FragmentTextSummary> for usize {
3006 fn cmp(
3007 &self,
3008 cursor_location: &FragmentTextSummary,
3009 _: &Option<clock::Global>,
3010 ) -> cmp::Ordering {
3011 Ord::cmp(self, &cursor_location.visible)
3012 }
3013}
3014
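/// A [`FullOffset`] as observed at a particular version. `Invalid` marks spans whose
/// insertions that version has only partially observed, so no unambiguous offset
/// exists there.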
3015#[derive(Copy, Clone, Debug, Eq, PartialEq)]
3016enum VersionedFullOffset {
3017 Offset(FullOffset),
3018 Invalid,
3019}
3020
3021impl VersionedFullOffset {
3022 fn full_offset(&self) -> FullOffset {
3023 if let Self::Offset(position) = self {
3024 *position
3025 } else {
3026 panic!("invalid version")
3027 }
3028 }
3029}
3030
3031impl Default for VersionedFullOffset {
3032 fn default() -> Self {
3033 Self::Offset(Default::default())
3034 }
3035}
3036
3037impl<'a> sum_tree::Dimension<'a, FragmentSummary> for VersionedFullOffset {
3038 fn zero(_cx: &Option<clock::Global>) -> Self {
3039 Default::default()
3040 }
3041
3042 fn add_summary(&mut self, summary: &'a FragmentSummary, cx: &Option<clock::Global>) {
3043 if let Self::Offset(offset) = self {
3044 let version = cx.as_ref().unwrap();
3045 if version.observed_all(&summary.max_insertion_version) {
3046 *offset += summary.text.visible + summary.text.deleted;
3047 } else if version.observed_any(&summary.min_insertion_version) {
3048 *self = Self::Invalid;
3049 }
3050 }
3051 }
3052}
3053
3054impl sum_tree::SeekTarget<'_, FragmentSummary, Self> for VersionedFullOffset {
3055 fn cmp(&self, cursor_position: &Self, _: &Option<clock::Global>) -> cmp::Ordering {
3056 match (self, cursor_position) {
3057 (Self::Offset(a), Self::Offset(b)) => Ord::cmp(a, b),
3058 (Self::Offset(_), Self::Invalid) => cmp::Ordering::Less,
3059 (Self::Invalid, _) => unreachable!(),
3060 }
3061 }
3062}
3063
3064impl Operation {
3065 fn replica_id(&self) -> ReplicaId {
3066 operation_queue::Operation::lamport_timestamp(self).replica_id
3067 }
3068
3069 pub fn timestamp(&self) -> clock::Lamport {
3070 match self {
3071 Operation::Edit(edit) => edit.timestamp,
3072 Operation::Undo(undo) => undo.timestamp,
3073 }
3074 }
3075
3076 pub fn as_edit(&self) -> Option<&EditOperation> {
3077 match self {
3078 Operation::Edit(edit) => Some(edit),
3079 _ => None,
3080 }
3081 }
3082
3083 pub fn is_edit(&self) -> bool {
3084 matches!(self, Operation::Edit { .. })
3085 }
3086}
3087
3088impl operation_queue::Operation for Operation {
3089 fn lamport_timestamp(&self) -> clock::Lamport {
3090 match self {
3091 Operation::Edit(edit) => edit.timestamp,
3092 Operation::Undo(undo) => undo.timestamp,
3093 }
3094 }
3095}
3096
3097pub trait ToOffset {
3098 fn to_offset(&self, snapshot: &BufferSnapshot) -> usize;
3099 /// Returns the next offset in the buffer after this position, respecting UTF-8 character boundaries.
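    ///
    /// For example, if the character at the current offset is the two-byte `é`, the
    /// returned offset is two bytes ahead rather than one.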
3100 fn to_next_offset(&self, snapshot: &BufferSnapshot) -> usize {
3101 snapshot
3102 .visible_text
3103 .ceil_char_boundary(self.to_offset(snapshot) + 1)
3104 }
3105 /// Returns the previous offset in the buffer before this position, respecting UTF-8 character boundaries.
3106 fn to_previous_offset(&self, snapshot: &BufferSnapshot) -> usize {
3107 snapshot
3108 .visible_text
3109 .floor_char_boundary(self.to_offset(snapshot).saturating_sub(1))
3110 }
3111}
3112
3113impl ToOffset for Point {
3114 fn to_offset(&self, snapshot: &BufferSnapshot) -> usize {
3115 snapshot.point_to_offset(*self)
3116 }
3117}
3118
3119impl ToOffset for usize {
3120 #[track_caller]
3121 fn to_offset(&self, snapshot: &BufferSnapshot) -> usize {
3122 assert!(
3123 *self <= snapshot.len(),
3124 "offset {} is out of range, snapshot length is {}",
3125 self,
3126 snapshot.len()
3127 );
3128 *self
3129 }
3130}
3131
3132impl ToOffset for Anchor {
3133 fn to_offset(&self, snapshot: &BufferSnapshot) -> usize {
3134 snapshot.summary_for_anchor(self)
3135 }
3136}
3137
3138impl<T: ToOffset> ToOffset for &T {
3139 fn to_offset(&self, content: &BufferSnapshot) -> usize {
3140 (*self).to_offset(content)
3141 }
3142}
3143
3144impl ToOffset for PointUtf16 {
3145 fn to_offset(&self, snapshot: &BufferSnapshot) -> usize {
3146 snapshot.point_utf16_to_offset(*self)
3147 }
3148}
3149
3150impl ToOffset for Unclipped<PointUtf16> {
3151 fn to_offset(&self, snapshot: &BufferSnapshot) -> usize {
3152 snapshot.unclipped_point_utf16_to_offset(*self)
3153 }
3154}
3155
3156pub trait ToPoint {
3157 fn to_point(&self, snapshot: &BufferSnapshot) -> Point;
3158}
3159
3160impl ToPoint for Anchor {
3161 fn to_point(&self, snapshot: &BufferSnapshot) -> Point {
3162 snapshot.summary_for_anchor(self)
3163 }
3164}
3165
3166impl ToPoint for usize {
3167 fn to_point(&self, snapshot: &BufferSnapshot) -> Point {
3168 snapshot.offset_to_point(*self)
3169 }
3170}
3171
3172impl ToPoint for Point {
3173 fn to_point(&self, _: &BufferSnapshot) -> Point {
3174 *self
3175 }
3176}
3177
3178impl ToPoint for Unclipped<PointUtf16> {
3179 fn to_point(&self, snapshot: &BufferSnapshot) -> Point {
3180 snapshot.unclipped_point_utf16_to_point(*self)
3181 }
3182}
3183
3184pub trait ToPointUtf16 {
3185 fn to_point_utf16(&self, snapshot: &BufferSnapshot) -> PointUtf16;
3186}
3187
3188impl ToPointUtf16 for Anchor {
3189 fn to_point_utf16(&self, snapshot: &BufferSnapshot) -> PointUtf16 {
3190 snapshot.summary_for_anchor(self)
3191 }
3192}
3193
3194impl ToPointUtf16 for usize {
3195 fn to_point_utf16(&self, snapshot: &BufferSnapshot) -> PointUtf16 {
3196 snapshot.offset_to_point_utf16(*self)
3197 }
3198}
3199
3200impl ToPointUtf16 for PointUtf16 {
3201 fn to_point_utf16(&self, _: &BufferSnapshot) -> PointUtf16 {
3202 *self
3203 }
3204}
3205
3206impl ToPointUtf16 for Point {
3207 fn to_point_utf16(&self, snapshot: &BufferSnapshot) -> PointUtf16 {
3208 snapshot.point_to_point_utf16(*self)
3209 }
3210}
3211
3212pub trait ToOffsetUtf16 {
3213 fn to_offset_utf16(&self, snapshot: &BufferSnapshot) -> OffsetUtf16;
3214}
3215
3216impl ToOffsetUtf16 for Anchor {
3217 fn to_offset_utf16(&self, snapshot: &BufferSnapshot) -> OffsetUtf16 {
3218 snapshot.summary_for_anchor(self)
3219 }
3220}
3221
3222impl ToOffsetUtf16 for usize {
3223 fn to_offset_utf16(&self, snapshot: &BufferSnapshot) -> OffsetUtf16 {
3224 snapshot.offset_to_offset_utf16(*self)
3225 }
3226}
3227
3228impl ToOffsetUtf16 for OffsetUtf16 {
3229 fn to_offset_utf16(&self, _snapshot: &BufferSnapshot) -> OffsetUtf16 {
3230 *self
3231 }
3232}
3233
3234pub trait FromAnchor {
3235 fn from_anchor(anchor: &Anchor, snapshot: &BufferSnapshot) -> Self;
3236}
3237
3238impl FromAnchor for Anchor {
3239 fn from_anchor(anchor: &Anchor, _snapshot: &BufferSnapshot) -> Self {
3240 *anchor
3241 }
3242}
3243
3244impl FromAnchor for Point {
3245 fn from_anchor(anchor: &Anchor, snapshot: &BufferSnapshot) -> Self {
3246 snapshot.summary_for_anchor(anchor)
3247 }
3248}
3249
3250impl FromAnchor for PointUtf16 {
3251 fn from_anchor(anchor: &Anchor, snapshot: &BufferSnapshot) -> Self {
3252 snapshot.summary_for_anchor(anchor)
3253 }
3254}
3255
3256impl FromAnchor for usize {
3257 fn from_anchor(anchor: &Anchor, snapshot: &BufferSnapshot) -> Self {
3258 snapshot.summary_for_anchor(anchor)
3259 }
3260}
3261
3262#[cfg(debug_assertions)]
3263pub mod debug {
3264 use super::*;
3265 use parking_lot::Mutex;
3266 use std::any::TypeId;
3267 use std::hash::{Hash, Hasher};
3268
3269 static GLOBAL_DEBUG_RANGES: Mutex<Option<GlobalDebugRanges>> = Mutex::new(None);
3270
3271 pub struct GlobalDebugRanges {
3272 pub ranges: Vec<DebugRange>,
3273 key_to_occurrence_index: HashMap<Key, usize>,
3274 next_occurrence_index: usize,
3275 }
3276
3277 pub struct DebugRange {
3278 key: Key,
3279 pub ranges: Vec<Range<Anchor>>,
3280 pub value: Arc<str>,
3281 pub occurrence_index: usize,
3282 }
3283
3284 #[derive(Debug, Clone, PartialEq, Eq, Hash)]
3285 struct Key {
3286 type_id: TypeId,
3287 hash: u64,
3288 }
3289
3290 impl GlobalDebugRanges {
3291 pub fn with_locked<R>(f: impl FnOnce(&mut Self) -> R) -> R {
3292 let mut state = GLOBAL_DEBUG_RANGES.lock();
3293 if state.is_none() {
3294 *state = Some(GlobalDebugRanges {
3295 ranges: Vec::new(),
3296 key_to_occurrence_index: HashMap::default(),
3297 next_occurrence_index: 0,
3298 });
3299 }
3300 if let Some(global_debug_ranges) = state.as_mut() {
3301 f(global_debug_ranges)
3302 } else {
3303 unreachable!()
3304 }
3305 }
3306
3307 pub fn insert<K: Hash + 'static>(
3308 &mut self,
3309 key: &K,
3310 ranges: Vec<Range<Anchor>>,
3311 value: Arc<str>,
3312 ) {
3313 let occurrence_index = *self
3314 .key_to_occurrence_index
3315 .entry(Key::new(key))
3316 .or_insert_with(|| {
3317 let occurrence_index = self.next_occurrence_index;
3318 self.next_occurrence_index += 1;
3319 occurrence_index
3320 });
3321 let key = Key::new(key);
3322 let existing = self
3323 .ranges
3324 .iter()
3325 .enumerate()
3326 .rfind(|(_, existing)| existing.key == key);
3327 if let Some((existing_ix, _)) = existing {
3328 self.ranges.remove(existing_ix);
3329 }
3330 self.ranges.push(DebugRange {
3331 ranges,
3332 key,
3333 value,
3334 occurrence_index,
3335 });
3336 }
3337
3338 pub fn remove<K: Hash + 'static>(&mut self, key: &K) {
3339 self.remove_impl(&Key::new(key));
3340 }
3341
3342 fn remove_impl(&mut self, key: &Key) {
3343 let existing = self
3344 .ranges
3345 .iter()
3346 .enumerate()
3347 .rfind(|(_, existing)| &existing.key == key);
3348 if let Some((existing_ix, _)) = existing {
3349 self.ranges.remove(existing_ix);
3350 }
3351 }
3352
3353 pub fn remove_all_with_key_type<K: 'static>(&mut self) {
3354 self.ranges
3355 .retain(|item| item.key.type_id != TypeId::of::<K>());
3356 }
3357 }
3358
3359 impl Key {
3360 fn new<K: Hash + 'static>(key: &K) -> Self {
3361 let type_id = TypeId::of::<K>();
3362 let mut hasher = collections::FxHasher::default();
3363 key.hash(&mut hasher);
3364 Key {
3365 type_id,
3366 hash: hasher.finish(),
3367 }
3368 }
3369 }
3370
3371 pub trait ToDebugRanges {
3372 fn to_debug_ranges(&self, snapshot: &BufferSnapshot) -> Vec<Range<usize>>;
3373 }
3374
3375 impl<T: ToOffset> ToDebugRanges for T {
3376 fn to_debug_ranges(&self, snapshot: &BufferSnapshot) -> Vec<Range<usize>> {
3377 [self.to_offset(snapshot)].to_debug_ranges(snapshot)
3378 }
3379 }
3380
3381 impl<T: ToOffset + Clone> ToDebugRanges for Range<T> {
3382 fn to_debug_ranges(&self, snapshot: &BufferSnapshot) -> Vec<Range<usize>> {
3383 [self.clone()].to_debug_ranges(snapshot)
3384 }
3385 }
3386
3387 impl<T: ToOffset> ToDebugRanges for Vec<T> {
3388 fn to_debug_ranges(&self, snapshot: &BufferSnapshot) -> Vec<Range<usize>> {
3389 self.as_slice().to_debug_ranges(snapshot)
3390 }
3391 }
3392
3393 impl<T: ToOffset> ToDebugRanges for Vec<Range<T>> {
3394 fn to_debug_ranges(&self, snapshot: &BufferSnapshot) -> Vec<Range<usize>> {
3395 self.as_slice().to_debug_ranges(snapshot)
3396 }
3397 }
3398
3399 impl<T: ToOffset> ToDebugRanges for [T] {
3400 fn to_debug_ranges(&self, snapshot: &BufferSnapshot) -> Vec<Range<usize>> {
3401 self.iter()
3402 .map(|item| {
3403 let offset = item.to_offset(snapshot);
3404 offset..offset
3405 })
3406 .collect()
3407 }
3408 }
3409
3410 impl<T: ToOffset> ToDebugRanges for [Range<T>] {
3411 fn to_debug_ranges(&self, snapshot: &BufferSnapshot) -> Vec<Range<usize>> {
3412 self.iter()
3413 .map(|range| range.start.to_offset(snapshot)..range.end.to_offset(snapshot))
3414 .collect()
3415 }
3416 }
3417}