1mod anchor;
2pub mod locator;
3#[cfg(any(test, feature = "test-support"))]
4pub mod network;
5pub mod operation_queue;
6mod patch;
7mod selection;
8pub mod subscription;
9#[cfg(test)]
10mod tests;
11mod undo_map;
12
13pub use anchor::*;
14use anyhow::{Context as _, Result};
15use clock::Lamport;
16pub use clock::ReplicaId;
17use collections::{HashMap, HashSet};
18use locator::Locator;
19use operation_queue::OperationQueue;
20pub use patch::Patch;
21use postage::{oneshot, prelude::*};
22
23use regex::Regex;
24pub use rope::*;
25pub use selection::*;
26use std::{
27 borrow::Cow,
28 cmp::{self, Ordering, Reverse},
29 fmt::Display,
30 future::Future,
31 iter::Iterator,
32 num::NonZeroU64,
33 ops::{self, Deref, Range, Sub},
34 str,
35 sync::{Arc, LazyLock},
36 time::{Duration, Instant},
37};
38pub use subscription::*;
39pub use sum_tree::Bias;
40use sum_tree::{Dimensions, FilterCursor, SumTree, TreeMap, TreeSet};
41use undo_map::UndoMap;
42
43#[cfg(any(test, feature = "test-support"))]
44use util::RandomCharIter;
45
46static LINE_SEPARATORS_REGEX: LazyLock<Regex> =
47 LazyLock::new(|| Regex::new(r"\r\n|\r").expect("Failed to create LINE_SEPARATORS_REGEX"));
48
49pub type TransactionId = clock::Lamport;
50
51pub struct Buffer {
52 snapshot: BufferSnapshot,
53 history: History,
54 deferred_ops: OperationQueue<Operation>,
55 deferred_replicas: HashSet<ReplicaId>,
56 pub lamport_clock: clock::Lamport,
57 subscriptions: Topic,
58 edit_id_resolvers: HashMap<clock::Lamport, Vec<oneshot::Sender<()>>>,
59 wait_for_version_txs: Vec<(clock::Global, oneshot::Sender<()>)>,
60}
61
62#[repr(transparent)]
63#[derive(Clone, Copy, Debug, Hash, PartialEq, PartialOrd, Ord, Eq)]
64pub struct BufferId(NonZeroU64);
65
66impl Display for BufferId {
67 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
68 write!(f, "{}", self.0)
69 }
70}
71
72impl From<NonZeroU64> for BufferId {
73 fn from(id: NonZeroU64) -> Self {
74 BufferId(id)
75 }
76}
77
78impl BufferId {
    /// Returns an error if `id` is outside the `BufferId` domain, i.e. if it is zero.
80 pub fn new(id: u64) -> anyhow::Result<Self> {
81 let id = NonZeroU64::new(id).context("Buffer id cannot be 0.")?;
82 Ok(Self(id))
83 }
84
85 /// Increments this buffer id, returning the old value.
86 /// So that's a post-increment operator in disguise.
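    ///
    /// Illustrative sketch of the post-increment behavior (not a compiled doctest):
    /// ```ignore
    /// let mut id = BufferId::new(1).unwrap();
    /// let old = id.next();
    /// assert_eq!(old.to_proto(), 1);
    /// assert_eq!(id.to_proto(), 2);
    /// ```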
87 pub fn next(&mut self) -> Self {
88 let old = *self;
89 self.0 = self.0.saturating_add(1);
90 old
91 }
92
93 pub fn to_proto(self) -> u64 {
94 self.into()
95 }
96}
97
98impl From<BufferId> for u64 {
99 fn from(id: BufferId) -> Self {
100 id.0.get()
101 }
102}
103
104#[derive(Clone)]
105pub struct BufferSnapshot {
106 replica_id: ReplicaId,
107 remote_id: BufferId,
108 visible_text: Rope,
109 deleted_text: Rope,
110 line_ending: LineEnding,
111 undo_map: UndoMap,
112 fragments: SumTree<Fragment>,
113 insertions: SumTree<InsertionFragment>,
114 insertion_slices: TreeSet<InsertionSlice>,
115 pub version: clock::Global,
116}
117
118#[derive(Clone, Debug)]
119pub struct HistoryEntry {
120 transaction: Transaction,
121 first_edit_at: Instant,
122 last_edit_at: Instant,
123 suppress_grouping: bool,
124}
125
126#[derive(Clone, Debug)]
127pub struct Transaction {
128 pub id: TransactionId,
129 pub edit_ids: Vec<clock::Lamport>,
130 pub start: clock::Global,
131}
132
133impl Transaction {
134 pub fn merge_in(&mut self, other: Transaction) {
135 self.edit_ids.extend(other.edit_ids);
136 }
137}
138
139impl HistoryEntry {
140 pub fn transaction_id(&self) -> TransactionId {
141 self.transaction.id
142 }
143}
144
145struct History {
146 base_text: Rope,
147 operations: TreeMap<clock::Lamport, Operation>,
148 undo_stack: Vec<HistoryEntry>,
149 redo_stack: Vec<HistoryEntry>,
150 transaction_depth: usize,
151 group_interval: Duration,
152}
153
154#[derive(Clone, Debug, Eq, PartialEq)]
155struct InsertionSlice {
156 edit_id: clock::Lamport,
157 insertion_id: clock::Lamport,
158 range: Range<usize>,
159}
160
161impl Ord for InsertionSlice {
162 fn cmp(&self, other: &Self) -> Ordering {
163 self.edit_id
164 .cmp(&other.edit_id)
165 .then_with(|| self.insertion_id.cmp(&other.insertion_id))
166 .then_with(|| self.range.start.cmp(&other.range.start))
167 .then_with(|| self.range.end.cmp(&other.range.end))
168 }
169}
170
171impl PartialOrd for InsertionSlice {
172 fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
173 Some(self.cmp(other))
174 }
175}
176
177impl InsertionSlice {
178 fn from_fragment(edit_id: clock::Lamport, fragment: &Fragment) -> Self {
179 Self {
180 edit_id,
181 insertion_id: fragment.timestamp,
182 range: fragment.insertion_offset..fragment.insertion_offset + fragment.len,
183 }
184 }
185}
186
187impl History {
188 pub fn new(base_text: Rope) -> Self {
189 Self {
190 base_text,
191 operations: Default::default(),
192 undo_stack: Vec::new(),
193 redo_stack: Vec::new(),
194 transaction_depth: 0,
195 // Don't group transactions in tests unless we opt in, because it's a footgun.
196 #[cfg(any(test, feature = "test-support"))]
197 group_interval: Duration::ZERO,
198 #[cfg(not(any(test, feature = "test-support")))]
199 group_interval: Duration::from_millis(300),
200 }
201 }
202
203 fn push(&mut self, op: Operation) {
204 self.operations.insert(op.timestamp(), op);
205 }
206
207 fn start_transaction(
208 &mut self,
209 start: clock::Global,
210 now: Instant,
211 clock: &mut clock::Lamport,
212 ) -> Option<TransactionId> {
213 self.transaction_depth += 1;
214 if self.transaction_depth == 1 {
215 let id = clock.tick();
216 self.undo_stack.push(HistoryEntry {
217 transaction: Transaction {
218 id,
219 start,
220 edit_ids: Default::default(),
221 },
222 first_edit_at: now,
223 last_edit_at: now,
224 suppress_grouping: false,
225 });
226 Some(id)
227 } else {
228 None
229 }
230 }
231
232 fn end_transaction(&mut self, now: Instant) -> Option<&HistoryEntry> {
233 assert_ne!(self.transaction_depth, 0);
234 self.transaction_depth -= 1;
235 if self.transaction_depth == 0 {
236 if self
237 .undo_stack
238 .last()
239 .unwrap()
240 .transaction
241 .edit_ids
242 .is_empty()
243 {
244 self.undo_stack.pop();
245 None
246 } else {
247 self.redo_stack.clear();
248 let entry = self.undo_stack.last_mut().unwrap();
249 entry.last_edit_at = now;
250 Some(entry)
251 }
252 } else {
253 None
254 }
255 }
256
257 fn group(&mut self) -> Option<TransactionId> {
258 let mut count = 0;
259 let mut entries = self.undo_stack.iter();
260 if let Some(mut entry) = entries.next_back() {
261 while let Some(prev_entry) = entries.next_back() {
262 if !prev_entry.suppress_grouping
263 && entry.first_edit_at - prev_entry.last_edit_at < self.group_interval
264 {
265 entry = prev_entry;
266 count += 1;
267 } else {
268 break;
269 }
270 }
271 }
272 self.group_trailing(count)
273 }
274
275 fn group_until(&mut self, transaction_id: TransactionId) {
276 let mut count = 0;
277 for entry in self.undo_stack.iter().rev() {
278 if entry.transaction_id() == transaction_id {
279 self.group_trailing(count);
280 break;
281 } else if entry.suppress_grouping {
282 break;
283 } else {
284 count += 1;
285 }
286 }
287 }
288
289 fn group_trailing(&mut self, n: usize) -> Option<TransactionId> {
290 let new_len = self.undo_stack.len() - n;
291 let (entries_to_keep, entries_to_merge) = self.undo_stack.split_at_mut(new_len);
292 if let Some(last_entry) = entries_to_keep.last_mut() {
293 for entry in &*entries_to_merge {
294 for edit_id in &entry.transaction.edit_ids {
295 last_entry.transaction.edit_ids.push(*edit_id);
296 }
297 }
298
299 if let Some(entry) = entries_to_merge.last_mut() {
300 last_entry.last_edit_at = entry.last_edit_at;
301 }
302 }
303
304 self.undo_stack.truncate(new_len);
305 self.undo_stack.last().map(|e| e.transaction.id)
306 }
307
308 fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
309 self.undo_stack.last_mut().map(|entry| {
310 entry.suppress_grouping = true;
311 &entry.transaction
312 })
313 }
314
315 fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
316 assert_eq!(self.transaction_depth, 0);
317 self.undo_stack.push(HistoryEntry {
318 transaction,
319 first_edit_at: now,
320 last_edit_at: now,
321 suppress_grouping: false,
322 });
323 }
324
325 /// Differs from `push_transaction` in that it does not clear the redo
326 /// stack. Intended to be used to create a parent transaction to merge
327 /// potential child transactions into.
328 ///
329 /// The caller is responsible for removing it from the undo history using
330 /// `forget_transaction` if no edits are merged into it. Otherwise, if edits
331 /// are merged into this transaction, the caller is responsible for ensuring
332 /// the redo stack is cleared. The easiest way to ensure the redo stack is
333 /// cleared is to create transactions with the usual `start_transaction` and
    /// `end_transaction` methods and merge the resulting transactions into
    /// the transaction created by this method.
336 fn push_empty_transaction(
337 &mut self,
338 start: clock::Global,
339 now: Instant,
340 clock: &mut clock::Lamport,
341 ) -> TransactionId {
342 assert_eq!(self.transaction_depth, 0);
343 let id = clock.tick();
344 let transaction = Transaction {
345 id,
346 start,
347 edit_ids: Vec::new(),
348 };
349 self.undo_stack.push(HistoryEntry {
350 transaction,
351 first_edit_at: now,
352 last_edit_at: now,
353 suppress_grouping: false,
354 });
355 id
356 }
357
358 fn push_undo(&mut self, op_id: clock::Lamport) {
359 assert_ne!(self.transaction_depth, 0);
360 if let Some(Operation::Edit(_)) = self.operations.get(&op_id) {
361 let last_transaction = self.undo_stack.last_mut().unwrap();
362 last_transaction.transaction.edit_ids.push(op_id);
363 }
364 }
365
366 fn pop_undo(&mut self) -> Option<&HistoryEntry> {
367 assert_eq!(self.transaction_depth, 0);
368 if let Some(entry) = self.undo_stack.pop() {
369 self.redo_stack.push(entry);
370 self.redo_stack.last()
371 } else {
372 None
373 }
374 }
375
376 fn remove_from_undo(&mut self, transaction_id: TransactionId) -> Option<&HistoryEntry> {
377 assert_eq!(self.transaction_depth, 0);
378
379 let entry_ix = self
380 .undo_stack
381 .iter()
382 .rposition(|entry| entry.transaction.id == transaction_id)?;
383 let entry = self.undo_stack.remove(entry_ix);
384 self.redo_stack.push(entry);
385 self.redo_stack.last()
386 }
387
388 fn remove_from_undo_until(&mut self, transaction_id: TransactionId) -> &[HistoryEntry] {
389 assert_eq!(self.transaction_depth, 0);
390
391 let redo_stack_start_len = self.redo_stack.len();
392 if let Some(entry_ix) = self
393 .undo_stack
394 .iter()
395 .rposition(|entry| entry.transaction.id == transaction_id)
396 {
397 self.redo_stack
398 .extend(self.undo_stack.drain(entry_ix..).rev());
399 }
400 &self.redo_stack[redo_stack_start_len..]
401 }
402
403 fn forget(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
404 assert_eq!(self.transaction_depth, 0);
405 if let Some(entry_ix) = self
406 .undo_stack
407 .iter()
408 .rposition(|entry| entry.transaction.id == transaction_id)
409 {
410 Some(self.undo_stack.remove(entry_ix).transaction)
411 } else if let Some(entry_ix) = self
412 .redo_stack
413 .iter()
414 .rposition(|entry| entry.transaction.id == transaction_id)
415 {
416 Some(self.redo_stack.remove(entry_ix).transaction)
417 } else {
418 None
419 }
420 }
421
422 fn transaction(&self, transaction_id: TransactionId) -> Option<&Transaction> {
423 let entry = self
424 .undo_stack
425 .iter()
426 .rfind(|entry| entry.transaction.id == transaction_id)
427 .or_else(|| {
428 self.redo_stack
429 .iter()
430 .rfind(|entry| entry.transaction.id == transaction_id)
431 })?;
432 Some(&entry.transaction)
433 }
434
435 fn transaction_mut(&mut self, transaction_id: TransactionId) -> Option<&mut Transaction> {
436 let entry = self
437 .undo_stack
438 .iter_mut()
439 .rfind(|entry| entry.transaction.id == transaction_id)
440 .or_else(|| {
441 self.redo_stack
442 .iter_mut()
443 .rfind(|entry| entry.transaction.id == transaction_id)
444 })?;
445 Some(&mut entry.transaction)
446 }
447
448 fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
449 if let Some(transaction) = self.forget(transaction)
450 && let Some(destination) = self.transaction_mut(destination)
451 {
452 destination.edit_ids.extend(transaction.edit_ids);
453 }
454 }
455
456 fn pop_redo(&mut self) -> Option<&HistoryEntry> {
457 assert_eq!(self.transaction_depth, 0);
458 if let Some(entry) = self.redo_stack.pop() {
459 self.undo_stack.push(entry);
460 self.undo_stack.last()
461 } else {
462 None
463 }
464 }
465
466 fn remove_from_redo(&mut self, transaction_id: TransactionId) -> &[HistoryEntry] {
467 assert_eq!(self.transaction_depth, 0);
468
469 let undo_stack_start_len = self.undo_stack.len();
470 if let Some(entry_ix) = self
471 .redo_stack
472 .iter()
473 .rposition(|entry| entry.transaction.id == transaction_id)
474 {
475 self.undo_stack
476 .extend(self.redo_stack.drain(entry_ix..).rev());
477 }
478 &self.undo_stack[undo_stack_start_len..]
479 }
480}
481
482struct Edits<'a, D: TextDimension, F: FnMut(&FragmentSummary) -> bool> {
483 visible_cursor: rope::Cursor<'a>,
484 deleted_cursor: rope::Cursor<'a>,
485 fragments_cursor: Option<FilterCursor<'a, 'static, F, Fragment, FragmentTextSummary>>,
486 undos: &'a UndoMap,
487 since: &'a clock::Global,
488 old_end: D,
489 new_end: D,
490 range: Range<(&'a Locator, usize)>,
491 buffer_id: BufferId,
492}
493
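/// A contiguous change to a text, expressed in some dimension `D` (for example
/// byte offsets or points): `old` is the replaced range in the pre-edit text
/// and `new` is the replacing range in the post-edit text.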
494#[derive(Clone, Debug, Default, Eq, PartialEq)]
495pub struct Edit<D> {
496 pub old: Range<D>,
497 pub new: Range<D>,
498}
499impl<D> Edit<D>
500where
501 D: PartialEq,
502{
503 pub fn is_empty(&self) -> bool {
504 self.old.start == self.old.end && self.new.start == self.new.end
505 }
506}
507
508impl<D, DDelta> Edit<D>
509where
510 D: Sub<D, Output = DDelta> + Copy,
511{
512 pub fn old_len(&self) -> DDelta {
513 self.old.end - self.old.start
514 }
515
516 pub fn new_len(&self) -> DDelta {
517 self.new.end - self.new.start
518 }
519}
520
521impl<D1, D2> Edit<(D1, D2)> {
522 pub fn flatten(self) -> (Edit<D1>, Edit<D2>) {
523 (
524 Edit {
525 old: self.old.start.0..self.old.end.0,
526 new: self.new.start.0..self.new.end.0,
527 },
528 Edit {
529 old: self.old.start.1..self.old.end.1,
530 new: self.new.start.1..self.new.end.1,
531 },
532 )
533 }
534}
535
536#[derive(Eq, PartialEq, Clone, Debug)]
537pub struct Fragment {
538 pub id: Locator,
539 pub timestamp: clock::Lamport,
540 pub insertion_offset: usize,
541 pub len: usize,
542 pub visible: bool,
543 pub deletions: HashSet<clock::Lamport>,
544 pub max_undos: clock::Global,
545}
546
547#[derive(Eq, PartialEq, Clone, Debug)]
548pub struct FragmentSummary {
549 text: FragmentTextSummary,
550 max_id: Locator,
551 max_version: clock::Global,
552 min_insertion_version: clock::Global,
553 max_insertion_version: clock::Global,
554}
555
556#[derive(Copy, Default, Clone, Debug, PartialEq, Eq)]
557struct FragmentTextSummary {
558 visible: usize,
559 deleted: usize,
560}
561
562impl<'a> sum_tree::Dimension<'a, FragmentSummary> for FragmentTextSummary {
563 fn zero(_: &Option<clock::Global>) -> Self {
564 Default::default()
565 }
566
567 fn add_summary(&mut self, summary: &'a FragmentSummary, _: &Option<clock::Global>) {
568 self.visible += summary.text.visible;
569 self.deleted += summary.text.deleted;
570 }
571}
572
573#[derive(Eq, PartialEq, Clone, Debug)]
574struct InsertionFragment {
575 timestamp: clock::Lamport,
576 split_offset: usize,
577 fragment_id: Locator,
578}
579
580#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)]
581struct InsertionFragmentKey {
582 timestamp: clock::Lamport,
583 split_offset: usize,
584}
585
586#[derive(Clone, Debug, Eq, PartialEq)]
587pub enum Operation {
588 Edit(EditOperation),
589 Undo(UndoOperation),
590}
591
592#[derive(Clone, Debug, Eq, PartialEq)]
593pub struct EditOperation {
594 pub timestamp: clock::Lamport,
595 pub version: clock::Global,
596 pub ranges: Vec<Range<FullOffset>>,
597 pub new_text: Vec<Arc<str>>,
598}
599
600#[derive(Clone, Debug, Eq, PartialEq)]
601pub struct UndoOperation {
602 pub timestamp: clock::Lamport,
603 pub version: clock::Global,
604 pub counts: HashMap<clock::Lamport, u32>,
605}
606
607/// Stores information about the indentation of a line (tabs and spaces).
608#[derive(Clone, Copy, Debug, Eq, PartialEq)]
609pub struct LineIndent {
610 pub tabs: u32,
611 pub spaces: u32,
612 pub line_blank: bool,
613}
614
615impl LineIndent {
616 pub fn from_chunks(chunks: &mut Chunks) -> Self {
617 let mut tabs = 0;
618 let mut spaces = 0;
619 let mut line_blank = true;
620
621 'outer: while let Some(chunk) = chunks.peek() {
622 for ch in chunk.chars() {
623 if ch == '\t' {
624 tabs += 1;
625 } else if ch == ' ' {
626 spaces += 1;
627 } else {
628 if ch != '\n' {
629 line_blank = false;
630 }
631 break 'outer;
632 }
633 }
634
635 chunks.next();
636 }
637
638 Self {
639 tabs,
640 spaces,
641 line_blank,
642 }
643 }
644
645 /// Constructs a new `LineIndent` which only contains spaces.
646 pub fn spaces(spaces: u32) -> Self {
647 Self {
648 tabs: 0,
649 spaces,
650 line_blank: true,
651 }
652 }
653
654 /// Constructs a new `LineIndent` which only contains tabs.
655 pub fn tabs(tabs: u32) -> Self {
656 Self {
657 tabs,
658 spaces: 0,
659 line_blank: true,
660 }
661 }
662
663 /// Indicates whether the line is empty.
664 pub fn is_line_empty(&self) -> bool {
665 self.tabs == 0 && self.spaces == 0 && self.line_blank
666 }
667
668 /// Indicates whether the line is blank (contains only whitespace).
669 pub fn is_line_blank(&self) -> bool {
670 self.line_blank
671 }
672
673 /// Returns the number of indentation characters (tabs or spaces).
674 pub fn raw_len(&self) -> u32 {
675 self.tabs + self.spaces
676 }
677
678 /// Returns the number of indentation characters (tabs or spaces), taking tab size into account.
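    ///
    /// Illustrative sketch (not a compiled doctest): one tab and two spaces
    /// yield a `raw_len` of 3, but a `len` of 6 with a tab size of 4.
    /// ```ignore
    /// let indent = LineIndent::from("\t  fn main() {}");
    /// assert_eq!(indent.raw_len(), 3);
    /// assert_eq!(indent.len(4), 6);
    /// ```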
679 pub fn len(&self, tab_size: u32) -> u32 {
680 self.tabs * tab_size + self.spaces
681 }
682}
683
684impl From<&str> for LineIndent {
685 fn from(value: &str) -> Self {
686 Self::from_iter(value.chars())
687 }
688}
689
690impl FromIterator<char> for LineIndent {
691 fn from_iter<T: IntoIterator<Item = char>>(chars: T) -> Self {
692 let mut tabs = 0;
693 let mut spaces = 0;
694 let mut line_blank = true;
695 for c in chars {
696 if c == '\t' {
697 tabs += 1;
698 } else if c == ' ' {
699 spaces += 1;
700 } else {
701 if c != '\n' {
702 line_blank = false;
703 }
704 break;
705 }
706 }
707 Self {
708 tabs,
709 spaces,
710 line_blank,
711 }
712 }
713}
714
715impl Buffer {
716 pub fn new(replica_id: ReplicaId, remote_id: BufferId, base_text: impl Into<String>) -> Buffer {
717 let mut base_text = base_text.into();
718 let line_ending = LineEnding::detect(&base_text);
719 LineEnding::normalize(&mut base_text);
720 Self::new_normalized(replica_id, remote_id, line_ending, Rope::from(&*base_text))
721 }
722
723 pub fn new_normalized(
724 replica_id: ReplicaId,
725 remote_id: BufferId,
726 line_ending: LineEnding,
727 normalized: Rope,
728 ) -> Buffer {
729 let history = History::new(normalized);
730 let mut fragments = SumTree::new(&None);
731 let mut insertions = SumTree::default();
732
733 let mut lamport_clock = clock::Lamport::new(replica_id);
734 let mut version = clock::Global::new();
735
736 let visible_text = history.base_text.clone();
737 if !visible_text.is_empty() {
738 let insertion_timestamp = clock::Lamport::new(ReplicaId::LOCAL);
739 lamport_clock.observe(insertion_timestamp);
740 version.observe(insertion_timestamp);
741 let fragment_id = Locator::between(&Locator::min(), &Locator::max());
742 let fragment = Fragment {
743 id: fragment_id,
744 timestamp: insertion_timestamp,
745 insertion_offset: 0,
746 len: visible_text.len(),
747 visible: true,
748 deletions: Default::default(),
749 max_undos: Default::default(),
750 };
751 insertions.push(InsertionFragment::new(&fragment), ());
752 fragments.push(fragment, &None);
753 }
754
755 Buffer {
756 snapshot: BufferSnapshot {
757 replica_id,
758 remote_id,
759 visible_text,
760 deleted_text: Rope::new(),
761 line_ending,
762 fragments,
763 insertions,
764 version,
765 undo_map: Default::default(),
766 insertion_slices: Default::default(),
767 },
768 history,
769 deferred_ops: OperationQueue::new(),
770 deferred_replicas: HashSet::default(),
771 lamport_clock,
772 subscriptions: Default::default(),
773 edit_id_resolvers: Default::default(),
774 wait_for_version_txs: Default::default(),
775 }
776 }
777
778 pub fn version(&self) -> clock::Global {
779 self.version.clone()
780 }
781
782 pub fn snapshot(&self) -> BufferSnapshot {
783 self.snapshot.clone()
784 }
785
786 pub fn branch(&self) -> Self {
787 Self {
788 snapshot: self.snapshot.clone(),
789 history: History::new(self.base_text().clone()),
790 deferred_ops: OperationQueue::new(),
791 deferred_replicas: HashSet::default(),
792 lamport_clock: clock::Lamport::new(ReplicaId::LOCAL_BRANCH),
793 subscriptions: Default::default(),
794 edit_id_resolvers: Default::default(),
795 wait_for_version_txs: Default::default(),
796 }
797 }
798
799 pub fn replica_id(&self) -> ReplicaId {
800 self.lamport_clock.replica_id
801 }
802
803 pub fn remote_id(&self) -> BufferId {
804 self.remote_id
805 }
806
807 pub fn deferred_ops_len(&self) -> usize {
808 self.deferred_ops.len()
809 }
810
811 pub fn transaction_group_interval(&self) -> Duration {
812 self.history.group_interval
813 }
814
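    /// Applies a batch of local edits, each expressed as a range in the
    /// current text plus its replacement, and returns the resulting
    /// `Operation` for broadcast to other replicas.
    ///
    /// Hedged usage sketch (illustrative, not a compiled doctest):
    /// ```ignore
    /// let mut buffer = Buffer::new(ReplicaId::LOCAL, BufferId::new(1).unwrap(), "abc");
    /// let op = buffer.edit([(1..2, "XYZ")]);
    /// assert_eq!(buffer.text(), "aXYZc");
    /// // `op` can now be applied on other replicas via `apply_ops`.
    /// ```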
815 pub fn edit<R, I, S, T>(&mut self, edits: R) -> Operation
816 where
817 R: IntoIterator<IntoIter = I>,
818 I: ExactSizeIterator<Item = (Range<S>, T)>,
819 S: ToOffset,
820 T: Into<Arc<str>>,
821 {
822 let edits = edits
823 .into_iter()
824 .map(|(range, new_text)| (range, new_text.into()));
825
826 self.start_transaction();
827 let timestamp = self.lamport_clock.tick();
828 let operation = Operation::Edit(self.apply_local_edit(edits, timestamp));
829
830 self.history.push(operation.clone());
831 self.history.push_undo(operation.timestamp());
832 self.snapshot.version.observe(operation.timestamp());
833 self.end_transaction();
834 operation
835 }
836
837 fn apply_local_edit<S: ToOffset, T: Into<Arc<str>>>(
838 &mut self,
839 edits: impl ExactSizeIterator<Item = (Range<S>, T)>,
840 timestamp: clock::Lamport,
841 ) -> EditOperation {
842 let mut edits_patch = Patch::default();
843 let mut edit_op = EditOperation {
844 timestamp,
845 version: self.version(),
846 ranges: Vec::with_capacity(edits.len()),
847 new_text: Vec::with_capacity(edits.len()),
848 };
849 let mut new_insertions = Vec::new();
850 let mut insertion_offset = 0;
851 let mut insertion_slices = Vec::new();
852
853 let mut edits = edits
854 .map(|(range, new_text)| (range.to_offset(&*self), new_text))
855 .peekable();
856
857 let mut new_ropes =
858 RopeBuilder::new(self.visible_text.cursor(0), self.deleted_text.cursor(0));
859 let mut old_fragments = self.fragments.cursor::<FragmentTextSummary>(&None);
860 let mut new_fragments = old_fragments.slice(&edits.peek().unwrap().0.start, Bias::Right);
861 new_ropes.append(new_fragments.summary().text);
862
863 let mut fragment_start = old_fragments.start().visible;
864 for (range, new_text) in edits {
865 let new_text = LineEnding::normalize_arc(new_text.into());
866 let fragment_end = old_fragments.end().visible;
867
868 // If the current fragment ends before this range, then jump ahead to the first fragment
869 // that extends past the start of this range, reusing any intervening fragments.
870 if fragment_end < range.start {
871 // If the current fragment has been partially consumed, then consume the rest of it
872 // and advance to the next fragment before slicing.
873 if fragment_start > old_fragments.start().visible {
874 if fragment_end > fragment_start {
875 let mut suffix = old_fragments.item().unwrap().clone();
876 suffix.len = fragment_end - fragment_start;
877 suffix.insertion_offset += fragment_start - old_fragments.start().visible;
878 new_insertions.push(InsertionFragment::insert_new(&suffix));
879 new_ropes.push_fragment(&suffix, suffix.visible);
880 new_fragments.push(suffix, &None);
881 }
882 old_fragments.next();
883 }
884
885 let slice = old_fragments.slice(&range.start, Bias::Right);
886 new_ropes.append(slice.summary().text);
887 new_fragments.append(slice, &None);
888 fragment_start = old_fragments.start().visible;
889 }
890
891 let full_range_start = FullOffset(range.start + old_fragments.start().deleted);
892
893 // Preserve any portion of the current fragment that precedes this range.
894 if fragment_start < range.start {
895 let mut prefix = old_fragments.item().unwrap().clone();
896 prefix.len = range.start - fragment_start;
897 prefix.insertion_offset += fragment_start - old_fragments.start().visible;
898 prefix.id = Locator::between(&new_fragments.summary().max_id, &prefix.id);
899 new_insertions.push(InsertionFragment::insert_new(&prefix));
900 new_ropes.push_fragment(&prefix, prefix.visible);
901 new_fragments.push(prefix, &None);
902 fragment_start = range.start;
903 }
904
905 // Insert the new text before any existing fragments within the range.
906 if !new_text.is_empty() {
907 let new_start = new_fragments.summary().text.visible;
908
909 let fragment = Fragment {
910 id: Locator::between(
911 &new_fragments.summary().max_id,
912 old_fragments
913 .item()
914 .map_or(&Locator::max(), |old_fragment| &old_fragment.id),
915 ),
916 timestamp,
917 insertion_offset,
918 len: new_text.len(),
919 deletions: Default::default(),
920 max_undos: Default::default(),
921 visible: true,
922 };
923 edits_patch.push(Edit {
924 old: fragment_start..fragment_start,
925 new: new_start..new_start + new_text.len(),
926 });
927 insertion_slices.push(InsertionSlice::from_fragment(timestamp, &fragment));
928 new_insertions.push(InsertionFragment::insert_new(&fragment));
929 new_ropes.push_str(new_text.as_ref());
930 new_fragments.push(fragment, &None);
931 insertion_offset += new_text.len();
932 }
933
934 // Advance through every fragment that intersects this range, marking the intersecting
935 // portions as deleted.
936 while fragment_start < range.end {
937 let fragment = old_fragments.item().unwrap();
938 let fragment_end = old_fragments.end().visible;
939 let mut intersection = fragment.clone();
940 let intersection_end = cmp::min(range.end, fragment_end);
941 if fragment.visible {
942 intersection.len = intersection_end - fragment_start;
943 intersection.insertion_offset += fragment_start - old_fragments.start().visible;
944 intersection.id =
945 Locator::between(&new_fragments.summary().max_id, &intersection.id);
946 intersection.deletions.insert(timestamp);
947 intersection.visible = false;
948 }
949 if intersection.len > 0 {
950 if fragment.visible && !intersection.visible {
951 let new_start = new_fragments.summary().text.visible;
952 edits_patch.push(Edit {
953 old: fragment_start..intersection_end,
954 new: new_start..new_start,
955 });
956 insertion_slices
957 .push(InsertionSlice::from_fragment(timestamp, &intersection));
958 }
959 new_insertions.push(InsertionFragment::insert_new(&intersection));
960 new_ropes.push_fragment(&intersection, fragment.visible);
961 new_fragments.push(intersection, &None);
962 fragment_start = intersection_end;
963 }
964 if fragment_end <= range.end {
965 old_fragments.next();
966 }
967 }
968
969 let full_range_end = FullOffset(range.end + old_fragments.start().deleted);
970 edit_op.ranges.push(full_range_start..full_range_end);
971 edit_op.new_text.push(new_text);
972 }
973
974 // If the current fragment has been partially consumed, then consume the rest of it
975 // and advance to the next fragment before slicing.
976 if fragment_start > old_fragments.start().visible {
977 let fragment_end = old_fragments.end().visible;
978 if fragment_end > fragment_start {
979 let mut suffix = old_fragments.item().unwrap().clone();
980 suffix.len = fragment_end - fragment_start;
981 suffix.insertion_offset += fragment_start - old_fragments.start().visible;
982 new_insertions.push(InsertionFragment::insert_new(&suffix));
983 new_ropes.push_fragment(&suffix, suffix.visible);
984 new_fragments.push(suffix, &None);
985 }
986 old_fragments.next();
987 }
988
989 let suffix = old_fragments.suffix();
990 new_ropes.append(suffix.summary().text);
991 new_fragments.append(suffix, &None);
992 let (visible_text, deleted_text) = new_ropes.finish();
993 drop(old_fragments);
994
995 self.snapshot.fragments = new_fragments;
996 self.snapshot.insertions.edit(new_insertions, ());
997 self.snapshot.visible_text = visible_text;
998 self.snapshot.deleted_text = deleted_text;
999 self.subscriptions.publish_mut(&edits_patch);
1000 self.snapshot.insertion_slices.extend(insertion_slices);
1001 edit_op
1002 }
1003
1004 pub fn set_line_ending(&mut self, line_ending: LineEnding) {
1005 self.snapshot.line_ending = line_ending;
1006 }
1007
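    /// Applies operations produced on other replicas, deferring any operation
    /// whose causal dependencies (its `version`) have not been observed yet.
    ///
    /// Hedged replication sketch (illustrative, not a compiled doctest), where
    /// `buffer_a` and `buffer_b` stand for two replicas of the same buffer:
    /// ```ignore
    /// let op = buffer_a.edit([(0..0, "hi ")]);
    /// buffer_b.apply_ops([op]);
    /// assert_eq!(buffer_b.text(), buffer_a.text());
    /// ```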
1008 pub fn apply_ops<I: IntoIterator<Item = Operation>>(&mut self, ops: I) {
1009 let mut deferred_ops = Vec::new();
1010 for op in ops {
1011 self.history.push(op.clone());
1012 if self.can_apply_op(&op) {
1013 self.apply_op(op);
1014 } else {
1015 self.deferred_replicas.insert(op.replica_id());
1016 deferred_ops.push(op);
1017 }
1018 }
1019 self.deferred_ops.insert(deferred_ops);
1020 self.flush_deferred_ops();
1021 }
1022
1023 fn apply_op(&mut self, op: Operation) {
1024 match op {
1025 Operation::Edit(edit) => {
1026 if !self.version.observed(edit.timestamp) {
1027 self.apply_remote_edit(
1028 &edit.version,
1029 &edit.ranges,
1030 &edit.new_text,
1031 edit.timestamp,
1032 );
1033 self.snapshot.version.observe(edit.timestamp);
1034 self.lamport_clock.observe(edit.timestamp);
1035 self.resolve_edit(edit.timestamp);
1036 }
1037 }
1038 Operation::Undo(undo) => {
1039 if !self.version.observed(undo.timestamp) {
1040 self.apply_undo(&undo);
1041 self.snapshot.version.observe(undo.timestamp);
1042 self.lamport_clock.observe(undo.timestamp);
1043 }
1044 }
1045 }
1046 self.wait_for_version_txs.retain_mut(|(version, tx)| {
1047 if self.snapshot.version().observed_all(version) {
1048 tx.try_send(()).ok();
1049 false
1050 } else {
1051 true
1052 }
1053 });
1054 }
1055
1056 fn apply_remote_edit(
1057 &mut self,
1058 version: &clock::Global,
1059 ranges: &[Range<FullOffset>],
1060 new_text: &[Arc<str>],
1061 timestamp: clock::Lamport,
1062 ) {
1063 if ranges.is_empty() {
1064 return;
1065 }
1066
1067 let edits = ranges.iter().zip(new_text.iter());
1068 let mut edits_patch = Patch::default();
1069 let mut insertion_slices = Vec::new();
1070 let cx = Some(version.clone());
1071 let mut new_insertions = Vec::new();
1072 let mut insertion_offset = 0;
1073 let mut new_ropes =
1074 RopeBuilder::new(self.visible_text.cursor(0), self.deleted_text.cursor(0));
1075 let mut old_fragments = self
1076 .fragments
1077 .cursor::<Dimensions<VersionedFullOffset, usize>>(&cx);
1078 let mut new_fragments =
1079 old_fragments.slice(&VersionedFullOffset::Offset(ranges[0].start), Bias::Left);
1080 new_ropes.append(new_fragments.summary().text);
1081
1082 let mut fragment_start = old_fragments.start().0.full_offset();
1083 for (range, new_text) in edits {
1084 let fragment_end = old_fragments.end().0.full_offset();
1085
1086 // If the current fragment ends before this range, then jump ahead to the first fragment
1087 // that extends past the start of this range, reusing any intervening fragments.
1088 if fragment_end < range.start {
1089 // If the current fragment has been partially consumed, then consume the rest of it
1090 // and advance to the next fragment before slicing.
1091 if fragment_start > old_fragments.start().0.full_offset() {
1092 if fragment_end > fragment_start {
1093 let mut suffix = old_fragments.item().unwrap().clone();
1094 suffix.len = fragment_end.0 - fragment_start.0;
1095 suffix.insertion_offset +=
1096 fragment_start - old_fragments.start().0.full_offset();
1097 new_insertions.push(InsertionFragment::insert_new(&suffix));
1098 new_ropes.push_fragment(&suffix, suffix.visible);
1099 new_fragments.push(suffix, &None);
1100 }
1101 old_fragments.next();
1102 }
1103
1104 let slice =
1105 old_fragments.slice(&VersionedFullOffset::Offset(range.start), Bias::Left);
1106 new_ropes.append(slice.summary().text);
1107 new_fragments.append(slice, &None);
1108 fragment_start = old_fragments.start().0.full_offset();
1109 }
1110
1111 // If we are at the end of a non-concurrent fragment, advance to the next one.
1112 let fragment_end = old_fragments.end().0.full_offset();
1113 if fragment_end == range.start && fragment_end > fragment_start {
1114 let mut fragment = old_fragments.item().unwrap().clone();
1115 fragment.len = fragment_end.0 - fragment_start.0;
1116 fragment.insertion_offset += fragment_start - old_fragments.start().0.full_offset();
1117 new_insertions.push(InsertionFragment::insert_new(&fragment));
1118 new_ropes.push_fragment(&fragment, fragment.visible);
1119 new_fragments.push(fragment, &None);
1120 old_fragments.next();
1121 fragment_start = old_fragments.start().0.full_offset();
1122 }
1123
1124 // Skip over insertions that are concurrent to this edit, but have a lower lamport
1125 // timestamp.
1126 while let Some(fragment) = old_fragments.item() {
1127 if fragment_start == range.start && fragment.timestamp > timestamp {
1128 new_ropes.push_fragment(fragment, fragment.visible);
1129 new_fragments.push(fragment.clone(), &None);
1130 old_fragments.next();
1131 debug_assert_eq!(fragment_start, range.start);
1132 } else {
1133 break;
1134 }
1135 }
1136 debug_assert!(fragment_start <= range.start);
1137
1138 // Preserve any portion of the current fragment that precedes this range.
1139 if fragment_start < range.start {
1140 let mut prefix = old_fragments.item().unwrap().clone();
1141 prefix.len = range.start.0 - fragment_start.0;
1142 prefix.insertion_offset += fragment_start - old_fragments.start().0.full_offset();
1143 prefix.id = Locator::between(&new_fragments.summary().max_id, &prefix.id);
1144 new_insertions.push(InsertionFragment::insert_new(&prefix));
1145 fragment_start = range.start;
1146 new_ropes.push_fragment(&prefix, prefix.visible);
1147 new_fragments.push(prefix, &None);
1148 }
1149
1150 // Insert the new text before any existing fragments within the range.
1151 if !new_text.is_empty() {
1152 let mut old_start = old_fragments.start().1;
1153 if old_fragments.item().is_some_and(|f| f.visible) {
1154 old_start += fragment_start.0 - old_fragments.start().0.full_offset().0;
1155 }
1156 let new_start = new_fragments.summary().text.visible;
1157 let fragment = Fragment {
1158 id: Locator::between(
1159 &new_fragments.summary().max_id,
1160 old_fragments
1161 .item()
1162 .map_or(&Locator::max(), |old_fragment| &old_fragment.id),
1163 ),
1164 timestamp,
1165 insertion_offset,
1166 len: new_text.len(),
1167 deletions: Default::default(),
1168 max_undos: Default::default(),
1169 visible: true,
1170 };
1171 edits_patch.push(Edit {
1172 old: old_start..old_start,
1173 new: new_start..new_start + new_text.len(),
1174 });
1175 insertion_slices.push(InsertionSlice::from_fragment(timestamp, &fragment));
1176 new_insertions.push(InsertionFragment::insert_new(&fragment));
1177 new_ropes.push_str(new_text);
1178 new_fragments.push(fragment, &None);
1179 insertion_offset += new_text.len();
1180 }
1181
1182 // Advance through every fragment that intersects this range, marking the intersecting
1183 // portions as deleted.
1184 while fragment_start < range.end {
1185 let fragment = old_fragments.item().unwrap();
1186 let fragment_end = old_fragments.end().0.full_offset();
1187 let mut intersection = fragment.clone();
1188 let intersection_end = cmp::min(range.end, fragment_end);
1189 if fragment.was_visible(version, &self.undo_map) {
1190 intersection.len = intersection_end.0 - fragment_start.0;
1191 intersection.insertion_offset +=
1192 fragment_start - old_fragments.start().0.full_offset();
1193 intersection.id =
1194 Locator::between(&new_fragments.summary().max_id, &intersection.id);
1195 intersection.deletions.insert(timestamp);
1196 intersection.visible = false;
1197 insertion_slices.push(InsertionSlice::from_fragment(timestamp, &intersection));
1198 }
1199 if intersection.len > 0 {
1200 if fragment.visible && !intersection.visible {
1201 let old_start = old_fragments.start().1
1202 + (fragment_start.0 - old_fragments.start().0.full_offset().0);
1203 let new_start = new_fragments.summary().text.visible;
1204 edits_patch.push(Edit {
1205 old: old_start..old_start + intersection.len,
1206 new: new_start..new_start,
1207 });
1208 }
1209 new_insertions.push(InsertionFragment::insert_new(&intersection));
1210 new_ropes.push_fragment(&intersection, fragment.visible);
1211 new_fragments.push(intersection, &None);
1212 fragment_start = intersection_end;
1213 }
1214 if fragment_end <= range.end {
1215 old_fragments.next();
1216 }
1217 }
1218 }
1219
1220 // If the current fragment has been partially consumed, then consume the rest of it
1221 // and advance to the next fragment before slicing.
1222 if fragment_start > old_fragments.start().0.full_offset() {
1223 let fragment_end = old_fragments.end().0.full_offset();
1224 if fragment_end > fragment_start {
1225 let mut suffix = old_fragments.item().unwrap().clone();
1226 suffix.len = fragment_end.0 - fragment_start.0;
1227 suffix.insertion_offset += fragment_start - old_fragments.start().0.full_offset();
1228 new_insertions.push(InsertionFragment::insert_new(&suffix));
1229 new_ropes.push_fragment(&suffix, suffix.visible);
1230 new_fragments.push(suffix, &None);
1231 }
1232 old_fragments.next();
1233 }
1234
1235 let suffix = old_fragments.suffix();
1236 new_ropes.append(suffix.summary().text);
1237 new_fragments.append(suffix, &None);
1238 let (visible_text, deleted_text) = new_ropes.finish();
1239 drop(old_fragments);
1240
1241 self.snapshot.fragments = new_fragments;
1242 self.snapshot.visible_text = visible_text;
1243 self.snapshot.deleted_text = deleted_text;
1244 self.snapshot.insertions.edit(new_insertions, ());
1245 self.snapshot.insertion_slices.extend(insertion_slices);
1246 self.subscriptions.publish_mut(&edits_patch)
1247 }
1248
1249 fn fragment_ids_for_edits<'a>(
1250 &'a self,
1251 edit_ids: impl Iterator<Item = &'a clock::Lamport>,
1252 ) -> Vec<&'a Locator> {
1253 // Get all of the insertion slices changed by the given edits.
1254 let mut insertion_slices = Vec::new();
1255 for edit_id in edit_ids {
1256 let insertion_slice = InsertionSlice {
1257 edit_id: *edit_id,
1258 insertion_id: clock::Lamport::MIN,
1259 range: 0..0,
1260 };
1261 let slices = self
1262 .snapshot
1263 .insertion_slices
1264 .iter_from(&insertion_slice)
1265 .take_while(|slice| slice.edit_id == *edit_id);
1266 insertion_slices.extend(slices)
1267 }
1268 insertion_slices
1269 .sort_unstable_by_key(|s| (s.insertion_id, s.range.start, Reverse(s.range.end)));
1270
1271 // Get all of the fragments corresponding to these insertion slices.
1272 let mut fragment_ids = Vec::new();
1273 let mut insertions_cursor = self.insertions.cursor::<InsertionFragmentKey>(());
1274 for insertion_slice in &insertion_slices {
1275 if insertion_slice.insertion_id != insertions_cursor.start().timestamp
1276 || insertion_slice.range.start > insertions_cursor.start().split_offset
1277 {
1278 insertions_cursor.seek_forward(
1279 &InsertionFragmentKey {
1280 timestamp: insertion_slice.insertion_id,
1281 split_offset: insertion_slice.range.start,
1282 },
1283 Bias::Left,
1284 );
1285 }
1286 while let Some(item) = insertions_cursor.item() {
1287 if item.timestamp != insertion_slice.insertion_id
1288 || item.split_offset >= insertion_slice.range.end
1289 {
1290 break;
1291 }
1292 fragment_ids.push(&item.fragment_id);
1293 insertions_cursor.next();
1294 }
1295 }
1296 fragment_ids.sort_unstable();
1297 fragment_ids
1298 }
1299
1300 fn apply_undo(&mut self, undo: &UndoOperation) {
1301 self.snapshot.undo_map.insert(undo);
1302
1303 let mut edits = Patch::default();
1304 let mut old_fragments = self
1305 .fragments
1306 .cursor::<Dimensions<Option<&Locator>, usize>>(&None);
1307 let mut new_fragments = SumTree::new(&None);
1308 let mut new_ropes =
1309 RopeBuilder::new(self.visible_text.cursor(0), self.deleted_text.cursor(0));
1310
1311 for fragment_id in self.fragment_ids_for_edits(undo.counts.keys()) {
1312 let preceding_fragments = old_fragments.slice(&Some(fragment_id), Bias::Left);
1313 new_ropes.append(preceding_fragments.summary().text);
1314 new_fragments.append(preceding_fragments, &None);
1315
1316 if let Some(fragment) = old_fragments.item() {
1317 let mut fragment = fragment.clone();
1318 let fragment_was_visible = fragment.visible;
1319
1320 fragment.visible = fragment.is_visible(&self.undo_map);
1321 fragment.max_undos.observe(undo.timestamp);
1322
1323 let old_start = old_fragments.start().1;
1324 let new_start = new_fragments.summary().text.visible;
1325 if fragment_was_visible && !fragment.visible {
1326 edits.push(Edit {
1327 old: old_start..old_start + fragment.len,
1328 new: new_start..new_start,
1329 });
1330 } else if !fragment_was_visible && fragment.visible {
1331 edits.push(Edit {
1332 old: old_start..old_start,
1333 new: new_start..new_start + fragment.len,
1334 });
1335 }
1336 new_ropes.push_fragment(&fragment, fragment_was_visible);
1337 new_fragments.push(fragment, &None);
1338
1339 old_fragments.next();
1340 }
1341 }
1342
1343 let suffix = old_fragments.suffix();
1344 new_ropes.append(suffix.summary().text);
1345 new_fragments.append(suffix, &None);
1346
1347 drop(old_fragments);
1348 let (visible_text, deleted_text) = new_ropes.finish();
1349 self.snapshot.fragments = new_fragments;
1350 self.snapshot.visible_text = visible_text;
1351 self.snapshot.deleted_text = deleted_text;
1352 self.subscriptions.publish_mut(&edits);
1353 }
1354
1355 fn flush_deferred_ops(&mut self) {
1356 self.deferred_replicas.clear();
1357 let mut deferred_ops = Vec::new();
1358 for op in self.deferred_ops.drain().iter().cloned() {
1359 if self.can_apply_op(&op) {
1360 self.apply_op(op);
1361 } else {
1362 self.deferred_replicas.insert(op.replica_id());
1363 deferred_ops.push(op);
1364 }
1365 }
1366 self.deferred_ops.insert(deferred_ops);
1367 }
1368
1369 fn can_apply_op(&self, op: &Operation) -> bool {
1370 if self.deferred_replicas.contains(&op.replica_id()) {
1371 false
1372 } else {
1373 self.version.observed_all(match op {
1374 Operation::Edit(edit) => &edit.version,
1375 Operation::Undo(undo) => &undo.version,
1376 })
1377 }
1378 }
1379
1380 pub fn has_deferred_ops(&self) -> bool {
1381 !self.deferred_ops.is_empty()
1382 }
1383
1384 pub fn peek_undo_stack(&self) -> Option<&HistoryEntry> {
1385 self.history.undo_stack.last()
1386 }
1387
1388 pub fn peek_redo_stack(&self) -> Option<&HistoryEntry> {
1389 self.history.redo_stack.last()
1390 }
1391
1392 pub fn start_transaction(&mut self) -> Option<TransactionId> {
1393 self.start_transaction_at(Instant::now())
1394 }
1395
1396 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
1397 self.history
1398 .start_transaction(self.version.clone(), now, &mut self.lamport_clock)
1399 }
1400
1401 pub fn end_transaction(&mut self) -> Option<(TransactionId, clock::Global)> {
1402 self.end_transaction_at(Instant::now())
1403 }
1404
1405 pub fn end_transaction_at(&mut self, now: Instant) -> Option<(TransactionId, clock::Global)> {
1406 if let Some(entry) = self.history.end_transaction(now) {
1407 let since = entry.transaction.start.clone();
1408 let id = self.history.group().unwrap();
1409 Some((id, since))
1410 } else {
1411 None
1412 }
1413 }
1414
1415 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
1416 self.history.finalize_last_transaction()
1417 }
1418
1419 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
1420 self.history.group_until(transaction_id);
1421 }
1422
1423 pub fn base_text(&self) -> &Rope {
1424 &self.history.base_text
1425 }
1426
1427 pub fn operations(&self) -> &TreeMap<clock::Lamport, Operation> {
1428 &self.history.operations
1429 }
1430
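    /// Undoes the most recent transaction on the undo stack, returning its id
    /// and the undo operation to broadcast to other replicas.
    ///
    /// Hedged sketch (illustrative, not a compiled doctest):
    /// ```ignore
    /// buffer.edit([(0..0, "hello")]);
    /// if let Some((transaction_id, undo_op)) = buffer.undo() {
    ///     // Send `undo_op` to the other replicas, e.g. via their `apply_ops`.
    /// }
    /// ```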
1431 pub fn undo(&mut self) -> Option<(TransactionId, Operation)> {
1432 if let Some(entry) = self.history.pop_undo() {
1433 let transaction = entry.transaction.clone();
1434 let transaction_id = transaction.id;
1435 let op = self.undo_or_redo(transaction);
1436 Some((transaction_id, op))
1437 } else {
1438 None
1439 }
1440 }
1441
1442 pub fn undo_transaction(&mut self, transaction_id: TransactionId) -> Option<Operation> {
1443 let transaction = self
1444 .history
1445 .remove_from_undo(transaction_id)?
1446 .transaction
1447 .clone();
1448 Some(self.undo_or_redo(transaction))
1449 }
1450
1451 pub fn undo_to_transaction(&mut self, transaction_id: TransactionId) -> Vec<Operation> {
1452 let transactions = self
1453 .history
1454 .remove_from_undo_until(transaction_id)
1455 .iter()
1456 .map(|entry| entry.transaction.clone())
1457 .collect::<Vec<_>>();
1458
1459 transactions
1460 .into_iter()
1461 .map(|transaction| self.undo_or_redo(transaction))
1462 .collect()
1463 }
1464
1465 pub fn forget_transaction(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
1466 self.history.forget(transaction_id)
1467 }
1468
1469 pub fn get_transaction(&self, transaction_id: TransactionId) -> Option<&Transaction> {
1470 self.history.transaction(transaction_id)
1471 }
1472
1473 pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
1474 self.history.merge_transactions(transaction, destination);
1475 }
1476
1477 pub fn redo(&mut self) -> Option<(TransactionId, Operation)> {
1478 if let Some(entry) = self.history.pop_redo() {
1479 let transaction = entry.transaction.clone();
1480 let transaction_id = transaction.id;
1481 let op = self.undo_or_redo(transaction);
1482 Some((transaction_id, op))
1483 } else {
1484 None
1485 }
1486 }
1487
1488 pub fn redo_to_transaction(&mut self, transaction_id: TransactionId) -> Vec<Operation> {
1489 let transactions = self
1490 .history
1491 .remove_from_redo(transaction_id)
1492 .iter()
1493 .map(|entry| entry.transaction.clone())
1494 .collect::<Vec<_>>();
1495
1496 transactions
1497 .into_iter()
1498 .map(|transaction| self.undo_or_redo(transaction))
1499 .collect()
1500 }
1501
1502 fn undo_or_redo(&mut self, transaction: Transaction) -> Operation {
1503 let mut counts = HashMap::default();
1504 for edit_id in transaction.edit_ids {
1505 counts.insert(edit_id, self.undo_map.undo_count(edit_id).saturating_add(1));
1506 }
1507
1508 let operation = self.undo_operations(counts);
1509 self.history.push(operation.clone());
1510 operation
1511 }
1512
1513 pub fn undo_operations(&mut self, counts: HashMap<clock::Lamport, u32>) -> Operation {
1514 let timestamp = self.lamport_clock.tick();
1515 let version = self.version();
1516 self.snapshot.version.observe(timestamp);
1517 let undo = UndoOperation {
1518 timestamp,
1519 version,
1520 counts,
1521 };
1522 self.apply_undo(&undo);
1523 Operation::Undo(undo)
1524 }
1525
1526 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
1527 self.history.push_transaction(transaction, now);
1528 }
1529
    /// Differs from `push_transaction` in that it does not clear the redo
    /// stack. Intended to be used to create a parent transaction to merge
    /// potential child transactions into.
    ///
    /// The caller is responsible for removing it from the undo history using
    /// `forget_transaction` if no edits are merged into it. Otherwise, if edits
    /// are merged into this transaction, the caller is responsible for ensuring
    /// the redo stack is cleared. The easiest way to ensure the redo stack is
    /// cleared is to create transactions with the usual `start_transaction` and
    /// `end_transaction` methods and merge the resulting transactions into
    /// the transaction created by this method.
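    ///
    /// Hedged sketch of that flow (illustrative, not a compiled doctest):
    /// ```ignore
    /// let parent = buffer.push_empty_transaction(Instant::now());
    /// buffer.start_transaction();
    /// buffer.edit([(0..0, "hello")]);
    /// if let Some((child, _)) = buffer.end_transaction() {
    ///     buffer.merge_transactions(child, parent);
    /// } else {
    ///     buffer.forget_transaction(parent);
    /// }
    /// ```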
1543 pub fn push_empty_transaction(&mut self, now: Instant) -> TransactionId {
1544 self.history
1545 .push_empty_transaction(self.version.clone(), now, &mut self.lamport_clock)
1546 }
1547
1548 pub fn edited_ranges_for_transaction_id<D>(
1549 &self,
1550 transaction_id: TransactionId,
1551 ) -> impl '_ + Iterator<Item = Range<D>>
1552 where
1553 D: TextDimension,
1554 {
1555 self.history
1556 .transaction(transaction_id)
1557 .into_iter()
1558 .flat_map(|transaction| self.edited_ranges_for_transaction(transaction))
1559 }
1560
1561 pub fn edited_ranges_for_edit_ids<'a, D>(
1562 &'a self,
1563 edit_ids: impl IntoIterator<Item = &'a clock::Lamport>,
1564 ) -> impl 'a + Iterator<Item = Range<D>>
1565 where
1566 D: TextDimension,
1567 {
1568 // get fragment ranges
1569 let mut cursor = self
1570 .fragments
1571 .cursor::<Dimensions<Option<&Locator>, usize>>(&None);
1572 let offset_ranges = self
1573 .fragment_ids_for_edits(edit_ids.into_iter())
1574 .into_iter()
1575 .filter_map(move |fragment_id| {
1576 cursor.seek_forward(&Some(fragment_id), Bias::Left);
1577 let fragment = cursor.item()?;
1578 let start_offset = cursor.start().1;
1579 let end_offset = start_offset + if fragment.visible { fragment.len } else { 0 };
1580 Some(start_offset..end_offset)
1581 });
1582
1583 // combine adjacent ranges
1584 let mut prev_range: Option<Range<usize>> = None;
1585 let disjoint_ranges = offset_ranges
1586 .map(Some)
1587 .chain([None])
1588 .filter_map(move |range| {
1589 if let Some((range, prev_range)) = range.as_ref().zip(prev_range.as_mut())
1590 && prev_range.end == range.start
1591 {
1592 prev_range.end = range.end;
1593 return None;
1594 }
1595 let result = prev_range.clone();
1596 prev_range = range;
1597 result
1598 });
1599
1600 // convert to the desired text dimension.
1601 let mut position = D::zero(());
1602 let mut rope_cursor = self.visible_text.cursor(0);
1603 disjoint_ranges.map(move |range| {
1604 position.add_assign(&rope_cursor.summary(range.start));
1605 let start = position;
1606 position.add_assign(&rope_cursor.summary(range.end));
1607 let end = position;
1608 start..end
1609 })
1610 }
1611
1612 pub fn edited_ranges_for_transaction<'a, D>(
1613 &'a self,
1614 transaction: &'a Transaction,
1615 ) -> impl 'a + Iterator<Item = Range<D>>
1616 where
1617 D: TextDimension,
1618 {
1619 self.edited_ranges_for_edit_ids(&transaction.edit_ids)
1620 }
1621
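    /// Registers a subscriber that accumulates subsequent edits as a `Patch`.
    ///
    /// Hedged sketch (illustrative, not a compiled doctest; assumes the
    /// `Subscription::consume` API exposed by the `subscription` module):
    /// ```ignore
    /// let subscription = buffer.subscribe();
    /// buffer.edit([(0..0, "abc")]);
    /// let patch = subscription.consume();
    /// ```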
1622 pub fn subscribe(&mut self) -> Subscription {
1623 self.subscriptions.subscribe()
1624 }
1625
1626 pub fn wait_for_edits<It: IntoIterator<Item = clock::Lamport>>(
1627 &mut self,
1628 edit_ids: It,
1629 ) -> impl 'static + Future<Output = Result<()>> + use<It> {
1630 let mut futures = Vec::new();
1631 for edit_id in edit_ids {
1632 if !self.version.observed(edit_id) {
1633 let (tx, rx) = oneshot::channel();
1634 self.edit_id_resolvers.entry(edit_id).or_default().push(tx);
1635 futures.push(rx);
1636 }
1637 }
1638
1639 async move {
1640 for mut future in futures {
1641 if future.recv().await.is_none() {
1642 anyhow::bail!("gave up waiting for edits");
1643 }
1644 }
1645 Ok(())
1646 }
1647 }
1648
1649 pub fn wait_for_anchors<It: IntoIterator<Item = Anchor>>(
1650 &mut self,
1651 anchors: It,
1652 ) -> impl 'static + Future<Output = Result<()>> + use<It> {
1653 let mut futures = Vec::new();
1654 for anchor in anchors {
1655 if !self.version.observed(anchor.timestamp)
1656 && anchor != Anchor::MAX
1657 && anchor != Anchor::MIN
1658 {
1659 let (tx, rx) = oneshot::channel();
1660 self.edit_id_resolvers
1661 .entry(anchor.timestamp)
1662 .or_default()
1663 .push(tx);
1664 futures.push(rx);
1665 }
1666 }
1667
1668 async move {
1669 for mut future in futures {
1670 if future.recv().await.is_none() {
1671 anyhow::bail!("gave up waiting for anchors");
1672 }
1673 }
1674 Ok(())
1675 }
1676 }
1677
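    /// Returns a future that resolves once this buffer has observed everything
    /// in `version`, or fails if the buffer gives up waiting.
    ///
    /// Hedged sketch (illustrative, not a compiled doctest; `remote_version`
    /// stands for a `clock::Global` received from another replica):
    /// ```ignore
    /// let wait = buffer.wait_for_version(remote_version.clone());
    /// // ... later, on the async executor:
    /// wait.await?;
    /// ```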
1678 pub fn wait_for_version(
1679 &mut self,
1680 version: clock::Global,
1681 ) -> impl Future<Output = Result<()>> + use<> {
1682 let mut rx = None;
1683 if !self.snapshot.version.observed_all(&version) {
1684 let channel = oneshot::channel();
1685 self.wait_for_version_txs.push((version, channel.0));
1686 rx = Some(channel.1);
1687 }
1688 async move {
1689 if let Some(mut rx) = rx
1690 && rx.recv().await.is_none()
1691 {
1692 anyhow::bail!("gave up waiting for version");
1693 }
1694 Ok(())
1695 }
1696 }
1697
1698 pub fn give_up_waiting(&mut self) {
1699 self.edit_id_resolvers.clear();
1700 self.wait_for_version_txs.clear();
1701 }
1702
1703 fn resolve_edit(&mut self, edit_id: clock::Lamport) {
1704 for mut tx in self
1705 .edit_id_resolvers
1706 .remove(&edit_id)
1707 .into_iter()
1708 .flatten()
1709 {
1710 tx.try_send(()).ok();
1711 }
1712 }
1713}
1714
1715#[cfg(any(test, feature = "test-support"))]
1716impl Buffer {
1717 #[track_caller]
1718 pub fn edit_via_marked_text(&mut self, marked_string: &str) {
1719 let edits = self.edits_for_marked_text(marked_string);
1720 self.edit(edits);
1721 }
1722
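    /// Computes the edits that would turn the current text into the text
    /// described by `marked_string`.
    ///
    /// Hedged sketch (illustrative, not a compiled doctest; assumes the `«…»`
    /// insertion markers understood by `util::test::marked_text_ranges`):
    /// ```ignore
    /// // With current text "abc", this yields a single edit replacing "b".
    /// let edits = buffer.edits_for_marked_text("a«XYZ»c");
    /// assert_eq!(edits, vec![(1..2, "XYZ".to_string())]);
    /// ```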
1723 #[track_caller]
1724 pub fn edits_for_marked_text(&self, marked_string: &str) -> Vec<(Range<usize>, String)> {
1725 let old_text = self.text();
1726 let (new_text, mut ranges) = util::test::marked_text_ranges(marked_string, false);
1727 if ranges.is_empty() {
1728 ranges.push(0..new_text.len());
1729 }
1730
1731 assert_eq!(
1732 old_text[..ranges[0].start],
1733 new_text[..ranges[0].start],
1734 "invalid edit"
1735 );
1736
1737 let mut delta = 0;
1738 let mut edits = Vec::new();
1739 let mut ranges = ranges.into_iter().peekable();
1740
1741 while let Some(inserted_range) = ranges.next() {
1742 let new_start = inserted_range.start;
1743 let old_start = (new_start as isize - delta) as usize;
1744
1745 let following_text = if let Some(next_range) = ranges.peek() {
1746 &new_text[inserted_range.end..next_range.start]
1747 } else {
1748 &new_text[inserted_range.end..]
1749 };
1750
1751 let inserted_len = inserted_range.len();
1752 let deleted_len = old_text[old_start..]
1753 .find(following_text)
1754 .expect("invalid edit");
1755
1756 let old_range = old_start..old_start + deleted_len;
1757 edits.push((old_range, new_text[inserted_range].to_string()));
1758 delta += inserted_len as isize - deleted_len as isize;
1759 }
1760
1761 assert_eq!(
1762 old_text.len() as isize + delta,
1763 new_text.len() as isize,
1764 "invalid edit"
1765 );
1766
1767 edits
1768 }
1769
1770 pub fn check_invariants(&self) {
1771 // Ensure every fragment is ordered by locator in the fragment tree and corresponds
1772 // to an insertion fragment in the insertions tree.
1773 let mut prev_fragment_id = Locator::min();
1774 for fragment in self.snapshot.fragments.items(&None) {
1775 assert!(fragment.id > prev_fragment_id);
1776 prev_fragment_id = fragment.id.clone();
1777
1778 let insertion_fragment = self
1779 .snapshot
1780 .insertions
1781 .get(
1782 &InsertionFragmentKey {
1783 timestamp: fragment.timestamp,
1784 split_offset: fragment.insertion_offset,
1785 },
1786 (),
1787 )
1788 .unwrap();
1789 assert_eq!(
1790 insertion_fragment.fragment_id, fragment.id,
1791 "fragment: {:?}\ninsertion: {:?}",
1792 fragment, insertion_fragment
1793 );
1794 }
1795
1796 let mut cursor = self.snapshot.fragments.cursor::<Option<&Locator>>(&None);
1797 for insertion_fragment in self.snapshot.insertions.cursor::<()>(()) {
1798 cursor.seek(&Some(&insertion_fragment.fragment_id), Bias::Left);
1799 let fragment = cursor.item().unwrap();
1800 assert_eq!(insertion_fragment.fragment_id, fragment.id);
1801 assert_eq!(insertion_fragment.split_offset, fragment.insertion_offset);
1802 }
1803
1804 let fragment_summary = self.snapshot.fragments.summary();
1805 assert_eq!(
1806 fragment_summary.text.visible,
1807 self.snapshot.visible_text.len()
1808 );
1809 assert_eq!(
1810 fragment_summary.text.deleted,
1811 self.snapshot.deleted_text.len()
1812 );
1813
1814 assert!(!self.text().contains("\r\n"));
1815 }
1816
1817 pub fn set_group_interval(&mut self, group_interval: Duration) {
1818 self.history.group_interval = group_interval;
1819 }
1820
1821 pub fn random_byte_range(&self, start_offset: usize, rng: &mut impl rand::Rng) -> Range<usize> {
1822 let end = self.clip_offset(rng.random_range(start_offset..=self.len()), Bias::Right);
1823 let start = self.clip_offset(rng.random_range(start_offset..=end), Bias::Right);
1824 start..end
1825 }
1826
1827 pub fn get_random_edits<T>(
1828 &self,
1829 rng: &mut T,
1830 edit_count: usize,
1831 ) -> Vec<(Range<usize>, Arc<str>)>
1832 where
1833 T: rand::Rng,
1834 {
1835 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
1836 let mut last_end = None;
1837 for _ in 0..edit_count {
1838 if last_end.is_some_and(|last_end| last_end >= self.len()) {
1839 break;
1840 }
1841 let new_start = last_end.map_or(0, |last_end| last_end + 1);
1842 let range = self.random_byte_range(new_start, rng);
1843 last_end = Some(range.end);
1844
1845 let new_text_len = rng.random_range(0..10);
1846 let new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
1847
1848 edits.push((range, new_text.into()));
1849 }
1850 edits
1851 }
1852
1853 pub fn randomly_edit<T>(
1854 &mut self,
1855 rng: &mut T,
1856 edit_count: usize,
1857 ) -> (Vec<(Range<usize>, Arc<str>)>, Operation)
1858 where
1859 T: rand::Rng,
1860 {
1861 let mut edits = self.get_random_edits(rng, edit_count);
1862 log::info!("mutating buffer {:?} with {:?}", self.replica_id, edits);
1863
1864 let op = self.edit(edits.iter().cloned());
1865 if let Operation::Edit(edit) = &op {
1866 assert_eq!(edits.len(), edit.new_text.len());
1867 for (edit, new_text) in edits.iter_mut().zip(&edit.new_text) {
1868 edit.1 = new_text.clone();
1869 }
1870 } else {
1871 unreachable!()
1872 }
1873
1874 (edits, op)
1875 }
1876
1877 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng) -> Vec<Operation> {
1878 use rand::prelude::*;
1879
1880 let mut ops = Vec::new();
1881 for _ in 0..rng.random_range(1..=5) {
1882 if let Some(entry) = self.history.undo_stack.choose(rng) {
1883 let transaction = entry.transaction.clone();
1884 log::info!(
1885 "undoing buffer {:?} transaction {:?}",
1886 self.replica_id,
1887 transaction
1888 );
1889 ops.push(self.undo_or_redo(transaction));
1890 }
1891 }
1892 ops
1893 }
1894}
1895
1896impl Deref for Buffer {
1897 type Target = BufferSnapshot;
1898
1899 fn deref(&self) -> &Self::Target {
1900 &self.snapshot
1901 }
1902}
1903
1904impl BufferSnapshot {
1905 pub fn as_rope(&self) -> &Rope {
1906 &self.visible_text
1907 }
1908
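    /// Reconstructs the visible text as it was at `version` by walking the fragment tree
    /// and copying every fragment that was visible at that version out of either the
    /// visible or the deleted rope.
    ///
    /// Illustrative sketch (marked `ignore`, so it is not compiled as a doctest);
    /// `buffer` is a placeholder, and the `(range, new_text)` edit call shape is the one
    /// used by `randomly_edit` in this file:
    ///
    /// ```ignore
    /// // Buffer text is "abc".
    /// let before = buffer.version().clone();
    /// buffer.edit([(3..3, "def")]); // buffer is now "abcdef"
    /// assert_eq!(buffer.rope_for_version(&before).to_string(), "abc");
    /// ```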
1909 pub fn rope_for_version(&self, version: &clock::Global) -> Rope {
1910 let mut rope = Rope::new();
1911
1912 let mut cursor = self
1913 .fragments
1914 .filter::<_, FragmentTextSummary>(&None, move |summary| {
1915 !version.observed_all(&summary.max_version)
1916 });
1917 cursor.next();
1918
1919 let mut visible_cursor = self.visible_text.cursor(0);
1920 let mut deleted_cursor = self.deleted_text.cursor(0);
1921
1922 while let Some(fragment) = cursor.item() {
1923 if cursor.start().visible > visible_cursor.offset() {
1924 let text = visible_cursor.slice(cursor.start().visible);
1925 rope.append(text);
1926 }
1927
1928 if fragment.was_visible(version, &self.undo_map) {
1929 if fragment.visible {
1930 let text = visible_cursor.slice(cursor.end().visible);
1931 rope.append(text);
1932 } else {
1933 deleted_cursor.seek_forward(cursor.start().deleted);
1934 let text = deleted_cursor.slice(cursor.end().deleted);
1935 rope.append(text);
1936 }
1937 } else if fragment.visible {
1938 visible_cursor.seek_forward(cursor.end().visible);
1939 }
1940
1941 cursor.next();
1942 }
1943
1944 if cursor.start().visible > visible_cursor.offset() {
1945 let text = visible_cursor.slice(cursor.start().visible);
1946 rope.append(text);
1947 }
1948
1949 rope
1950 }
1951
1952 pub fn remote_id(&self) -> BufferId {
1953 self.remote_id
1954 }
1955
1956 pub fn replica_id(&self) -> ReplicaId {
1957 self.replica_id
1958 }
1959
1960 pub fn row_count(&self) -> u32 {
1961 self.max_point().row + 1
1962 }
1963
1964 pub fn len(&self) -> usize {
1965 self.visible_text.len()
1966 }
1967
1968 pub fn is_empty(&self) -> bool {
1969 self.len() == 0
1970 }
1971
1972 pub fn chars(&self) -> impl Iterator<Item = char> + '_ {
1973 self.chars_at(0)
1974 }
1975
1976 pub fn chars_for_range<T: ToOffset>(&self, range: Range<T>) -> impl Iterator<Item = char> + '_ {
1977 self.text_for_range(range).flat_map(str::chars)
1978 }
1979
1980 pub fn reversed_chars_for_range<T: ToOffset>(
1981 &self,
1982 range: Range<T>,
1983 ) -> impl Iterator<Item = char> + '_ {
1984 self.reversed_chunks_in_range(range)
1985 .flat_map(|chunk| chunk.chars().rev())
1986 }
1987
1988 pub fn contains_str_at<T>(&self, position: T, needle: &str) -> bool
1989 where
1990 T: ToOffset,
1991 {
1992 let position = position.to_offset(self);
1993 position == self.clip_offset(position, Bias::Left)
1994 && self
1995 .bytes_in_range(position..self.len())
1996 .flatten()
1997 .copied()
1998 .take(needle.len())
1999 .eq(needle.bytes())
2000 }
2001
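    /// Returns the range ending at `position` whose text is the longest prefix of
    /// `needle` (compared case-insensitively) that appears immediately before
    /// `position`. If nothing matches, the returned range is empty.
    ///
    /// A sketch, not compiled as a doctest; `snapshot` is a placeholder:
    ///
    /// ```ignore
    /// // Buffer text: "Hello wor", with the cursor at offset 9.
    /// let range = snapshot.common_prefix_at(9_usize, "world");
    /// assert_eq!(range, 6..9); // the trailing "wor"
    /// ```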
2002 pub fn common_prefix_at<T>(&self, position: T, needle: &str) -> Range<T>
2003 where
2004 T: ToOffset + TextDimension,
2005 {
2006 let offset = position.to_offset(self);
2007 let common_prefix_len = needle
2008 .char_indices()
2009 .map(|(index, _)| index)
2010 .chain([needle.len()])
2011 .take_while(|&len| len <= offset)
2012 .filter(|&len| {
2013 let left = self
2014 .chars_for_range(offset - len..offset)
2015 .flat_map(char::to_lowercase);
2016 let right = needle[..len].chars().flat_map(char::to_lowercase);
2017 left.eq(right)
2018 })
2019 .last()
2020 .unwrap_or(0);
2021 let start_offset = offset - common_prefix_len;
2022 let start = self.text_summary_for_range(0..start_offset);
2023 start..position
2024 }
2025
2026 pub fn text(&self) -> String {
2027 self.visible_text.to_string()
2028 }
2029
2030 pub fn line_ending(&self) -> LineEnding {
2031 self.line_ending
2032 }
2033
2034 pub fn deleted_text(&self) -> String {
2035 self.deleted_text.to_string()
2036 }
2037
2038 pub fn fragments(&self) -> impl Iterator<Item = &Fragment> {
2039 self.fragments.iter()
2040 }
2041
2042 pub fn text_summary(&self) -> TextSummary {
2043 self.visible_text.summary()
2044 }
2045
2046 pub fn max_point(&self) -> Point {
2047 self.visible_text.max_point()
2048 }
2049
2050 pub fn max_point_utf16(&self) -> PointUtf16 {
2051 self.visible_text.max_point_utf16()
2052 }
2053
2054 pub fn point_to_offset(&self, point: Point) -> usize {
2055 self.visible_text.point_to_offset(point)
2056 }
2057
2058 pub fn point_to_offset_utf16(&self, point: Point) -> OffsetUtf16 {
2059 self.visible_text.point_to_offset_utf16(point)
2060 }
2061
2062 pub fn point_utf16_to_offset_utf16(&self, point: PointUtf16) -> OffsetUtf16 {
2063 self.visible_text.point_utf16_to_offset_utf16(point)
2064 }
2065
2066 pub fn point_utf16_to_offset(&self, point: PointUtf16) -> usize {
2067 self.visible_text.point_utf16_to_offset(point)
2068 }
2069
2070 pub fn unclipped_point_utf16_to_offset(&self, point: Unclipped<PointUtf16>) -> usize {
2071 self.visible_text.unclipped_point_utf16_to_offset(point)
2072 }
2073
2074 pub fn unclipped_point_utf16_to_point(&self, point: Unclipped<PointUtf16>) -> Point {
2075 self.visible_text.unclipped_point_utf16_to_point(point)
2076 }
2077
2078 pub fn offset_utf16_to_offset(&self, offset: OffsetUtf16) -> usize {
2079 self.visible_text.offset_utf16_to_offset(offset)
2080 }
2081
2082 pub fn offset_to_offset_utf16(&self, offset: usize) -> OffsetUtf16 {
2083 self.visible_text.offset_to_offset_utf16(offset)
2084 }
2085
2086 pub fn offset_to_point(&self, offset: usize) -> Point {
2087 self.visible_text.offset_to_point(offset)
2088 }
2089
2090 pub fn offset_to_point_utf16(&self, offset: usize) -> PointUtf16 {
2091 self.visible_text.offset_to_point_utf16(offset)
2092 }
2093
2094 pub fn point_to_point_utf16(&self, point: Point) -> PointUtf16 {
2095 self.visible_text.point_to_point_utf16(point)
2096 }
2097
2098 pub fn point_utf16_to_point(&self, point: PointUtf16) -> Point {
2099 self.visible_text.point_utf16_to_point(point)
2100 }
2101
2102 pub fn version(&self) -> &clock::Global {
2103 &self.version
2104 }
2105
2106 pub fn chars_at<T: ToOffset>(&self, position: T) -> impl Iterator<Item = char> + '_ {
2107 let offset = position.to_offset(self);
2108 self.visible_text.chars_at(offset)
2109 }
2110
2111 pub fn reversed_chars_at<T: ToOffset>(&self, position: T) -> impl Iterator<Item = char> + '_ {
2112 let offset = position.to_offset(self);
2113 self.visible_text.reversed_chars_at(offset)
2114 }
2115
2116 pub fn reversed_chunks_in_range<T: ToOffset>(&self, range: Range<T>) -> rope::Chunks<'_> {
2117 let range = range.start.to_offset(self)..range.end.to_offset(self);
2118 self.visible_text.reversed_chunks_in_range(range)
2119 }
2120
2121 pub fn bytes_in_range<T: ToOffset>(&self, range: Range<T>) -> rope::Bytes<'_> {
2122 let start = range.start.to_offset(self);
2123 let end = range.end.to_offset(self);
2124 self.visible_text.bytes_in_range(start..end)
2125 }
2126
2127 pub fn reversed_bytes_in_range<T: ToOffset>(&self, range: Range<T>) -> rope::Bytes<'_> {
2128 let start = range.start.to_offset(self);
2129 let end = range.end.to_offset(self);
2130 self.visible_text.reversed_bytes_in_range(start..end)
2131 }
2132
2133 pub fn text_for_range<T: ToOffset>(&self, range: Range<T>) -> Chunks<'_> {
2134 let start = range.start.to_offset(self);
2135 let end = range.end.to_offset(self);
2136 self.visible_text.chunks_in_range(start..end)
2137 }
2138
2139 pub fn line_len(&self, row: u32) -> u32 {
2140 let row_start_offset = Point::new(row, 0).to_offset(self);
2141 let row_end_offset = if row >= self.max_point().row {
2142 self.len()
2143 } else {
2144 Point::new(row + 1, 0).to_previous_offset(self)
2145 };
2146 (row_end_offset - row_start_offset) as u32
2147 }
2148
2149 pub fn line_indents_in_row_range(
2150 &self,
2151 row_range: Range<u32>,
2152 ) -> impl Iterator<Item = (u32, LineIndent)> + '_ {
2153 let start = Point::new(row_range.start, 0).to_offset(self);
2154 let end = Point::new(row_range.end, self.line_len(row_range.end)).to_offset(self);
2155
2156 let mut chunks = self.as_rope().chunks_in_range(start..end);
2157 let mut row = row_range.start;
2158 let mut done = false;
2159 std::iter::from_fn(move || {
2160 if done {
2161 None
2162 } else {
2163 let indent = (row, LineIndent::from_chunks(&mut chunks));
2164 done = !chunks.next_line();
2165 row += 1;
2166 Some(indent)
2167 }
2168 })
2169 }
2170
    /// Returns the line indents for the given row range, exclusive of the end row, in
    /// reverse order (last row first).
2172 pub fn reversed_line_indents_in_row_range(
2173 &self,
2174 row_range: Range<u32>,
2175 ) -> impl Iterator<Item = (u32, LineIndent)> + '_ {
2176 let start = Point::new(row_range.start, 0).to_offset(self);
2177
2178 let end_point;
2179 let end;
2180 if row_range.end > row_range.start {
2181 end_point = Point::new(row_range.end - 1, self.line_len(row_range.end - 1));
2182 end = end_point.to_offset(self);
2183 } else {
2184 end_point = Point::new(row_range.start, 0);
2185 end = start;
2186 };
2187
2188 let mut chunks = self.as_rope().chunks_in_range(start..end);
2189 // Move the cursor to the start of the last line if it's not empty.
2190 chunks.seek(end);
2191 if end_point.column > 0 {
2192 chunks.prev_line();
2193 }
2194
2195 let mut row = end_point.row;
2196 let mut done = false;
2197 std::iter::from_fn(move || {
2198 if done {
2199 None
2200 } else {
2201 let initial_offset = chunks.offset();
2202 let indent = (row, LineIndent::from_chunks(&mut chunks));
2203 if chunks.offset() > initial_offset {
2204 chunks.prev_line();
2205 }
2206 done = !chunks.prev_line();
2207 if !done {
2208 row -= 1;
2209 }
2210
2211 Some(indent)
2212 }
2213 })
2214 }
2215
2216 pub fn line_indent_for_row(&self, row: u32) -> LineIndent {
2217 LineIndent::from_iter(self.chars_at(Point::new(row, 0)))
2218 }
2219
2220 pub fn is_line_blank(&self, row: u32) -> bool {
2221 self.text_for_range(Point::new(row, 0)..Point::new(row, self.line_len(row)))
2222 .all(|chunk| chunk.matches(|c: char| !c.is_whitespace()).next().is_none())
2223 }
2224
2225 pub fn text_summary_for_range<D, O: ToOffset>(&self, range: Range<O>) -> D
2226 where
2227 D: TextDimension,
2228 {
2229 self.visible_text
2230 .cursor(range.start.to_offset(self))
2231 .summary(range.end.to_offset(self))
2232 }
2233
2234 pub fn summaries_for_anchors<'a, D, A>(&'a self, anchors: A) -> impl 'a + Iterator<Item = D>
2235 where
2236 D: 'a + TextDimension,
2237 A: 'a + IntoIterator<Item = &'a Anchor>,
2238 {
2239 let anchors = anchors.into_iter();
2240 self.summaries_for_anchors_with_payload::<D, _, ()>(anchors.map(|a| (a, ())))
2241 .map(|d| d.0)
2242 }
2243
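    /// Resolves a batch of anchors (each carrying an arbitrary payload) in a single
    /// forward pass over the fragment tree. The anchors must be sorted in ascending
    /// order of buffer position, because the fragment and text cursors only move
    /// forward.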
2244 pub fn summaries_for_anchors_with_payload<'a, D, A, T>(
2245 &'a self,
2246 anchors: A,
2247 ) -> impl 'a + Iterator<Item = (D, T)>
2248 where
2249 D: 'a + TextDimension,
2250 A: 'a + IntoIterator<Item = (&'a Anchor, T)>,
2251 {
2252 let anchors = anchors.into_iter();
2253 let mut insertion_cursor = self.insertions.cursor::<InsertionFragmentKey>(());
2254 let mut fragment_cursor = self
2255 .fragments
2256 .cursor::<Dimensions<Option<&Locator>, usize>>(&None);
2257 let mut text_cursor = self.visible_text.cursor(0);
2258 let mut position = D::zero(());
2259
2260 anchors.map(move |(anchor, payload)| {
2261 if *anchor == Anchor::MIN {
2262 return (D::zero(()), payload);
2263 } else if *anchor == Anchor::MAX {
2264 return (D::from_text_summary(&self.visible_text.summary()), payload);
2265 }
2266
2267 let anchor_key = InsertionFragmentKey {
2268 timestamp: anchor.timestamp,
2269 split_offset: anchor.offset,
2270 };
2271 insertion_cursor.seek(&anchor_key, anchor.bias);
2272 if let Some(insertion) = insertion_cursor.item() {
2273 let comparison = sum_tree::KeyedItem::key(insertion).cmp(&anchor_key);
2274 if comparison == Ordering::Greater
2275 || (anchor.bias == Bias::Left
2276 && comparison == Ordering::Equal
2277 && anchor.offset > 0)
2278 {
2279 insertion_cursor.prev();
2280 }
2281 } else {
2282 insertion_cursor.prev();
2283 }
2284 let insertion = insertion_cursor.item().expect("invalid insertion");
2285 assert_eq!(
2286 insertion.timestamp,
2287 anchor.timestamp,
2288 "invalid insertion for buffer {} with anchor {:?}",
2289 self.remote_id(),
2290 anchor
2291 );
2292
2293 fragment_cursor.seek_forward(&Some(&insertion.fragment_id), Bias::Left);
2294 let fragment = fragment_cursor.item().unwrap();
2295 let mut fragment_offset = fragment_cursor.start().1;
2296 if fragment.visible {
2297 fragment_offset += anchor.offset - insertion.split_offset;
2298 }
2299
2300 position.add_assign(&text_cursor.summary(fragment_offset));
2301 (position, payload)
2302 })
2303 }
2304
2305 pub fn summary_for_anchor<D>(&self, anchor: &Anchor) -> D
2306 where
2307 D: TextDimension,
2308 {
2309 self.text_summary_for_range(0..self.offset_for_anchor(anchor))
2310 }
2311
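    /// Resolves an anchor to its current offset in the visible text: the anchor's
    /// `(timestamp, offset)` pair is looked up in the insertions tree (backing up one
    /// entry when the bias or a fragment split requires it), the resulting insertion
    /// fragment locates the fragment in the fragment tree, and, if that fragment is
    /// visible, the anchor's offset within the insertion is added to the fragment's
    /// start position.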
2312 pub fn offset_for_anchor(&self, anchor: &Anchor) -> usize {
2313 if *anchor == Anchor::MIN {
2314 0
2315 } else if *anchor == Anchor::MAX {
2316 self.visible_text.len()
2317 } else {
2318 debug_assert!(anchor.buffer_id == Some(self.remote_id));
2319 debug_assert!(self.version.observed(anchor.timestamp));
2320 let anchor_key = InsertionFragmentKey {
2321 timestamp: anchor.timestamp,
2322 split_offset: anchor.offset,
2323 };
2324 let mut insertion_cursor = self.insertions.cursor::<InsertionFragmentKey>(());
2325 insertion_cursor.seek(&anchor_key, anchor.bias);
2326 if let Some(insertion) = insertion_cursor.item() {
2327 let comparison = sum_tree::KeyedItem::key(insertion).cmp(&anchor_key);
2328 if comparison == Ordering::Greater
2329 || (anchor.bias == Bias::Left
2330 && comparison == Ordering::Equal
2331 && anchor.offset > 0)
2332 {
2333 insertion_cursor.prev();
2334 }
2335 } else {
2336 insertion_cursor.prev();
2337 }
2338
2339 let Some(insertion) = insertion_cursor
2340 .item()
2341 .filter(|insertion| insertion.timestamp == anchor.timestamp)
2342 else {
2343 self.panic_bad_anchor(anchor);
2344 };
2345
2346 let (start, _, item) = self
2347 .fragments
2348 .find::<Dimensions<Option<&Locator>, usize>, _>(
2349 &None,
2350 &Some(&insertion.fragment_id),
2351 Bias::Left,
2352 );
2353 let fragment = item.unwrap();
2354 let mut fragment_offset = start.1;
2355 if fragment.visible {
2356 fragment_offset += anchor.offset - insertion.split_offset;
2357 }
2358 fragment_offset
2359 }
2360 }
2361
2362 #[cold]
2363 fn panic_bad_anchor(&self, anchor: &Anchor) -> ! {
2364 if anchor.buffer_id.is_some_and(|id| id != self.remote_id) {
2365 panic!(
2366 "invalid anchor - buffer id does not match: anchor {anchor:?}; buffer id: {}, version: {:?}",
2367 self.remote_id, self.version
2368 );
2369 } else if !self.version.observed(anchor.timestamp) {
2370 panic!(
2371 "invalid anchor - snapshot has not observed lamport: {:?}; version: {:?}",
2372 anchor, self.version
2373 );
2374 } else {
2375 panic!(
2376 "invalid anchor {:?}. buffer id: {}, version: {:?}",
2377 anchor, self.remote_id, self.version
2378 );
2379 }
2380 }
2381
2382 fn fragment_id_for_anchor(&self, anchor: &Anchor) -> &Locator {
2383 self.try_fragment_id_for_anchor(anchor)
2384 .unwrap_or_else(|| self.panic_bad_anchor(anchor))
2385 }
2386
2387 fn try_fragment_id_for_anchor(&self, anchor: &Anchor) -> Option<&Locator> {
2388 if *anchor == Anchor::MIN {
2389 Some(Locator::min_ref())
2390 } else if *anchor == Anchor::MAX {
2391 Some(Locator::max_ref())
2392 } else {
2393 let anchor_key = InsertionFragmentKey {
2394 timestamp: anchor.timestamp,
2395 split_offset: anchor.offset,
2396 };
2397 let mut insertion_cursor = self.insertions.cursor::<InsertionFragmentKey>(());
2398 insertion_cursor.seek(&anchor_key, anchor.bias);
2399 if let Some(insertion) = insertion_cursor.item() {
2400 let comparison = sum_tree::KeyedItem::key(insertion).cmp(&anchor_key);
2401 if comparison == Ordering::Greater
2402 || (anchor.bias == Bias::Left
2403 && comparison == Ordering::Equal
2404 && anchor.offset > 0)
2405 {
2406 insertion_cursor.prev();
2407 }
2408 } else {
2409 insertion_cursor.prev();
2410 }
2411
2412 insertion_cursor
2413 .item()
2414 .filter(|insertion| {
2415 !cfg!(debug_assertions) || insertion.timestamp == anchor.timestamp
2416 })
2417 .map(|insertion| &insertion.fragment_id)
2418 }
2419 }
2420
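    /// Creates a left-biased anchor at `position`. A left-biased anchor keeps pointing
    /// before any text that is later inserted exactly at its position, whereas a
    /// right-biased anchor (see `anchor_after`) ends up after such an insertion.
    ///
    /// Illustrative sketch, not compiled as a doctest; `buffer` is a placeholder and the
    /// `(range, new_text)` edit call shape is the one used by `randomly_edit` in this
    /// file:
    ///
    /// ```ignore
    /// // Buffer text: "ab"
    /// let before = buffer.anchor_before(1_usize);
    /// let after = buffer.anchor_after(1_usize);
    /// buffer.edit([(1..1, "XY")]); // buffer is now "aXYb"
    /// assert_eq!(before.to_offset(&buffer), 1); // stays before the insertion
    /// assert_eq!(after.to_offset(&buffer), 3); // ends up after the insertion
    /// ```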
2421 pub fn anchor_before<T: ToOffset>(&self, position: T) -> Anchor {
2422 self.anchor_at(position, Bias::Left)
2423 }
2424
2425 pub fn anchor_after<T: ToOffset>(&self, position: T) -> Anchor {
2426 self.anchor_at(position, Bias::Right)
2427 }
2428
2429 pub fn anchor_at<T: ToOffset>(&self, position: T, bias: Bias) -> Anchor {
2430 self.anchor_at_offset(position.to_offset(self), bias)
2431 }
2432
2433 fn anchor_at_offset(&self, offset: usize, bias: Bias) -> Anchor {
2434 if bias == Bias::Left && offset == 0 {
2435 Anchor::MIN
2436 } else if bias == Bias::Right && offset == self.len() {
2437 Anchor::MAX
2438 } else {
2439 if offset > self.visible_text.len() {
2440 panic!("offset {} is out of bounds", offset)
2441 }
2442 self.visible_text.assert_char_boundary(offset);
2443 let (start, _, item) = self.fragments.find::<usize, _>(&None, &offset, bias);
2444 let fragment = item.unwrap();
2445 let overshoot = offset - start;
2446 Anchor {
2447 timestamp: fragment.timestamp,
2448 offset: fragment.insertion_offset + overshoot,
2449 bias,
2450 buffer_id: Some(self.remote_id),
2451 }
2452 }
2453 }
2454
2455 pub fn can_resolve(&self, anchor: &Anchor) -> bool {
2456 *anchor == Anchor::MIN
2457 || *anchor == Anchor::MAX
2458 || (Some(self.remote_id) == anchor.buffer_id && self.version.observed(anchor.timestamp))
2459 }
2460
2461 pub fn clip_offset(&self, offset: usize, bias: Bias) -> usize {
2462 self.visible_text.clip_offset(offset, bias)
2463 }
2464
2465 pub fn clip_point(&self, point: Point, bias: Bias) -> Point {
2466 self.visible_text.clip_point(point, bias)
2467 }
2468
2469 pub fn clip_offset_utf16(&self, offset: OffsetUtf16, bias: Bias) -> OffsetUtf16 {
2470 self.visible_text.clip_offset_utf16(offset, bias)
2471 }
2472
2473 pub fn clip_point_utf16(&self, point: Unclipped<PointUtf16>, bias: Bias) -> PointUtf16 {
2474 self.visible_text.clip_point_utf16(point, bias)
2475 }
2476
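    /// Returns the edits that transform the text as it was at `since` into the current
    /// text. Each edit's `old` range is expressed in coordinates of the text at `since`,
    /// and its `new` range in coordinates of the current text.
    ///
    /// Illustrative sketch, not compiled as a doctest; `buffer` is a placeholder and the
    /// `(range, new_text)` edit call shape is the one used by `randomly_edit` in this
    /// file:
    ///
    /// ```ignore
    /// // Buffer text: "abc"
    /// let since = buffer.version().clone();
    /// buffer.edit([(1..1, "X")]); // buffer is now "aXbc"
    /// let edits: Vec<Edit<usize>> = buffer.edits_since(&since).collect();
    /// assert_eq!(edits.len(), 1);
    /// assert_eq!(edits[0].old, 1..1);
    /// assert_eq!(edits[0].new, 1..2);
    /// ```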
2477 pub fn edits_since<'a, D>(
2478 &'a self,
2479 since: &'a clock::Global,
2480 ) -> impl 'a + Iterator<Item = Edit<D>>
2481 where
2482 D: TextDimension + Ord,
2483 {
2484 self.edits_since_in_range(since, Anchor::MIN..Anchor::MAX)
2485 }
2486
2487 pub fn anchored_edits_since<'a, D>(
2488 &'a self,
2489 since: &'a clock::Global,
2490 ) -> impl 'a + Iterator<Item = (Edit<D>, Range<Anchor>)>
2491 where
2492 D: TextDimension + Ord,
2493 {
2494 self.anchored_edits_since_in_range(since, Anchor::MIN..Anchor::MAX)
2495 }
2496
2497 pub fn edits_since_in_range<'a, D>(
2498 &'a self,
2499 since: &'a clock::Global,
2500 range: Range<Anchor>,
2501 ) -> impl 'a + Iterator<Item = Edit<D>>
2502 where
2503 D: TextDimension + Ord,
2504 {
2505 self.anchored_edits_since_in_range(since, range)
2506 .map(|item| item.0)
2507 }
2508
2509 pub fn anchored_edits_since_in_range<'a, D>(
2510 &'a self,
2511 since: &'a clock::Global,
2512 range: Range<Anchor>,
2513 ) -> impl 'a + Iterator<Item = (Edit<D>, Range<Anchor>)>
2514 where
2515 D: TextDimension + Ord,
2516 {
2517 let fragments_cursor = if *since == self.version {
2518 None
2519 } else {
2520 let mut cursor = self.fragments.filter(&None, move |summary| {
2521 !since.observed_all(&summary.max_version)
2522 });
2523 cursor.next();
2524 Some(cursor)
2525 };
2526 let start_fragment_id = self.fragment_id_for_anchor(&range.start);
2527 let (start, _, item) = self
2528 .fragments
2529 .find::<Dimensions<Option<&Locator>, FragmentTextSummary>, _>(
2530 &None,
2531 &Some(start_fragment_id),
2532 Bias::Left,
2533 );
2534 let mut visible_start = start.1.visible;
2535 let mut deleted_start = start.1.deleted;
2536 if let Some(fragment) = item {
2537 let overshoot = range.start.offset - fragment.insertion_offset;
2538 if fragment.visible {
2539 visible_start += overshoot;
2540 } else {
2541 deleted_start += overshoot;
2542 }
2543 }
2544 let end_fragment_id = self.fragment_id_for_anchor(&range.end);
2545
2546 Edits {
2547 visible_cursor: self.visible_text.cursor(visible_start),
2548 deleted_cursor: self.deleted_text.cursor(deleted_start),
2549 fragments_cursor,
2550 undos: &self.undo_map,
2551 since,
2552 old_end: D::zero(()),
2553 new_end: D::zero(()),
2554 range: (start_fragment_id, range.start.offset)..(end_fragment_id, range.end.offset),
2555 buffer_id: self.remote_id,
2556 }
2557 }
2558
2559 pub fn has_edits_since_in_range(&self, since: &clock::Global, range: Range<Anchor>) -> bool {
2560 if *since != self.version {
2561 let start_fragment_id = self.fragment_id_for_anchor(&range.start);
2562 let end_fragment_id = self.fragment_id_for_anchor(&range.end);
2563 let mut cursor = self.fragments.filter::<_, usize>(&None, move |summary| {
2564 !since.observed_all(&summary.max_version)
2565 });
2566 cursor.next();
2567 while let Some(fragment) = cursor.item() {
2568 if fragment.id > *end_fragment_id {
2569 break;
2570 }
2571 if fragment.id > *start_fragment_id {
2572 let was_visible = fragment.was_visible(since, &self.undo_map);
2573 let is_visible = fragment.visible;
2574 if was_visible != is_visible {
2575 return true;
2576 }
2577 }
2578 cursor.next();
2579 }
2580 }
2581 false
2582 }
2583
2584 pub fn has_edits_since(&self, since: &clock::Global) -> bool {
2585 if *since != self.version {
2586 let mut cursor = self.fragments.filter::<_, usize>(&None, move |summary| {
2587 !since.observed_all(&summary.max_version)
2588 });
2589 cursor.next();
2590 while let Some(fragment) = cursor.item() {
2591 let was_visible = fragment.was_visible(since, &self.undo_map);
2592 let is_visible = fragment.visible;
2593 if was_visible != is_visible {
2594 return true;
2595 }
2596 cursor.next();
2597 }
2598 }
2599 false
2600 }
2601
2602 pub fn range_to_version(&self, range: Range<usize>, version: &clock::Global) -> Range<usize> {
2603 let mut offsets = self.offsets_to_version([range.start, range.end], version);
2604 offsets.next().unwrap()..offsets.next().unwrap()
2605 }
2606
2607 /// Converts the given sequence of offsets into their corresponding offsets
2608 /// at a prior version of this buffer.
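    ///
    /// Illustrative sketch, not compiled as a doctest; `snapshot` and `version` are
    /// placeholders:
    ///
    /// ```ignore
    /// // The text was "abc" at `version`; since then "XX" was inserted at offset 1,
    /// // so the current text is "aXXbc".
    /// let old: Vec<usize> = snapshot.offsets_to_version([0_usize, 3, 5], &version).collect();
    /// assert_eq!(old, vec![0, 1, 3]);
    /// ```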
2609 pub fn offsets_to_version<'a>(
2610 &'a self,
2611 offsets: impl 'a + IntoIterator<Item = usize>,
2612 version: &'a clock::Global,
2613 ) -> impl 'a + Iterator<Item = usize> {
2614 let mut edits = self.edits_since(version).peekable();
2615 let mut last_old_end = 0;
2616 let mut last_new_end = 0;
2617 offsets.into_iter().map(move |new_offset| {
2618 while let Some(edit) = edits.peek() {
2619 if edit.new.start > new_offset {
2620 break;
2621 }
2622
2623 if edit.new.end <= new_offset {
2624 last_new_end = edit.new.end;
2625 last_old_end = edit.old.end;
2626 edits.next();
2627 continue;
2628 }
2629
2630 let overshoot = new_offset - edit.new.start;
2631 return (edit.old.start + overshoot).min(edit.old.end);
2632 }
2633
2634 last_old_end + new_offset.saturating_sub(last_new_end)
2635 })
2636 }
2637
    /// Visually annotates a position or range with the `Debug` representation of a value.
    /// The call site of this function is used as the key, so a previous annotation made
    /// from the same call site is replaced.
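    ///
    /// Illustrative sketch, not compiled as a doctest; the range and label are
    /// placeholders:
    ///
    /// ```ignore
    /// snapshot.debug(&(2..5), "scanning this range");
    /// ```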
2640 #[cfg(debug_assertions)]
2641 #[track_caller]
2642 pub fn debug<R, V>(&self, ranges: &R, value: V)
2643 where
2644 R: debug::ToDebugRanges,
2645 V: std::fmt::Debug,
2646 {
2647 self.debug_with_key(std::panic::Location::caller(), ranges, value);
2648 }
2649
2650 /// Visually annotates a position or range with the `Debug` representation of a value. Previous
2651 /// debug annotations with the same key will be removed. The key is also used to determine the
2652 /// annotation's color.
2653 #[cfg(debug_assertions)]
2654 pub fn debug_with_key<K, R, V>(&self, key: &K, ranges: &R, value: V)
2655 where
2656 K: std::hash::Hash + 'static,
2657 R: debug::ToDebugRanges,
2658 V: std::fmt::Debug,
2659 {
2660 let ranges = ranges
2661 .to_debug_ranges(self)
2662 .into_iter()
2663 .map(|range| self.anchor_after(range.start)..self.anchor_before(range.end))
2664 .collect();
2665 debug::GlobalDebugRanges::with_locked(|debug_ranges| {
2666 debug_ranges.insert(key, ranges, format!("{value:?}").into());
2667 });
2668 }
2669}
2670
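/// Builds the new visible and deleted ropes that result from applying an edit: runs of
/// text are copied from the old visible or deleted rope and routed to the new visible or
/// deleted rope depending on whether the corresponding fragment remains visible.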
2671struct RopeBuilder<'a> {
2672 old_visible_cursor: rope::Cursor<'a>,
2673 old_deleted_cursor: rope::Cursor<'a>,
2674 new_visible: Rope,
2675 new_deleted: Rope,
2676}
2677
2678impl<'a> RopeBuilder<'a> {
2679 fn new(old_visible_cursor: rope::Cursor<'a>, old_deleted_cursor: rope::Cursor<'a>) -> Self {
2680 Self {
2681 old_visible_cursor,
2682 old_deleted_cursor,
2683 new_visible: Rope::new(),
2684 new_deleted: Rope::new(),
2685 }
2686 }
2687
2688 fn append(&mut self, len: FragmentTextSummary) {
2689 self.push(len.visible, true, true);
2690 self.push(len.deleted, false, false);
2691 }
2692
2693 fn push_fragment(&mut self, fragment: &Fragment, was_visible: bool) {
2694 debug_assert!(fragment.len > 0);
2695 self.push(fragment.len, was_visible, fragment.visible)
2696 }
2697
2698 fn push(&mut self, len: usize, was_visible: bool, is_visible: bool) {
2699 let text = if was_visible {
2700 self.old_visible_cursor
2701 .slice(self.old_visible_cursor.offset() + len)
2702 } else {
2703 self.old_deleted_cursor
2704 .slice(self.old_deleted_cursor.offset() + len)
2705 };
2706 if is_visible {
2707 self.new_visible.append(text);
2708 } else {
2709 self.new_deleted.append(text);
2710 }
2711 }
2712
2713 fn push_str(&mut self, text: &str) {
2714 self.new_visible.push(text);
2715 }
2716
2717 fn finish(mut self) -> (Rope, Rope) {
2718 self.new_visible.append(self.old_visible_cursor.suffix());
2719 self.new_deleted.append(self.old_deleted_cursor.suffix());
2720 (self.new_visible, self.new_deleted)
2721 }
2722}
2723
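// Iterating over `Edits` walks the fragments that changed since `since`, skips fragments
// outside the requested range, and coalesces adjacent newly-inserted or newly-deleted
// fragments into single `Edit` values. Each edit's `old` coordinates are measured
// against the text at `since`, its `new` coordinates against the current text, and it is
// paired with the anchor range it covers.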
2724impl<D: TextDimension + Ord, F: FnMut(&FragmentSummary) -> bool> Iterator for Edits<'_, D, F> {
2725 type Item = (Edit<D>, Range<Anchor>);
2726
2727 fn next(&mut self) -> Option<Self::Item> {
2728 let mut pending_edit: Option<Self::Item> = None;
2729 let cursor = self.fragments_cursor.as_mut()?;
2730
2731 while let Some(fragment) = cursor.item() {
2732 if fragment.id < *self.range.start.0 {
2733 cursor.next();
2734 continue;
2735 } else if fragment.id > *self.range.end.0 {
2736 break;
2737 }
2738
2739 if cursor.start().visible > self.visible_cursor.offset() {
2740 let summary = self.visible_cursor.summary(cursor.start().visible);
2741 self.old_end.add_assign(&summary);
2742 self.new_end.add_assign(&summary);
2743 }
2744
2745 if pending_edit
2746 .as_ref()
2747 .is_some_and(|(change, _)| change.new.end < self.new_end)
2748 {
2749 break;
2750 }
2751
2752 let start_anchor = Anchor {
2753 timestamp: fragment.timestamp,
2754 offset: fragment.insertion_offset,
2755 bias: Bias::Right,
2756 buffer_id: Some(self.buffer_id),
2757 };
2758 let end_anchor = Anchor {
2759 timestamp: fragment.timestamp,
2760 offset: fragment.insertion_offset + fragment.len,
2761 bias: Bias::Left,
2762 buffer_id: Some(self.buffer_id),
2763 };
2764
2765 if !fragment.was_visible(self.since, self.undos) && fragment.visible {
2766 let mut visible_end = cursor.end().visible;
2767 if fragment.id == *self.range.end.0 {
2768 visible_end = cmp::min(
2769 visible_end,
2770 cursor.start().visible + (self.range.end.1 - fragment.insertion_offset),
2771 );
2772 }
2773
2774 let fragment_summary = self.visible_cursor.summary(visible_end);
2775 let mut new_end = self.new_end;
2776 new_end.add_assign(&fragment_summary);
2777 if let Some((edit, range)) = pending_edit.as_mut() {
2778 edit.new.end = new_end;
2779 range.end = end_anchor;
2780 } else {
2781 pending_edit = Some((
2782 Edit {
2783 old: self.old_end..self.old_end,
2784 new: self.new_end..new_end,
2785 },
2786 start_anchor..end_anchor,
2787 ));
2788 }
2789
2790 self.new_end = new_end;
2791 } else if fragment.was_visible(self.since, self.undos) && !fragment.visible {
2792 let mut deleted_end = cursor.end().deleted;
2793 if fragment.id == *self.range.end.0 {
2794 deleted_end = cmp::min(
2795 deleted_end,
2796 cursor.start().deleted + (self.range.end.1 - fragment.insertion_offset),
2797 );
2798 }
2799
2800 if cursor.start().deleted > self.deleted_cursor.offset() {
2801 self.deleted_cursor.seek_forward(cursor.start().deleted);
2802 }
2803 let fragment_summary = self.deleted_cursor.summary(deleted_end);
2804 let mut old_end = self.old_end;
2805 old_end.add_assign(&fragment_summary);
2806 if let Some((edit, range)) = pending_edit.as_mut() {
2807 edit.old.end = old_end;
2808 range.end = end_anchor;
2809 } else {
2810 pending_edit = Some((
2811 Edit {
2812 old: self.old_end..old_end,
2813 new: self.new_end..self.new_end,
2814 },
2815 start_anchor..end_anchor,
2816 ));
2817 }
2818
2819 self.old_end = old_end;
2820 }
2821
2822 cursor.next();
2823 }
2824
2825 pending_edit
2826 }
2827}
2828
2829impl Fragment {
2830 fn is_visible(&self, undos: &UndoMap) -> bool {
2831 !undos.is_undone(self.timestamp) && self.deletions.iter().all(|d| undos.is_undone(*d))
2832 }
2833
2834 fn was_visible(&self, version: &clock::Global, undos: &UndoMap) -> bool {
2835 (version.observed(self.timestamp) && !undos.was_undone(self.timestamp, version))
2836 && self
2837 .deletions
2838 .iter()
2839 .all(|d| !version.observed(*d) || undos.was_undone(*d, version))
2840 }
2841}
2842
2843impl sum_tree::Item for Fragment {
2844 type Summary = FragmentSummary;
2845
2846 fn summary(&self, _cx: &Option<clock::Global>) -> Self::Summary {
2847 let mut max_version = clock::Global::new();
2848 max_version.observe(self.timestamp);
2849 for deletion in &self.deletions {
2850 max_version.observe(*deletion);
2851 }
2852 max_version.join(&self.max_undos);
2853
2854 let mut min_insertion_version = clock::Global::new();
2855 min_insertion_version.observe(self.timestamp);
2856 let max_insertion_version = min_insertion_version.clone();
2857 if self.visible {
2858 FragmentSummary {
2859 max_id: self.id.clone(),
2860 text: FragmentTextSummary {
2861 visible: self.len,
2862 deleted: 0,
2863 },
2864 max_version,
2865 min_insertion_version,
2866 max_insertion_version,
2867 }
2868 } else {
2869 FragmentSummary {
2870 max_id: self.id.clone(),
2871 text: FragmentTextSummary {
2872 visible: 0,
2873 deleted: self.len,
2874 },
2875 max_version,
2876 min_insertion_version,
2877 max_insertion_version,
2878 }
2879 }
2880 }
2881}
2882
2883impl sum_tree::Summary for FragmentSummary {
2884 type Context<'a> = &'a Option<clock::Global>;
2885
2886 fn zero(_cx: Self::Context<'_>) -> Self {
2887 Default::default()
2888 }
2889
2890 fn add_summary(&mut self, other: &Self, _: Self::Context<'_>) {
2891 self.max_id.assign(&other.max_id);
2892 self.text.visible += &other.text.visible;
2893 self.text.deleted += &other.text.deleted;
2894 self.max_version.join(&other.max_version);
2895 self.min_insertion_version
2896 .meet(&other.min_insertion_version);
2897 self.max_insertion_version
2898 .join(&other.max_insertion_version);
2899 }
2900}
2901
2902impl Default for FragmentSummary {
2903 fn default() -> Self {
2904 FragmentSummary {
2905 max_id: Locator::min(),
2906 text: FragmentTextSummary::default(),
2907 max_version: clock::Global::new(),
2908 min_insertion_version: clock::Global::new(),
2909 max_insertion_version: clock::Global::new(),
2910 }
2911 }
2912}
2913
2914impl sum_tree::Item for InsertionFragment {
2915 type Summary = InsertionFragmentKey;
2916
2917 fn summary(&self, _cx: ()) -> Self::Summary {
2918 InsertionFragmentKey {
2919 timestamp: self.timestamp,
2920 split_offset: self.split_offset,
2921 }
2922 }
2923}
2924
2925impl sum_tree::KeyedItem for InsertionFragment {
2926 type Key = InsertionFragmentKey;
2927
2928 fn key(&self) -> Self::Key {
2929 sum_tree::Item::summary(self, ())
2930 }
2931}
2932
2933impl InsertionFragment {
2934 fn new(fragment: &Fragment) -> Self {
2935 Self {
2936 timestamp: fragment.timestamp,
2937 split_offset: fragment.insertion_offset,
2938 fragment_id: fragment.id.clone(),
2939 }
2940 }
2941
2942 fn insert_new(fragment: &Fragment) -> sum_tree::Edit<Self> {
2943 sum_tree::Edit::Insert(Self::new(fragment))
2944 }
2945}
2946
2947impl sum_tree::ContextLessSummary for InsertionFragmentKey {
2948 fn zero() -> Self {
2949 InsertionFragmentKey {
2950 timestamp: Lamport::MIN,
2951 split_offset: 0,
2952 }
2953 }
2954
2955 fn add_summary(&mut self, summary: &Self) {
2956 *self = *summary;
2957 }
2958}
2959
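/// An offset into the buffer that counts both visible and deleted text.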
2960#[derive(Copy, Clone, Debug, Default, PartialEq, Eq, PartialOrd, Ord, Hash)]
2961pub struct FullOffset(pub usize);
2962
2963impl ops::AddAssign<usize> for FullOffset {
2964 fn add_assign(&mut self, rhs: usize) {
2965 self.0 += rhs;
2966 }
2967}
2968
2969impl ops::Add<usize> for FullOffset {
2970 type Output = Self;
2971
2972 fn add(mut self, rhs: usize) -> Self::Output {
2973 self += rhs;
2974 self
2975 }
2976}
2977
2978impl ops::Sub for FullOffset {
2979 type Output = usize;
2980
2981 fn sub(self, rhs: Self) -> Self::Output {
2982 self.0 - rhs.0
2983 }
2984}
2985
2986impl sum_tree::Dimension<'_, FragmentSummary> for usize {
2987 fn zero(_: &Option<clock::Global>) -> Self {
2988 Default::default()
2989 }
2990
2991 fn add_summary(&mut self, summary: &FragmentSummary, _: &Option<clock::Global>) {
2992 *self += summary.text.visible;
2993 }
2994}
2995
2996impl sum_tree::Dimension<'_, FragmentSummary> for FullOffset {
2997 fn zero(_: &Option<clock::Global>) -> Self {
2998 Default::default()
2999 }
3000
3001 fn add_summary(&mut self, summary: &FragmentSummary, _: &Option<clock::Global>) {
3002 self.0 += summary.text.visible + summary.text.deleted;
3003 }
3004}
3005
3006impl<'a> sum_tree::Dimension<'a, FragmentSummary> for Option<&'a Locator> {
3007 fn zero(_: &Option<clock::Global>) -> Self {
3008 Default::default()
3009 }
3010
3011 fn add_summary(&mut self, summary: &'a FragmentSummary, _: &Option<clock::Global>) {
3012 *self = Some(&summary.max_id);
3013 }
3014}
3015
3016impl sum_tree::SeekTarget<'_, FragmentSummary, FragmentTextSummary> for usize {
3017 fn cmp(
3018 &self,
3019 cursor_location: &FragmentTextSummary,
3020 _: &Option<clock::Global>,
3021 ) -> cmp::Ordering {
3022 Ord::cmp(self, &cursor_location.visible)
3023 }
3024}
3025
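/// A `FullOffset` that is only meaningful relative to the version supplied as the seek
/// context: it becomes `Invalid` once the cursor crosses a fragment whose insertions the
/// version has only partially observed.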
3026#[derive(Copy, Clone, Debug, Eq, PartialEq)]
3027enum VersionedFullOffset {
3028 Offset(FullOffset),
3029 Invalid,
3030}
3031
3032impl VersionedFullOffset {
3033 fn full_offset(&self) -> FullOffset {
3034 if let Self::Offset(position) = self {
3035 *position
3036 } else {
3037 panic!("invalid version")
3038 }
3039 }
3040}
3041
3042impl Default for VersionedFullOffset {
3043 fn default() -> Self {
3044 Self::Offset(Default::default())
3045 }
3046}
3047
3048impl<'a> sum_tree::Dimension<'a, FragmentSummary> for VersionedFullOffset {
3049 fn zero(_cx: &Option<clock::Global>) -> Self {
3050 Default::default()
3051 }
3052
3053 fn add_summary(&mut self, summary: &'a FragmentSummary, cx: &Option<clock::Global>) {
3054 if let Self::Offset(offset) = self {
3055 let version = cx.as_ref().unwrap();
3056 if version.observed_all(&summary.max_insertion_version) {
3057 *offset += summary.text.visible + summary.text.deleted;
3058 } else if version.observed_any(&summary.min_insertion_version) {
3059 *self = Self::Invalid;
3060 }
3061 }
3062 }
3063}
3064
3065impl sum_tree::SeekTarget<'_, FragmentSummary, Self> for VersionedFullOffset {
3066 fn cmp(&self, cursor_position: &Self, _: &Option<clock::Global>) -> cmp::Ordering {
3067 match (self, cursor_position) {
3068 (Self::Offset(a), Self::Offset(b)) => Ord::cmp(a, b),
3069 (Self::Offset(_), Self::Invalid) => cmp::Ordering::Less,
3070 (Self::Invalid, _) => unreachable!(),
3071 }
3072 }
3073}
3074
3075impl Operation {
3076 fn replica_id(&self) -> ReplicaId {
3077 operation_queue::Operation::lamport_timestamp(self).replica_id
3078 }
3079
3080 pub fn timestamp(&self) -> clock::Lamport {
3081 match self {
3082 Operation::Edit(edit) => edit.timestamp,
3083 Operation::Undo(undo) => undo.timestamp,
3084 }
3085 }
3086
3087 pub fn as_edit(&self) -> Option<&EditOperation> {
3088 match self {
3089 Operation::Edit(edit) => Some(edit),
3090 _ => None,
3091 }
3092 }
3093
3094 pub fn is_edit(&self) -> bool {
3095 matches!(self, Operation::Edit { .. })
3096 }
3097}
3098
3099impl operation_queue::Operation for Operation {
3100 fn lamport_timestamp(&self) -> clock::Lamport {
3101 match self {
3102 Operation::Edit(edit) => edit.timestamp,
3103 Operation::Undo(undo) => undo.timestamp,
3104 }
3105 }
3106}
3107
3108pub trait ToOffset {
3109 fn to_offset(&self, snapshot: &BufferSnapshot) -> usize;
    /// Returns the next offset in the buffer after this position, respecting UTF-8
    /// character boundaries.
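    ///
    /// Illustrative sketch, not compiled as a doctest; `snapshot` is a placeholder:
    ///
    /// ```ignore
    /// // Buffer text: "é!" ('é' occupies bytes 0..2).
    /// assert_eq!(0_usize.to_next_offset(&snapshot), 2);
    /// assert_eq!(2_usize.to_next_offset(&snapshot), 3);
    /// ```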
3111 fn to_next_offset(&self, snapshot: &BufferSnapshot) -> usize {
3112 snapshot
3113 .visible_text
3114 .ceil_char_boundary(self.to_offset(snapshot) + 1)
3115 }
    /// Returns the previous offset in the buffer before this position, respecting UTF-8
    /// character boundaries.
3117 fn to_previous_offset(&self, snapshot: &BufferSnapshot) -> usize {
3118 snapshot
3119 .visible_text
3120 .floor_char_boundary(self.to_offset(snapshot).saturating_sub(1))
3121 }
3122}
3123
3124impl ToOffset for Point {
3125 fn to_offset(&self, snapshot: &BufferSnapshot) -> usize {
3126 snapshot.point_to_offset(*self)
3127 }
3128}
3129
3130impl ToOffset for usize {
3131 #[track_caller]
3132 fn to_offset(&self, snapshot: &BufferSnapshot) -> usize {
3133 assert!(
3134 *self <= snapshot.len(),
3135 "offset {} is out of range, snapshot length is {}",
3136 self,
3137 snapshot.len()
3138 );
3139 *self
3140 }
3141}
3142
3143impl ToOffset for Anchor {
3144 fn to_offset(&self, snapshot: &BufferSnapshot) -> usize {
3145 snapshot.summary_for_anchor(self)
3146 }
3147}
3148
3149impl<T: ToOffset> ToOffset for &T {
3150 fn to_offset(&self, content: &BufferSnapshot) -> usize {
3151 (*self).to_offset(content)
3152 }
3153}
3154
3155impl ToOffset for PointUtf16 {
3156 fn to_offset(&self, snapshot: &BufferSnapshot) -> usize {
3157 snapshot.point_utf16_to_offset(*self)
3158 }
3159}
3160
3161impl ToOffset for Unclipped<PointUtf16> {
3162 fn to_offset(&self, snapshot: &BufferSnapshot) -> usize {
3163 snapshot.unclipped_point_utf16_to_offset(*self)
3164 }
3165}
3166
3167pub trait ToPoint {
3168 fn to_point(&self, snapshot: &BufferSnapshot) -> Point;
3169}
3170
3171impl ToPoint for Anchor {
3172 fn to_point(&self, snapshot: &BufferSnapshot) -> Point {
3173 snapshot.summary_for_anchor(self)
3174 }
3175}
3176
3177impl ToPoint for usize {
3178 fn to_point(&self, snapshot: &BufferSnapshot) -> Point {
3179 snapshot.offset_to_point(*self)
3180 }
3181}
3182
3183impl ToPoint for Point {
3184 fn to_point(&self, _: &BufferSnapshot) -> Point {
3185 *self
3186 }
3187}
3188
3189impl ToPoint for Unclipped<PointUtf16> {
3190 fn to_point(&self, snapshot: &BufferSnapshot) -> Point {
3191 snapshot.unclipped_point_utf16_to_point(*self)
3192 }
3193}
3194
3195pub trait ToPointUtf16 {
3196 fn to_point_utf16(&self, snapshot: &BufferSnapshot) -> PointUtf16;
3197}
3198
3199impl ToPointUtf16 for Anchor {
3200 fn to_point_utf16(&self, snapshot: &BufferSnapshot) -> PointUtf16 {
3201 snapshot.summary_for_anchor(self)
3202 }
3203}
3204
3205impl ToPointUtf16 for usize {
3206 fn to_point_utf16(&self, snapshot: &BufferSnapshot) -> PointUtf16 {
3207 snapshot.offset_to_point_utf16(*self)
3208 }
3209}
3210
3211impl ToPointUtf16 for PointUtf16 {
3212 fn to_point_utf16(&self, _: &BufferSnapshot) -> PointUtf16 {
3213 *self
3214 }
3215}
3216
3217impl ToPointUtf16 for Point {
3218 fn to_point_utf16(&self, snapshot: &BufferSnapshot) -> PointUtf16 {
3219 snapshot.point_to_point_utf16(*self)
3220 }
3221}
3222
3223pub trait ToOffsetUtf16 {
3224 fn to_offset_utf16(&self, snapshot: &BufferSnapshot) -> OffsetUtf16;
3225}
3226
3227impl ToOffsetUtf16 for Anchor {
3228 fn to_offset_utf16(&self, snapshot: &BufferSnapshot) -> OffsetUtf16 {
3229 snapshot.summary_for_anchor(self)
3230 }
3231}
3232
3233impl ToOffsetUtf16 for usize {
3234 fn to_offset_utf16(&self, snapshot: &BufferSnapshot) -> OffsetUtf16 {
3235 snapshot.offset_to_offset_utf16(*self)
3236 }
3237}
3238
3239impl ToOffsetUtf16 for OffsetUtf16 {
3240 fn to_offset_utf16(&self, _snapshot: &BufferSnapshot) -> OffsetUtf16 {
3241 *self
3242 }
3243}
3244
3245pub trait FromAnchor {
3246 fn from_anchor(anchor: &Anchor, snapshot: &BufferSnapshot) -> Self;
3247}
3248
3249impl FromAnchor for Anchor {
3250 fn from_anchor(anchor: &Anchor, _snapshot: &BufferSnapshot) -> Self {
3251 *anchor
3252 }
3253}
3254
3255impl FromAnchor for Point {
3256 fn from_anchor(anchor: &Anchor, snapshot: &BufferSnapshot) -> Self {
3257 snapshot.summary_for_anchor(anchor)
3258 }
3259}
3260
3261impl FromAnchor for PointUtf16 {
3262 fn from_anchor(anchor: &Anchor, snapshot: &BufferSnapshot) -> Self {
3263 snapshot.summary_for_anchor(anchor)
3264 }
3265}
3266
3267impl FromAnchor for usize {
3268 fn from_anchor(anchor: &Anchor, snapshot: &BufferSnapshot) -> Self {
3269 snapshot.summary_for_anchor(anchor)
3270 }
3271}
3272
3273#[derive(Clone, Copy, Debug, PartialEq)]
3274pub enum LineEnding {
3275 Unix,
3276 Windows,
3277}
3278
3279impl Default for LineEnding {
3280 fn default() -> Self {
3281 #[cfg(unix)]
3282 return Self::Unix;
3283
3284 #[cfg(not(unix))]
3285 return Self::Windows;
3286 }
3287}
3288
3289impl LineEnding {
3290 pub fn as_str(&self) -> &'static str {
3291 match self {
3292 LineEnding::Unix => "\n",
3293 LineEnding::Windows => "\r\n",
3294 }
3295 }
3296
3297 pub fn label(&self) -> &'static str {
3298 match self {
3299 LineEnding::Unix => "LF",
3300 LineEnding::Windows => "CRLF",
3301 }
3302 }
3303
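    /// Detects the line ending style of `text` by scanning up to its first 1000 bytes
    /// for a newline: `\r\n` is classified as Windows, a bare `\n` as Unix, and the
    /// platform default is returned when no newline is found.
    ///
    /// Illustrative sketch, not compiled as a doctest:
    ///
    /// ```ignore
    /// assert_eq!(LineEnding::detect("one\r\ntwo"), LineEnding::Windows);
    /// assert_eq!(LineEnding::detect("one\ntwo"), LineEnding::Unix);
    /// ```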
3304 pub fn detect(text: &str) -> Self {
3305 let mut max_ix = cmp::min(text.len(), 1000);
3306 while !text.is_char_boundary(max_ix) {
3307 max_ix -= 1;
3308 }
3309
3310 if let Some(ix) = text[..max_ix].find(['\n']) {
3311 if ix > 0 && text.as_bytes()[ix - 1] == b'\r' {
3312 Self::Windows
3313 } else {
3314 Self::Unix
3315 }
3316 } else {
3317 Self::default()
3318 }
3319 }
3320
3321 pub fn normalize(text: &mut String) {
3322 if let Cow::Owned(replaced) = LINE_SEPARATORS_REGEX.replace_all(text, "\n") {
3323 *text = replaced;
3324 }
3325 }
3326
3327 pub fn normalize_arc(text: Arc<str>) -> Arc<str> {
3328 if let Cow::Owned(replaced) = LINE_SEPARATORS_REGEX.replace_all(&text, "\n") {
3329 replaced.into()
3330 } else {
3331 text
3332 }
3333 }
3334
3335 pub fn normalize_cow(text: Cow<str>) -> Cow<str> {
3336 if let Cow::Owned(replaced) = LINE_SEPARATORS_REGEX.replace_all(&text, "\n") {
3337 replaced.into()
3338 } else {
3339 text
3340 }
3341 }
3342}
3343
3344#[cfg(debug_assertions)]
3345pub mod debug {
3346 use super::*;
3347 use parking_lot::Mutex;
3348 use std::any::TypeId;
3349 use std::hash::{Hash, Hasher};
3350
3351 static GLOBAL_DEBUG_RANGES: Mutex<Option<GlobalDebugRanges>> = Mutex::new(None);
3352
3353 pub struct GlobalDebugRanges {
3354 pub ranges: Vec<DebugRange>,
3355 key_to_occurrence_index: HashMap<Key, usize>,
3356 next_occurrence_index: usize,
3357 }
3358
3359 pub struct DebugRange {
3360 key: Key,
3361 pub ranges: Vec<Range<Anchor>>,
3362 pub value: Arc<str>,
3363 pub occurrence_index: usize,
3364 }
3365
3366 #[derive(Debug, Clone, PartialEq, Eq, Hash)]
3367 struct Key {
3368 type_id: TypeId,
3369 hash: u64,
3370 }
3371
3372 impl GlobalDebugRanges {
3373 pub fn with_locked<R>(f: impl FnOnce(&mut Self) -> R) -> R {
3374 let mut state = GLOBAL_DEBUG_RANGES.lock();
3375 if state.is_none() {
3376 *state = Some(GlobalDebugRanges {
3377 ranges: Vec::new(),
3378 key_to_occurrence_index: HashMap::default(),
3379 next_occurrence_index: 0,
3380 });
3381 }
3382 if let Some(global_debug_ranges) = state.as_mut() {
3383 f(global_debug_ranges)
3384 } else {
3385 unreachable!()
3386 }
3387 }
3388
3389 pub fn insert<K: Hash + 'static>(
3390 &mut self,
3391 key: &K,
3392 ranges: Vec<Range<Anchor>>,
3393 value: Arc<str>,
3394 ) {
3395 let occurrence_index = *self
3396 .key_to_occurrence_index
3397 .entry(Key::new(key))
3398 .or_insert_with(|| {
3399 let occurrence_index = self.next_occurrence_index;
3400 self.next_occurrence_index += 1;
3401 occurrence_index
3402 });
3403 let key = Key::new(key);
3404 let existing = self
3405 .ranges
3406 .iter()
3407 .enumerate()
3408 .rfind(|(_, existing)| existing.key == key);
3409 if let Some((existing_ix, _)) = existing {
3410 self.ranges.remove(existing_ix);
3411 }
3412 self.ranges.push(DebugRange {
3413 ranges,
3414 key,
3415 value,
3416 occurrence_index,
3417 });
3418 }
3419
3420 pub fn remove<K: Hash + 'static>(&mut self, key: &K) {
3421 self.remove_impl(&Key::new(key));
3422 }
3423
3424 fn remove_impl(&mut self, key: &Key) {
3425 let existing = self
3426 .ranges
3427 .iter()
3428 .enumerate()
3429 .rfind(|(_, existing)| &existing.key == key);
3430 if let Some((existing_ix, _)) = existing {
3431 self.ranges.remove(existing_ix);
3432 }
3433 }
3434
3435 pub fn remove_all_with_key_type<K: 'static>(&mut self) {
3436 self.ranges
3437 .retain(|item| item.key.type_id != TypeId::of::<K>());
3438 }
3439 }
3440
3441 impl Key {
3442 fn new<K: Hash + 'static>(key: &K) -> Self {
3443 let type_id = TypeId::of::<K>();
3444 let mut hasher = collections::FxHasher::default();
3445 key.hash(&mut hasher);
3446 Key {
3447 type_id,
3448 hash: hasher.finish(),
3449 }
3450 }
3451 }
3452
3453 pub trait ToDebugRanges {
3454 fn to_debug_ranges(&self, snapshot: &BufferSnapshot) -> Vec<Range<usize>>;
3455 }
3456
3457 impl<T: ToOffset> ToDebugRanges for T {
3458 fn to_debug_ranges(&self, snapshot: &BufferSnapshot) -> Vec<Range<usize>> {
3459 [self.to_offset(snapshot)].to_debug_ranges(snapshot)
3460 }
3461 }
3462
3463 impl<T: ToOffset + Clone> ToDebugRanges for Range<T> {
3464 fn to_debug_ranges(&self, snapshot: &BufferSnapshot) -> Vec<Range<usize>> {
3465 [self.clone()].to_debug_ranges(snapshot)
3466 }
3467 }
3468
3469 impl<T: ToOffset> ToDebugRanges for Vec<T> {
3470 fn to_debug_ranges(&self, snapshot: &BufferSnapshot) -> Vec<Range<usize>> {
3471 self.as_slice().to_debug_ranges(snapshot)
3472 }
3473 }
3474
3475 impl<T: ToOffset> ToDebugRanges for Vec<Range<T>> {
3476 fn to_debug_ranges(&self, snapshot: &BufferSnapshot) -> Vec<Range<usize>> {
3477 self.as_slice().to_debug_ranges(snapshot)
3478 }
3479 }
3480
3481 impl<T: ToOffset> ToDebugRanges for [T] {
3482 fn to_debug_ranges(&self, snapshot: &BufferSnapshot) -> Vec<Range<usize>> {
3483 self.iter()
3484 .map(|item| {
3485 let offset = item.to_offset(snapshot);
3486 offset..offset
3487 })
3488 .collect()
3489 }
3490 }
3491
3492 impl<T: ToOffset> ToDebugRanges for [Range<T>] {
3493 fn to_debug_ranges(&self, snapshot: &BufferSnapshot) -> Vec<Range<usize>> {
3494 self.iter()
3495 .map(|range| range.start.to_offset(snapshot)..range.end.to_offset(snapshot))
3496 .collect()
3497 }
3498 }
3499}