1mod anchor;
2pub mod locator;
3#[cfg(any(test, feature = "test-support"))]
4pub mod network;
5pub mod operation_queue;
6mod patch;
7mod selection;
8pub mod subscription;
9#[cfg(test)]
10mod tests;
11mod undo_map;
12
13pub use anchor::*;
14use anyhow::{Context as _, Result};
15use clock::Lamport;
16pub use clock::ReplicaId;
17use collections::{HashMap, HashSet};
18use locator::Locator;
19use operation_queue::OperationQueue;
20pub use patch::Patch;
21use postage::{oneshot, prelude::*};
22
23use regex::Regex;
24pub use rope::*;
25pub use selection::*;
26use std::{
27 borrow::Cow,
28 cmp::{self, Ordering, Reverse},
29 fmt::Display,
30 future::Future,
31 iter::Iterator,
32 num::NonZeroU64,
33 ops::{self, Deref, Range, Sub},
34 str,
35 sync::{Arc, LazyLock},
36 time::{Duration, Instant},
37};
38pub use subscription::*;
39pub use sum_tree::Bias;
40use sum_tree::{Dimensions, FilterCursor, SumTree, TreeMap, TreeSet};
41use undo_map::UndoMap;
42
43#[cfg(any(test, feature = "test-support"))]
44use util::RandomCharIter;
45
46static LINE_SEPARATORS_REGEX: LazyLock<Regex> =
47 LazyLock::new(|| Regex::new(r"\r\n|\r").expect("Failed to create LINE_SEPARATORS_REGEX"));
48
49pub type TransactionId = clock::Lamport;
50
51pub struct Buffer {
52 snapshot: BufferSnapshot,
53 history: History,
54 deferred_ops: OperationQueue<Operation>,
55 deferred_replicas: HashSet<ReplicaId>,
56 pub lamport_clock: clock::Lamport,
57 subscriptions: Topic,
58 edit_id_resolvers: HashMap<clock::Lamport, Vec<oneshot::Sender<()>>>,
59 wait_for_version_txs: Vec<(clock::Global, oneshot::Sender<()>)>,
60}
61
62#[repr(transparent)]
63#[derive(Clone, Copy, Debug, Hash, PartialEq, PartialOrd, Ord, Eq)]
64pub struct BufferId(NonZeroU64);
65
66impl Display for BufferId {
67 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
68 write!(f, "{}", self.0)
69 }
70}
71
72impl From<NonZeroU64> for BufferId {
73 fn from(id: NonZeroU64) -> Self {
74 BufferId(id)
75 }
76}
77
78impl BufferId {
    /// Returns an error if `id` is zero, which is outside the `BufferId` domain.
80 pub fn new(id: u64) -> anyhow::Result<Self> {
81 let id = NonZeroU64::new(id).context("Buffer id cannot be 0.")?;
82 Ok(Self(id))
83 }
84
85 /// Increments this buffer id, returning the old value.
86 /// So that's a post-increment operator in disguise.
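    ///
    /// A small sketch of the post-increment behavior:
    ///
    /// ```ignore
    /// let mut id = BufferId::new(1).unwrap();
    /// assert_eq!(u64::from(id.next()), 1); // yields the old value
    /// assert_eq!(u64::from(id), 2); // while the id itself has advanced
    /// ```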
87 pub fn next(&mut self) -> Self {
88 let old = *self;
89 self.0 = self.0.saturating_add(1);
90 old
91 }
92
93 pub fn to_proto(self) -> u64 {
94 self.into()
95 }
96}
97
98impl From<BufferId> for u64 {
99 fn from(id: BufferId) -> Self {
100 id.0.get()
101 }
102}
103
104#[derive(Clone)]
105pub struct BufferSnapshot {
106 replica_id: ReplicaId,
107 remote_id: BufferId,
108 visible_text: Rope,
109 deleted_text: Rope,
110 line_ending: LineEnding,
111 undo_map: UndoMap,
112 fragments: SumTree<Fragment>,
113 insertions: SumTree<InsertionFragment>,
114 insertion_slices: TreeSet<InsertionSlice>,
115 pub version: clock::Global,
116}
117
118#[derive(Clone, Debug)]
119pub struct HistoryEntry {
120 transaction: Transaction,
121 first_edit_at: Instant,
122 last_edit_at: Instant,
123 suppress_grouping: bool,
124}
125
126#[derive(Clone, Debug)]
127pub struct Transaction {
128 pub id: TransactionId,
129 pub edit_ids: Vec<clock::Lamport>,
130 pub start: clock::Global,
131}
132
133impl Transaction {
134 pub fn merge_in(&mut self, other: Transaction) {
135 self.edit_ids.extend(other.edit_ids);
136 }
137}
138
139impl HistoryEntry {
140 pub fn transaction_id(&self) -> TransactionId {
141 self.transaction.id
142 }
143}
144
145struct History {
146 base_text: Rope,
147 operations: TreeMap<clock::Lamport, Operation>,
148 undo_stack: Vec<HistoryEntry>,
149 redo_stack: Vec<HistoryEntry>,
150 transaction_depth: usize,
151 group_interval: Duration,
152}
153
154#[derive(Clone, Debug, Eq, PartialEq)]
155struct InsertionSlice {
156 edit_id: clock::Lamport,
157 insertion_id: clock::Lamport,
158 range: Range<usize>,
159}
160
161impl Ord for InsertionSlice {
162 fn cmp(&self, other: &Self) -> Ordering {
163 self.edit_id
164 .cmp(&other.edit_id)
165 .then_with(|| self.insertion_id.cmp(&other.insertion_id))
166 .then_with(|| self.range.start.cmp(&other.range.start))
167 .then_with(|| self.range.end.cmp(&other.range.end))
168 }
169}
170
171impl PartialOrd for InsertionSlice {
172 fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
173 Some(self.cmp(other))
174 }
175}
176
177impl InsertionSlice {
178 fn from_fragment(edit_id: clock::Lamport, fragment: &Fragment) -> Self {
179 Self {
180 edit_id,
181 insertion_id: fragment.timestamp,
182 range: fragment.insertion_offset..fragment.insertion_offset + fragment.len,
183 }
184 }
185}
186
187impl History {
188 pub fn new(base_text: Rope) -> Self {
189 Self {
190 base_text,
191 operations: Default::default(),
192 undo_stack: Vec::new(),
193 redo_stack: Vec::new(),
194 transaction_depth: 0,
195 // Don't group transactions in tests unless we opt in, because it's a footgun.
196 #[cfg(any(test, feature = "test-support"))]
197 group_interval: Duration::ZERO,
198 #[cfg(not(any(test, feature = "test-support")))]
199 group_interval: Duration::from_millis(300),
200 }
201 }
202
203 fn push(&mut self, op: Operation) {
204 self.operations.insert(op.timestamp(), op);
205 }
206
207 fn start_transaction(
208 &mut self,
209 start: clock::Global,
210 now: Instant,
211 clock: &mut clock::Lamport,
212 ) -> Option<TransactionId> {
213 self.transaction_depth += 1;
214 if self.transaction_depth == 1 {
215 let id = clock.tick();
216 self.undo_stack.push(HistoryEntry {
217 transaction: Transaction {
218 id,
219 start,
220 edit_ids: Default::default(),
221 },
222 first_edit_at: now,
223 last_edit_at: now,
224 suppress_grouping: false,
225 });
226 Some(id)
227 } else {
228 None
229 }
230 }
231
232 fn end_transaction(&mut self, now: Instant) -> Option<&HistoryEntry> {
233 assert_ne!(self.transaction_depth, 0);
234 self.transaction_depth -= 1;
235 if self.transaction_depth == 0 {
236 if self
237 .undo_stack
238 .last()
239 .unwrap()
240 .transaction
241 .edit_ids
242 .is_empty()
243 {
244 self.undo_stack.pop();
245 None
246 } else {
247 self.redo_stack.clear();
248 let entry = self.undo_stack.last_mut().unwrap();
249 entry.last_edit_at = now;
250 Some(entry)
251 }
252 } else {
253 None
254 }
255 }
256
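    /// Groups the most recent run of undo-stack entries whose edits occurred
    /// within `group_interval` of one another (stopping at entries that
    /// suppress grouping) into a single entry, returning the id of the last
    /// remaining entry, if any.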
257 fn group(&mut self) -> Option<TransactionId> {
258 let mut count = 0;
259 let mut entries = self.undo_stack.iter();
260 if let Some(mut entry) = entries.next_back() {
261 while let Some(prev_entry) = entries.next_back() {
262 if !prev_entry.suppress_grouping
263 && entry.first_edit_at - prev_entry.last_edit_at < self.group_interval
264 {
265 entry = prev_entry;
266 count += 1;
267 } else {
268 break;
269 }
270 }
271 }
272 self.group_trailing(count)
273 }
274
275 fn group_until(&mut self, transaction_id: TransactionId) {
276 let mut count = 0;
277 for entry in self.undo_stack.iter().rev() {
278 if entry.transaction_id() == transaction_id {
279 self.group_trailing(count);
280 break;
281 } else if entry.suppress_grouping {
282 break;
283 } else {
284 count += 1;
285 }
286 }
287 }
288
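    /// Merges the edit ids of the trailing `n` undo-stack entries into the
    /// entry that precedes them, then returns the transaction id of the last
    /// remaining entry, if any.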
289 fn group_trailing(&mut self, n: usize) -> Option<TransactionId> {
290 let new_len = self.undo_stack.len() - n;
291 let (entries_to_keep, entries_to_merge) = self.undo_stack.split_at_mut(new_len);
292 if let Some(last_entry) = entries_to_keep.last_mut() {
293 for entry in &*entries_to_merge {
294 for edit_id in &entry.transaction.edit_ids {
295 last_entry.transaction.edit_ids.push(*edit_id);
296 }
297 }
298
299 if let Some(entry) = entries_to_merge.last_mut() {
300 last_entry.last_edit_at = entry.last_edit_at;
301 }
302 }
303
304 self.undo_stack.truncate(new_len);
305 self.undo_stack.last().map(|e| e.transaction.id)
306 }
307
308 fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
309 self.undo_stack.last_mut().map(|entry| {
310 entry.suppress_grouping = true;
311 &entry.transaction
312 })
313 }
314
315 fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
316 assert_eq!(self.transaction_depth, 0);
317 self.undo_stack.push(HistoryEntry {
318 transaction,
319 first_edit_at: now,
320 last_edit_at: now,
321 suppress_grouping: false,
322 });
323 }
324
325 /// Differs from `push_transaction` in that it does not clear the redo
326 /// stack. Intended to be used to create a parent transaction to merge
327 /// potential child transactions into.
328 ///
329 /// The caller is responsible for removing it from the undo history using
330 /// `forget_transaction` if no edits are merged into it. Otherwise, if edits
331 /// are merged into this transaction, the caller is responsible for ensuring
332 /// the redo stack is cleared. The easiest way to ensure the redo stack is
333 /// cleared is to create transactions with the usual `start_transaction` and
    /// `end_transaction` methods and merge the resulting transactions into
    /// the transaction created by this method.
336 fn push_empty_transaction(
337 &mut self,
338 start: clock::Global,
339 now: Instant,
340 clock: &mut clock::Lamport,
341 ) -> TransactionId {
342 assert_eq!(self.transaction_depth, 0);
343 let id = clock.tick();
344 let transaction = Transaction {
345 id,
346 start,
347 edit_ids: Vec::new(),
348 };
349 self.undo_stack.push(HistoryEntry {
350 transaction,
351 first_edit_at: now,
352 last_edit_at: now,
353 suppress_grouping: false,
354 });
355 id
356 }
357
358 fn push_undo(&mut self, op_id: clock::Lamport) {
359 assert_ne!(self.transaction_depth, 0);
360 if let Some(Operation::Edit(_)) = self.operations.get(&op_id) {
361 let last_transaction = self.undo_stack.last_mut().unwrap();
362 last_transaction.transaction.edit_ids.push(op_id);
363 }
364 }
365
366 fn pop_undo(&mut self) -> Option<&HistoryEntry> {
367 assert_eq!(self.transaction_depth, 0);
368 if let Some(entry) = self.undo_stack.pop() {
369 self.redo_stack.push(entry);
370 self.redo_stack.last()
371 } else {
372 None
373 }
374 }
375
376 fn remove_from_undo(&mut self, transaction_id: TransactionId) -> Option<&HistoryEntry> {
377 assert_eq!(self.transaction_depth, 0);
378
379 let entry_ix = self
380 .undo_stack
381 .iter()
382 .rposition(|entry| entry.transaction.id == transaction_id)?;
383 let entry = self.undo_stack.remove(entry_ix);
384 self.redo_stack.push(entry);
385 self.redo_stack.last()
386 }
387
388 fn remove_from_undo_until(&mut self, transaction_id: TransactionId) -> &[HistoryEntry] {
389 assert_eq!(self.transaction_depth, 0);
390
391 let redo_stack_start_len = self.redo_stack.len();
392 if let Some(entry_ix) = self
393 .undo_stack
394 .iter()
395 .rposition(|entry| entry.transaction.id == transaction_id)
396 {
397 self.redo_stack
398 .extend(self.undo_stack.drain(entry_ix..).rev());
399 }
400 &self.redo_stack[redo_stack_start_len..]
401 }
402
403 fn forget(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
404 assert_eq!(self.transaction_depth, 0);
405 if let Some(entry_ix) = self
406 .undo_stack
407 .iter()
408 .rposition(|entry| entry.transaction.id == transaction_id)
409 {
410 Some(self.undo_stack.remove(entry_ix).transaction)
411 } else if let Some(entry_ix) = self
412 .redo_stack
413 .iter()
414 .rposition(|entry| entry.transaction.id == transaction_id)
415 {
416 Some(self.redo_stack.remove(entry_ix).transaction)
417 } else {
418 None
419 }
420 }
421
422 fn transaction(&self, transaction_id: TransactionId) -> Option<&Transaction> {
423 let entry = self
424 .undo_stack
425 .iter()
426 .rfind(|entry| entry.transaction.id == transaction_id)
427 .or_else(|| {
428 self.redo_stack
429 .iter()
430 .rfind(|entry| entry.transaction.id == transaction_id)
431 })?;
432 Some(&entry.transaction)
433 }
434
435 fn transaction_mut(&mut self, transaction_id: TransactionId) -> Option<&mut Transaction> {
436 let entry = self
437 .undo_stack
438 .iter_mut()
439 .rfind(|entry| entry.transaction.id == transaction_id)
440 .or_else(|| {
441 self.redo_stack
442 .iter_mut()
443 .rfind(|entry| entry.transaction.id == transaction_id)
444 })?;
445 Some(&mut entry.transaction)
446 }
447
448 fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
449 if let Some(transaction) = self.forget(transaction)
450 && let Some(destination) = self.transaction_mut(destination)
451 {
452 destination.edit_ids.extend(transaction.edit_ids);
453 }
454 }
455
456 fn pop_redo(&mut self) -> Option<&HistoryEntry> {
457 assert_eq!(self.transaction_depth, 0);
458 if let Some(entry) = self.redo_stack.pop() {
459 self.undo_stack.push(entry);
460 self.undo_stack.last()
461 } else {
462 None
463 }
464 }
465
466 fn remove_from_redo(&mut self, transaction_id: TransactionId) -> &[HistoryEntry] {
467 assert_eq!(self.transaction_depth, 0);
468
469 let undo_stack_start_len = self.undo_stack.len();
470 if let Some(entry_ix) = self
471 .redo_stack
472 .iter()
473 .rposition(|entry| entry.transaction.id == transaction_id)
474 {
475 self.undo_stack
476 .extend(self.redo_stack.drain(entry_ix..).rev());
477 }
478 &self.undo_stack[undo_stack_start_len..]
479 }
480}
481
482struct Edits<'a, D: TextDimension, F: FnMut(&FragmentSummary) -> bool> {
483 visible_cursor: rope::Cursor<'a>,
484 deleted_cursor: rope::Cursor<'a>,
485 fragments_cursor: Option<FilterCursor<'a, 'static, F, Fragment, FragmentTextSummary>>,
486 undos: &'a UndoMap,
487 since: &'a clock::Global,
488 old_end: D,
489 new_end: D,
490 range: Range<(&'a Locator, usize)>,
491 buffer_id: BufferId,
492}
493
494#[derive(Clone, Debug, Default, Eq, PartialEq)]
495pub struct Edit<D> {
496 pub old: Range<D>,
497 pub new: Range<D>,
498}
499
500impl<D> Edit<D>
501where
502 D: Sub<D, Output = D> + PartialEq + Copy,
503{
504 pub fn old_len(&self) -> D {
505 self.old.end - self.old.start
506 }
507
508 pub fn new_len(&self) -> D {
509 self.new.end - self.new.start
510 }
511
512 pub fn is_empty(&self) -> bool {
513 self.old.start == self.old.end && self.new.start == self.new.end
514 }
515}
516
517impl<D1, D2> Edit<(D1, D2)> {
518 pub fn flatten(self) -> (Edit<D1>, Edit<D2>) {
519 (
520 Edit {
521 old: self.old.start.0..self.old.end.0,
522 new: self.new.start.0..self.new.end.0,
523 },
524 Edit {
525 old: self.old.start.1..self.old.end.1,
526 new: self.new.start.1..self.new.end.1,
527 },
528 )
529 }
530}
531
532#[derive(Eq, PartialEq, Clone, Debug)]
533pub struct Fragment {
534 pub id: Locator,
535 pub timestamp: clock::Lamport,
536 pub insertion_offset: usize,
537 pub len: usize,
538 pub visible: bool,
539 pub deletions: HashSet<clock::Lamport>,
540 pub max_undos: clock::Global,
541}
542
543#[derive(Eq, PartialEq, Clone, Debug)]
544pub struct FragmentSummary {
545 text: FragmentTextSummary,
546 max_id: Locator,
547 max_version: clock::Global,
548 min_insertion_version: clock::Global,
549 max_insertion_version: clock::Global,
550}
551
552#[derive(Copy, Default, Clone, Debug, PartialEq, Eq)]
553struct FragmentTextSummary {
554 visible: usize,
555 deleted: usize,
556}
557
558impl<'a> sum_tree::Dimension<'a, FragmentSummary> for FragmentTextSummary {
559 fn zero(_: &Option<clock::Global>) -> Self {
560 Default::default()
561 }
562
563 fn add_summary(&mut self, summary: &'a FragmentSummary, _: &Option<clock::Global>) {
564 self.visible += summary.text.visible;
565 self.deleted += summary.text.deleted;
566 }
567}
568
569#[derive(Eq, PartialEq, Clone, Debug)]
570struct InsertionFragment {
571 timestamp: clock::Lamport,
572 split_offset: usize,
573 fragment_id: Locator,
574}
575
576#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)]
577struct InsertionFragmentKey {
578 timestamp: clock::Lamport,
579 split_offset: usize,
580}
581
582#[derive(Clone, Debug, Eq, PartialEq)]
583pub enum Operation {
584 Edit(EditOperation),
585 Undo(UndoOperation),
586}
587
588#[derive(Clone, Debug, Eq, PartialEq)]
589pub struct EditOperation {
590 pub timestamp: clock::Lamport,
591 pub version: clock::Global,
592 pub ranges: Vec<Range<FullOffset>>,
593 pub new_text: Vec<Arc<str>>,
594}
595
596#[derive(Clone, Debug, Eq, PartialEq)]
597pub struct UndoOperation {
598 pub timestamp: clock::Lamport,
599 pub version: clock::Global,
600 pub counts: HashMap<clock::Lamport, u32>,
601}
602
603/// Stores information about the indentation of a line (tabs and spaces).
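///
/// A rough illustration of how leading whitespace is classified:
///
/// ```ignore
/// let indent = LineIndent::from("\t  fn main() {}");
/// assert_eq!((indent.tabs, indent.spaces), (1, 2));
/// assert!(!indent.is_line_blank());
/// assert_eq!(indent.len(4), 6); // one tab at tab size 4, plus two spaces
/// ```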
604#[derive(Clone, Copy, Debug, Eq, PartialEq)]
605pub struct LineIndent {
606 pub tabs: u32,
607 pub spaces: u32,
608 pub line_blank: bool,
609}
610
611impl LineIndent {
612 pub fn from_chunks(chunks: &mut Chunks) -> Self {
613 let mut tabs = 0;
614 let mut spaces = 0;
615 let mut line_blank = true;
616
617 'outer: while let Some(chunk) = chunks.peek() {
618 for ch in chunk.chars() {
619 if ch == '\t' {
620 tabs += 1;
621 } else if ch == ' ' {
622 spaces += 1;
623 } else {
624 if ch != '\n' {
625 line_blank = false;
626 }
627 break 'outer;
628 }
629 }
630
631 chunks.next();
632 }
633
634 Self {
635 tabs,
636 spaces,
637 line_blank,
638 }
639 }
640
641 /// Constructs a new `LineIndent` which only contains spaces.
642 pub fn spaces(spaces: u32) -> Self {
643 Self {
644 tabs: 0,
645 spaces,
646 line_blank: true,
647 }
648 }
649
650 /// Constructs a new `LineIndent` which only contains tabs.
651 pub fn tabs(tabs: u32) -> Self {
652 Self {
653 tabs,
654 spaces: 0,
655 line_blank: true,
656 }
657 }
658
659 /// Indicates whether the line is empty.
660 pub fn is_line_empty(&self) -> bool {
661 self.tabs == 0 && self.spaces == 0 && self.line_blank
662 }
663
664 /// Indicates whether the line is blank (contains only whitespace).
665 pub fn is_line_blank(&self) -> bool {
666 self.line_blank
667 }
668
669 /// Returns the number of indentation characters (tabs or spaces).
670 pub fn raw_len(&self) -> u32 {
671 self.tabs + self.spaces
672 }
673
674 /// Returns the number of indentation characters (tabs or spaces), taking tab size into account.
675 pub fn len(&self, tab_size: u32) -> u32 {
676 self.tabs * tab_size + self.spaces
677 }
678}
679
680impl From<&str> for LineIndent {
681 fn from(value: &str) -> Self {
682 Self::from_iter(value.chars())
683 }
684}
685
686impl FromIterator<char> for LineIndent {
687 fn from_iter<T: IntoIterator<Item = char>>(chars: T) -> Self {
688 let mut tabs = 0;
689 let mut spaces = 0;
690 let mut line_blank = true;
691 for c in chars {
692 if c == '\t' {
693 tabs += 1;
694 } else if c == ' ' {
695 spaces += 1;
696 } else {
697 if c != '\n' {
698 line_blank = false;
699 }
700 break;
701 }
702 }
703 Self {
704 tabs,
705 spaces,
706 line_blank,
707 }
708 }
709}
710
711impl Buffer {
712 pub fn new(replica_id: ReplicaId, remote_id: BufferId, base_text: impl Into<String>) -> Buffer {
713 let mut base_text = base_text.into();
714 let line_ending = LineEnding::detect(&base_text);
715 LineEnding::normalize(&mut base_text);
716 Self::new_normalized(replica_id, remote_id, line_ending, Rope::from(&*base_text))
717 }
718
719 pub fn new_normalized(
720 replica_id: ReplicaId,
721 remote_id: BufferId,
722 line_ending: LineEnding,
723 normalized: Rope,
724 ) -> Buffer {
725 let history = History::new(normalized);
726 let mut fragments = SumTree::new(&None);
727 let mut insertions = SumTree::default();
728
729 let mut lamport_clock = clock::Lamport::new(replica_id);
730 let mut version = clock::Global::new();
731
732 let visible_text = history.base_text.clone();
733 if !visible_text.is_empty() {
734 let insertion_timestamp = clock::Lamport::new(ReplicaId::LOCAL);
735 lamport_clock.observe(insertion_timestamp);
736 version.observe(insertion_timestamp);
737 let fragment_id = Locator::between(&Locator::min(), &Locator::max());
738 let fragment = Fragment {
739 id: fragment_id,
740 timestamp: insertion_timestamp,
741 insertion_offset: 0,
742 len: visible_text.len(),
743 visible: true,
744 deletions: Default::default(),
745 max_undos: Default::default(),
746 };
747 insertions.push(InsertionFragment::new(&fragment), ());
748 fragments.push(fragment, &None);
749 }
750
751 Buffer {
752 snapshot: BufferSnapshot {
753 replica_id,
754 remote_id,
755 visible_text,
756 deleted_text: Rope::new(),
757 line_ending,
758 fragments,
759 insertions,
760 version,
761 undo_map: Default::default(),
762 insertion_slices: Default::default(),
763 },
764 history,
765 deferred_ops: OperationQueue::new(),
766 deferred_replicas: HashSet::default(),
767 lamport_clock,
768 subscriptions: Default::default(),
769 edit_id_resolvers: Default::default(),
770 wait_for_version_txs: Default::default(),
771 }
772 }
773
774 pub fn version(&self) -> clock::Global {
775 self.version.clone()
776 }
777
778 pub fn snapshot(&self) -> BufferSnapshot {
779 self.snapshot.clone()
780 }
781
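    /// Returns a new buffer that starts from this buffer's current snapshot
    /// but has a fresh history and its own `LOCAL_BRANCH` replica id.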
782 pub fn branch(&self) -> Self {
783 Self {
784 snapshot: self.snapshot.clone(),
785 history: History::new(self.base_text().clone()),
786 deferred_ops: OperationQueue::new(),
787 deferred_replicas: HashSet::default(),
788 lamport_clock: clock::Lamport::new(ReplicaId::LOCAL_BRANCH),
789 subscriptions: Default::default(),
790 edit_id_resolvers: Default::default(),
791 wait_for_version_txs: Default::default(),
792 }
793 }
794
795 pub fn replica_id(&self) -> ReplicaId {
796 self.lamport_clock.replica_id
797 }
798
799 pub fn remote_id(&self) -> BufferId {
800 self.remote_id
801 }
802
803 pub fn deferred_ops_len(&self) -> usize {
804 self.deferred_ops.len()
805 }
806
807 pub fn transaction_group_interval(&self) -> Duration {
808 self.history.group_interval
809 }
810
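    /// Applies the given edits locally and returns the resulting `Operation`.
    ///
    /// A minimal sketch (ranges are resolved against the buffer's current
    /// contents):
    ///
    /// ```ignore
    /// let mut buffer = Buffer::new(ReplicaId::LOCAL, BufferId::new(1).unwrap(), "hello");
    /// buffer.edit([(5..5, " world")]);
    /// assert_eq!(buffer.text(), "hello world");
    /// ```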
811 pub fn edit<R, I, S, T>(&mut self, edits: R) -> Operation
812 where
813 R: IntoIterator<IntoIter = I>,
814 I: ExactSizeIterator<Item = (Range<S>, T)>,
815 S: ToOffset,
816 T: Into<Arc<str>>,
817 {
818 let edits = edits
819 .into_iter()
820 .map(|(range, new_text)| (range, new_text.into()));
821
822 self.start_transaction();
823 let timestamp = self.lamport_clock.tick();
824 let operation = Operation::Edit(self.apply_local_edit(edits, timestamp));
825
826 self.history.push(operation.clone());
827 self.history.push_undo(operation.timestamp());
828 self.snapshot.version.observe(operation.timestamp());
829 self.end_transaction();
830 operation
831 }
832
833 fn apply_local_edit<S: ToOffset, T: Into<Arc<str>>>(
834 &mut self,
835 edits: impl ExactSizeIterator<Item = (Range<S>, T)>,
836 timestamp: clock::Lamport,
837 ) -> EditOperation {
838 let mut edits_patch = Patch::default();
839 let mut edit_op = EditOperation {
840 timestamp,
841 version: self.version(),
842 ranges: Vec::with_capacity(edits.len()),
843 new_text: Vec::with_capacity(edits.len()),
844 };
845 let mut new_insertions = Vec::new();
846 let mut insertion_offset = 0;
847 let mut insertion_slices = Vec::new();
848
849 let mut edits = edits
850 .map(|(range, new_text)| (range.to_offset(&*self), new_text))
851 .peekable();
852
853 let mut new_ropes =
854 RopeBuilder::new(self.visible_text.cursor(0), self.deleted_text.cursor(0));
855 let mut old_fragments = self.fragments.cursor::<FragmentTextSummary>(&None);
856 let mut new_fragments = old_fragments.slice(&edits.peek().unwrap().0.start, Bias::Right);
857 new_ropes.append(new_fragments.summary().text);
858
859 let mut fragment_start = old_fragments.start().visible;
860 for (range, new_text) in edits {
861 let new_text = LineEnding::normalize_arc(new_text.into());
862 let fragment_end = old_fragments.end().visible;
863
864 // If the current fragment ends before this range, then jump ahead to the first fragment
865 // that extends past the start of this range, reusing any intervening fragments.
866 if fragment_end < range.start {
867 // If the current fragment has been partially consumed, then consume the rest of it
868 // and advance to the next fragment before slicing.
869 if fragment_start > old_fragments.start().visible {
870 if fragment_end > fragment_start {
871 let mut suffix = old_fragments.item().unwrap().clone();
872 suffix.len = fragment_end - fragment_start;
873 suffix.insertion_offset += fragment_start - old_fragments.start().visible;
874 new_insertions.push(InsertionFragment::insert_new(&suffix));
875 new_ropes.push_fragment(&suffix, suffix.visible);
876 new_fragments.push(suffix, &None);
877 }
878 old_fragments.next();
879 }
880
881 let slice = old_fragments.slice(&range.start, Bias::Right);
882 new_ropes.append(slice.summary().text);
883 new_fragments.append(slice, &None);
884 fragment_start = old_fragments.start().visible;
885 }
886
887 let full_range_start = FullOffset(range.start + old_fragments.start().deleted);
888
889 // Preserve any portion of the current fragment that precedes this range.
890 if fragment_start < range.start {
891 let mut prefix = old_fragments.item().unwrap().clone();
892 prefix.len = range.start - fragment_start;
893 prefix.insertion_offset += fragment_start - old_fragments.start().visible;
894 prefix.id = Locator::between(&new_fragments.summary().max_id, &prefix.id);
895 new_insertions.push(InsertionFragment::insert_new(&prefix));
896 new_ropes.push_fragment(&prefix, prefix.visible);
897 new_fragments.push(prefix, &None);
898 fragment_start = range.start;
899 }
900
901 // Insert the new text before any existing fragments within the range.
902 if !new_text.is_empty() {
903 let new_start = new_fragments.summary().text.visible;
904
905 let fragment = Fragment {
906 id: Locator::between(
907 &new_fragments.summary().max_id,
908 old_fragments
909 .item()
910 .map_or(&Locator::max(), |old_fragment| &old_fragment.id),
911 ),
912 timestamp,
913 insertion_offset,
914 len: new_text.len(),
915 deletions: Default::default(),
916 max_undos: Default::default(),
917 visible: true,
918 };
919 edits_patch.push(Edit {
920 old: fragment_start..fragment_start,
921 new: new_start..new_start + new_text.len(),
922 });
923 insertion_slices.push(InsertionSlice::from_fragment(timestamp, &fragment));
924 new_insertions.push(InsertionFragment::insert_new(&fragment));
925 new_ropes.push_str(new_text.as_ref());
926 new_fragments.push(fragment, &None);
927 insertion_offset += new_text.len();
928 }
929
930 // Advance through every fragment that intersects this range, marking the intersecting
931 // portions as deleted.
932 while fragment_start < range.end {
933 let fragment = old_fragments.item().unwrap();
934 let fragment_end = old_fragments.end().visible;
935 let mut intersection = fragment.clone();
936 let intersection_end = cmp::min(range.end, fragment_end);
937 if fragment.visible {
938 intersection.len = intersection_end - fragment_start;
939 intersection.insertion_offset += fragment_start - old_fragments.start().visible;
940 intersection.id =
941 Locator::between(&new_fragments.summary().max_id, &intersection.id);
942 intersection.deletions.insert(timestamp);
943 intersection.visible = false;
944 }
945 if intersection.len > 0 {
946 if fragment.visible && !intersection.visible {
947 let new_start = new_fragments.summary().text.visible;
948 edits_patch.push(Edit {
949 old: fragment_start..intersection_end,
950 new: new_start..new_start,
951 });
952 insertion_slices
953 .push(InsertionSlice::from_fragment(timestamp, &intersection));
954 }
955 new_insertions.push(InsertionFragment::insert_new(&intersection));
956 new_ropes.push_fragment(&intersection, fragment.visible);
957 new_fragments.push(intersection, &None);
958 fragment_start = intersection_end;
959 }
960 if fragment_end <= range.end {
961 old_fragments.next();
962 }
963 }
964
965 let full_range_end = FullOffset(range.end + old_fragments.start().deleted);
966 edit_op.ranges.push(full_range_start..full_range_end);
967 edit_op.new_text.push(new_text);
968 }
969
970 // If the current fragment has been partially consumed, then consume the rest of it
971 // and advance to the next fragment before slicing.
972 if fragment_start > old_fragments.start().visible {
973 let fragment_end = old_fragments.end().visible;
974 if fragment_end > fragment_start {
975 let mut suffix = old_fragments.item().unwrap().clone();
976 suffix.len = fragment_end - fragment_start;
977 suffix.insertion_offset += fragment_start - old_fragments.start().visible;
978 new_insertions.push(InsertionFragment::insert_new(&suffix));
979 new_ropes.push_fragment(&suffix, suffix.visible);
980 new_fragments.push(suffix, &None);
981 }
982 old_fragments.next();
983 }
984
985 let suffix = old_fragments.suffix();
986 new_ropes.append(suffix.summary().text);
987 new_fragments.append(suffix, &None);
988 let (visible_text, deleted_text) = new_ropes.finish();
989 drop(old_fragments);
990
991 self.snapshot.fragments = new_fragments;
992 self.snapshot.insertions.edit(new_insertions, ());
993 self.snapshot.visible_text = visible_text;
994 self.snapshot.deleted_text = deleted_text;
995 self.subscriptions.publish_mut(&edits_patch);
996 self.snapshot.insertion_slices.extend(insertion_slices);
997 edit_op
998 }
999
1000 pub fn set_line_ending(&mut self, line_ending: LineEnding) {
1001 self.snapshot.line_ending = line_ending;
1002 }
1003
1004 pub fn apply_ops<I: IntoIterator<Item = Operation>>(&mut self, ops: I) {
1005 let mut deferred_ops = Vec::new();
1006 for op in ops {
1007 self.history.push(op.clone());
1008 if self.can_apply_op(&op) {
1009 self.apply_op(op);
1010 } else {
1011 self.deferred_replicas.insert(op.replica_id());
1012 deferred_ops.push(op);
1013 }
1014 }
1015 self.deferred_ops.insert(deferred_ops);
1016 self.flush_deferred_ops();
1017 }
1018
1019 fn apply_op(&mut self, op: Operation) {
1020 match op {
1021 Operation::Edit(edit) => {
1022 if !self.version.observed(edit.timestamp) {
1023 self.apply_remote_edit(
1024 &edit.version,
1025 &edit.ranges,
1026 &edit.new_text,
1027 edit.timestamp,
1028 );
1029 self.snapshot.version.observe(edit.timestamp);
1030 self.lamport_clock.observe(edit.timestamp);
1031 self.resolve_edit(edit.timestamp);
1032 }
1033 }
1034 Operation::Undo(undo) => {
1035 if !self.version.observed(undo.timestamp) {
1036 self.apply_undo(&undo);
1037 self.snapshot.version.observe(undo.timestamp);
1038 self.lamport_clock.observe(undo.timestamp);
1039 }
1040 }
1041 }
1042 self.wait_for_version_txs.retain_mut(|(version, tx)| {
1043 if self.snapshot.version().observed_all(version) {
1044 tx.try_send(()).ok();
1045 false
1046 } else {
1047 true
1048 }
1049 });
1050 }
1051
1052 fn apply_remote_edit(
1053 &mut self,
1054 version: &clock::Global,
1055 ranges: &[Range<FullOffset>],
1056 new_text: &[Arc<str>],
1057 timestamp: clock::Lamport,
1058 ) {
1059 if ranges.is_empty() {
1060 return;
1061 }
1062
1063 let edits = ranges.iter().zip(new_text.iter());
1064 let mut edits_patch = Patch::default();
1065 let mut insertion_slices = Vec::new();
1066 let cx = Some(version.clone());
1067 let mut new_insertions = Vec::new();
1068 let mut insertion_offset = 0;
1069 let mut new_ropes =
1070 RopeBuilder::new(self.visible_text.cursor(0), self.deleted_text.cursor(0));
1071 let mut old_fragments = self
1072 .fragments
1073 .cursor::<Dimensions<VersionedFullOffset, usize>>(&cx);
1074 let mut new_fragments =
1075 old_fragments.slice(&VersionedFullOffset::Offset(ranges[0].start), Bias::Left);
1076 new_ropes.append(new_fragments.summary().text);
1077
1078 let mut fragment_start = old_fragments.start().0.full_offset();
1079 for (range, new_text) in edits {
1080 let fragment_end = old_fragments.end().0.full_offset();
1081
1082 // If the current fragment ends before this range, then jump ahead to the first fragment
1083 // that extends past the start of this range, reusing any intervening fragments.
1084 if fragment_end < range.start {
1085 // If the current fragment has been partially consumed, then consume the rest of it
1086 // and advance to the next fragment before slicing.
1087 if fragment_start > old_fragments.start().0.full_offset() {
1088 if fragment_end > fragment_start {
1089 let mut suffix = old_fragments.item().unwrap().clone();
1090 suffix.len = fragment_end.0 - fragment_start.0;
1091 suffix.insertion_offset +=
1092 fragment_start - old_fragments.start().0.full_offset();
1093 new_insertions.push(InsertionFragment::insert_new(&suffix));
1094 new_ropes.push_fragment(&suffix, suffix.visible);
1095 new_fragments.push(suffix, &None);
1096 }
1097 old_fragments.next();
1098 }
1099
1100 let slice =
1101 old_fragments.slice(&VersionedFullOffset::Offset(range.start), Bias::Left);
1102 new_ropes.append(slice.summary().text);
1103 new_fragments.append(slice, &None);
1104 fragment_start = old_fragments.start().0.full_offset();
1105 }
1106
1107 // If we are at the end of a non-concurrent fragment, advance to the next one.
1108 let fragment_end = old_fragments.end().0.full_offset();
1109 if fragment_end == range.start && fragment_end > fragment_start {
1110 let mut fragment = old_fragments.item().unwrap().clone();
1111 fragment.len = fragment_end.0 - fragment_start.0;
1112 fragment.insertion_offset += fragment_start - old_fragments.start().0.full_offset();
1113 new_insertions.push(InsertionFragment::insert_new(&fragment));
1114 new_ropes.push_fragment(&fragment, fragment.visible);
1115 new_fragments.push(fragment, &None);
1116 old_fragments.next();
1117 fragment_start = old_fragments.start().0.full_offset();
1118 }
1119
1120 // Skip over insertions that are concurrent to this edit, but have a lower lamport
1121 // timestamp.
1122 while let Some(fragment) = old_fragments.item() {
1123 if fragment_start == range.start && fragment.timestamp > timestamp {
1124 new_ropes.push_fragment(fragment, fragment.visible);
1125 new_fragments.push(fragment.clone(), &None);
1126 old_fragments.next();
1127 debug_assert_eq!(fragment_start, range.start);
1128 } else {
1129 break;
1130 }
1131 }
1132 debug_assert!(fragment_start <= range.start);
1133
1134 // Preserve any portion of the current fragment that precedes this range.
1135 if fragment_start < range.start {
1136 let mut prefix = old_fragments.item().unwrap().clone();
1137 prefix.len = range.start.0 - fragment_start.0;
1138 prefix.insertion_offset += fragment_start - old_fragments.start().0.full_offset();
1139 prefix.id = Locator::between(&new_fragments.summary().max_id, &prefix.id);
1140 new_insertions.push(InsertionFragment::insert_new(&prefix));
1141 fragment_start = range.start;
1142 new_ropes.push_fragment(&prefix, prefix.visible);
1143 new_fragments.push(prefix, &None);
1144 }
1145
1146 // Insert the new text before any existing fragments within the range.
1147 if !new_text.is_empty() {
1148 let mut old_start = old_fragments.start().1;
1149 if old_fragments.item().is_some_and(|f| f.visible) {
1150 old_start += fragment_start.0 - old_fragments.start().0.full_offset().0;
1151 }
1152 let new_start = new_fragments.summary().text.visible;
1153 let fragment = Fragment {
1154 id: Locator::between(
1155 &new_fragments.summary().max_id,
1156 old_fragments
1157 .item()
1158 .map_or(&Locator::max(), |old_fragment| &old_fragment.id),
1159 ),
1160 timestamp,
1161 insertion_offset,
1162 len: new_text.len(),
1163 deletions: Default::default(),
1164 max_undos: Default::default(),
1165 visible: true,
1166 };
1167 edits_patch.push(Edit {
1168 old: old_start..old_start,
1169 new: new_start..new_start + new_text.len(),
1170 });
1171 insertion_slices.push(InsertionSlice::from_fragment(timestamp, &fragment));
1172 new_insertions.push(InsertionFragment::insert_new(&fragment));
1173 new_ropes.push_str(new_text);
1174 new_fragments.push(fragment, &None);
1175 insertion_offset += new_text.len();
1176 }
1177
1178 // Advance through every fragment that intersects this range, marking the intersecting
1179 // portions as deleted.
1180 while fragment_start < range.end {
1181 let fragment = old_fragments.item().unwrap();
1182 let fragment_end = old_fragments.end().0.full_offset();
1183 let mut intersection = fragment.clone();
1184 let intersection_end = cmp::min(range.end, fragment_end);
1185 if fragment.was_visible(version, &self.undo_map) {
1186 intersection.len = intersection_end.0 - fragment_start.0;
1187 intersection.insertion_offset +=
1188 fragment_start - old_fragments.start().0.full_offset();
1189 intersection.id =
1190 Locator::between(&new_fragments.summary().max_id, &intersection.id);
1191 intersection.deletions.insert(timestamp);
1192 intersection.visible = false;
1193 insertion_slices.push(InsertionSlice::from_fragment(timestamp, &intersection));
1194 }
1195 if intersection.len > 0 {
1196 if fragment.visible && !intersection.visible {
1197 let old_start = old_fragments.start().1
1198 + (fragment_start.0 - old_fragments.start().0.full_offset().0);
1199 let new_start = new_fragments.summary().text.visible;
1200 edits_patch.push(Edit {
1201 old: old_start..old_start + intersection.len,
1202 new: new_start..new_start,
1203 });
1204 }
1205 new_insertions.push(InsertionFragment::insert_new(&intersection));
1206 new_ropes.push_fragment(&intersection, fragment.visible);
1207 new_fragments.push(intersection, &None);
1208 fragment_start = intersection_end;
1209 }
1210 if fragment_end <= range.end {
1211 old_fragments.next();
1212 }
1213 }
1214 }
1215
1216 // If the current fragment has been partially consumed, then consume the rest of it
1217 // and advance to the next fragment before slicing.
1218 if fragment_start > old_fragments.start().0.full_offset() {
1219 let fragment_end = old_fragments.end().0.full_offset();
1220 if fragment_end > fragment_start {
1221 let mut suffix = old_fragments.item().unwrap().clone();
1222 suffix.len = fragment_end.0 - fragment_start.0;
1223 suffix.insertion_offset += fragment_start - old_fragments.start().0.full_offset();
1224 new_insertions.push(InsertionFragment::insert_new(&suffix));
1225 new_ropes.push_fragment(&suffix, suffix.visible);
1226 new_fragments.push(suffix, &None);
1227 }
1228 old_fragments.next();
1229 }
1230
1231 let suffix = old_fragments.suffix();
1232 new_ropes.append(suffix.summary().text);
1233 new_fragments.append(suffix, &None);
1234 let (visible_text, deleted_text) = new_ropes.finish();
1235 drop(old_fragments);
1236
1237 self.snapshot.fragments = new_fragments;
1238 self.snapshot.visible_text = visible_text;
1239 self.snapshot.deleted_text = deleted_text;
1240 self.snapshot.insertions.edit(new_insertions, ());
1241 self.snapshot.insertion_slices.extend(insertion_slices);
1242 self.subscriptions.publish_mut(&edits_patch)
1243 }
1244
1245 fn fragment_ids_for_edits<'a>(
1246 &'a self,
1247 edit_ids: impl Iterator<Item = &'a clock::Lamport>,
1248 ) -> Vec<&'a Locator> {
1249 // Get all of the insertion slices changed by the given edits.
1250 let mut insertion_slices = Vec::new();
1251 for edit_id in edit_ids {
1252 let insertion_slice = InsertionSlice {
1253 edit_id: *edit_id,
1254 insertion_id: clock::Lamport::MIN,
1255 range: 0..0,
1256 };
1257 let slices = self
1258 .snapshot
1259 .insertion_slices
1260 .iter_from(&insertion_slice)
1261 .take_while(|slice| slice.edit_id == *edit_id);
1262 insertion_slices.extend(slices)
1263 }
1264 insertion_slices
1265 .sort_unstable_by_key(|s| (s.insertion_id, s.range.start, Reverse(s.range.end)));
1266
1267 // Get all of the fragments corresponding to these insertion slices.
1268 let mut fragment_ids = Vec::new();
1269 let mut insertions_cursor = self.insertions.cursor::<InsertionFragmentKey>(());
1270 for insertion_slice in &insertion_slices {
1271 if insertion_slice.insertion_id != insertions_cursor.start().timestamp
1272 || insertion_slice.range.start > insertions_cursor.start().split_offset
1273 {
1274 insertions_cursor.seek_forward(
1275 &InsertionFragmentKey {
1276 timestamp: insertion_slice.insertion_id,
1277 split_offset: insertion_slice.range.start,
1278 },
1279 Bias::Left,
1280 );
1281 }
1282 while let Some(item) = insertions_cursor.item() {
1283 if item.timestamp != insertion_slice.insertion_id
1284 || item.split_offset >= insertion_slice.range.end
1285 {
1286 break;
1287 }
1288 fragment_ids.push(&item.fragment_id);
1289 insertions_cursor.next();
1290 }
1291 }
1292 fragment_ids.sort_unstable();
1293 fragment_ids
1294 }
1295
1296 fn apply_undo(&mut self, undo: &UndoOperation) {
1297 self.snapshot.undo_map.insert(undo);
1298
1299 let mut edits = Patch::default();
1300 let mut old_fragments = self
1301 .fragments
1302 .cursor::<Dimensions<Option<&Locator>, usize>>(&None);
1303 let mut new_fragments = SumTree::new(&None);
1304 let mut new_ropes =
1305 RopeBuilder::new(self.visible_text.cursor(0), self.deleted_text.cursor(0));
1306
1307 for fragment_id in self.fragment_ids_for_edits(undo.counts.keys()) {
1308 let preceding_fragments = old_fragments.slice(&Some(fragment_id), Bias::Left);
1309 new_ropes.append(preceding_fragments.summary().text);
1310 new_fragments.append(preceding_fragments, &None);
1311
1312 if let Some(fragment) = old_fragments.item() {
1313 let mut fragment = fragment.clone();
1314 let fragment_was_visible = fragment.visible;
1315
1316 fragment.visible = fragment.is_visible(&self.undo_map);
1317 fragment.max_undos.observe(undo.timestamp);
1318
1319 let old_start = old_fragments.start().1;
1320 let new_start = new_fragments.summary().text.visible;
1321 if fragment_was_visible && !fragment.visible {
1322 edits.push(Edit {
1323 old: old_start..old_start + fragment.len,
1324 new: new_start..new_start,
1325 });
1326 } else if !fragment_was_visible && fragment.visible {
1327 edits.push(Edit {
1328 old: old_start..old_start,
1329 new: new_start..new_start + fragment.len,
1330 });
1331 }
1332 new_ropes.push_fragment(&fragment, fragment_was_visible);
1333 new_fragments.push(fragment, &None);
1334
1335 old_fragments.next();
1336 }
1337 }
1338
1339 let suffix = old_fragments.suffix();
1340 new_ropes.append(suffix.summary().text);
1341 new_fragments.append(suffix, &None);
1342
1343 drop(old_fragments);
1344 let (visible_text, deleted_text) = new_ropes.finish();
1345 self.snapshot.fragments = new_fragments;
1346 self.snapshot.visible_text = visible_text;
1347 self.snapshot.deleted_text = deleted_text;
1348 self.subscriptions.publish_mut(&edits);
1349 }
1350
1351 fn flush_deferred_ops(&mut self) {
1352 self.deferred_replicas.clear();
1353 let mut deferred_ops = Vec::new();
1354 for op in self.deferred_ops.drain().iter().cloned() {
1355 if self.can_apply_op(&op) {
1356 self.apply_op(op);
1357 } else {
1358 self.deferred_replicas.insert(op.replica_id());
1359 deferred_ops.push(op);
1360 }
1361 }
1362 self.deferred_ops.insert(deferred_ops);
1363 }
1364
1365 fn can_apply_op(&self, op: &Operation) -> bool {
1366 if self.deferred_replicas.contains(&op.replica_id()) {
1367 false
1368 } else {
1369 self.version.observed_all(match op {
1370 Operation::Edit(edit) => &edit.version,
1371 Operation::Undo(undo) => &undo.version,
1372 })
1373 }
1374 }
1375
1376 pub fn has_deferred_ops(&self) -> bool {
1377 !self.deferred_ops.is_empty()
1378 }
1379
1380 pub fn peek_undo_stack(&self) -> Option<&HistoryEntry> {
1381 self.history.undo_stack.last()
1382 }
1383
1384 pub fn peek_redo_stack(&self) -> Option<&HistoryEntry> {
1385 self.history.redo_stack.last()
1386 }
1387
1388 pub fn start_transaction(&mut self) -> Option<TransactionId> {
1389 self.start_transaction_at(Instant::now())
1390 }
1391
1392 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
1393 self.history
1394 .start_transaction(self.version.clone(), now, &mut self.lamport_clock)
1395 }
1396
1397 pub fn end_transaction(&mut self) -> Option<(TransactionId, clock::Global)> {
1398 self.end_transaction_at(Instant::now())
1399 }
1400
1401 pub fn end_transaction_at(&mut self, now: Instant) -> Option<(TransactionId, clock::Global)> {
1402 if let Some(entry) = self.history.end_transaction(now) {
1403 let since = entry.transaction.start.clone();
1404 let id = self.history.group().unwrap();
1405 Some((id, since))
1406 } else {
1407 None
1408 }
1409 }
1410
1411 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
1412 self.history.finalize_last_transaction()
1413 }
1414
1415 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
1416 self.history.group_until(transaction_id);
1417 }
1418
1419 pub fn base_text(&self) -> &Rope {
1420 &self.history.base_text
1421 }
1422
1423 pub fn operations(&self) -> &TreeMap<clock::Lamport, Operation> {
1424 &self.history.operations
1425 }
1426
1427 pub fn undo(&mut self) -> Option<(TransactionId, Operation)> {
1428 if let Some(entry) = self.history.pop_undo() {
1429 let transaction = entry.transaction.clone();
1430 let transaction_id = transaction.id;
1431 let op = self.undo_or_redo(transaction);
1432 Some((transaction_id, op))
1433 } else {
1434 None
1435 }
1436 }
1437
1438 pub fn undo_transaction(&mut self, transaction_id: TransactionId) -> Option<Operation> {
1439 let transaction = self
1440 .history
1441 .remove_from_undo(transaction_id)?
1442 .transaction
1443 .clone();
1444 Some(self.undo_or_redo(transaction))
1445 }
1446
1447 pub fn undo_to_transaction(&mut self, transaction_id: TransactionId) -> Vec<Operation> {
1448 let transactions = self
1449 .history
1450 .remove_from_undo_until(transaction_id)
1451 .iter()
1452 .map(|entry| entry.transaction.clone())
1453 .collect::<Vec<_>>();
1454
1455 transactions
1456 .into_iter()
1457 .map(|transaction| self.undo_or_redo(transaction))
1458 .collect()
1459 }
1460
1461 pub fn forget_transaction(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
1462 self.history.forget(transaction_id)
1463 }
1464
1465 pub fn get_transaction(&self, transaction_id: TransactionId) -> Option<&Transaction> {
1466 self.history.transaction(transaction_id)
1467 }
1468
1469 pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
1470 self.history.merge_transactions(transaction, destination);
1471 }
1472
1473 pub fn redo(&mut self) -> Option<(TransactionId, Operation)> {
1474 if let Some(entry) = self.history.pop_redo() {
1475 let transaction = entry.transaction.clone();
1476 let transaction_id = transaction.id;
1477 let op = self.undo_or_redo(transaction);
1478 Some((transaction_id, op))
1479 } else {
1480 None
1481 }
1482 }
1483
1484 pub fn redo_to_transaction(&mut self, transaction_id: TransactionId) -> Vec<Operation> {
1485 let transactions = self
1486 .history
1487 .remove_from_redo(transaction_id)
1488 .iter()
1489 .map(|entry| entry.transaction.clone())
1490 .collect::<Vec<_>>();
1491
1492 transactions
1493 .into_iter()
1494 .map(|transaction| self.undo_or_redo(transaction))
1495 .collect()
1496 }
1497
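    /// Undo and redo are both expressed as an `Undo` operation: the undo count
    /// of every edit in the transaction is bumped, and the resulting operation
    /// is applied locally and recorded in the operation history.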
1498 fn undo_or_redo(&mut self, transaction: Transaction) -> Operation {
1499 let mut counts = HashMap::default();
1500 for edit_id in transaction.edit_ids {
1501 counts.insert(edit_id, self.undo_map.undo_count(edit_id).saturating_add(1));
1502 }
1503
1504 let operation = self.undo_operations(counts);
1505 self.history.push(operation.clone());
1506 operation
1507 }
1508
1509 pub fn undo_operations(&mut self, counts: HashMap<clock::Lamport, u32>) -> Operation {
1510 let timestamp = self.lamport_clock.tick();
1511 let version = self.version();
1512 self.snapshot.version.observe(timestamp);
1513 let undo = UndoOperation {
1514 timestamp,
1515 version,
1516 counts,
1517 };
1518 self.apply_undo(&undo);
1519 Operation::Undo(undo)
1520 }
1521
1522 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
1523 self.history.push_transaction(transaction, now);
1524 }
1525
1528 /// Differs from `push_transaction` in that it does not clear the redo
1529 /// stack. Intended to be used to create a parent transaction to merge
1530 /// potential child transactions into.
1531 ///
1532 /// The caller is responsible for removing it from the undo history using
1533 /// `forget_transaction` if no edits are merged into it. Otherwise, if edits
1534 /// are merged into this transaction, the caller is responsible for ensuring
1535 /// the redo stack is cleared. The easiest way to ensure the redo stack is
1536 /// cleared is to create transactions with the usual `start_transaction` and
    /// `end_transaction` methods and merge the resulting transactions into
    /// the transaction created by this method.
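    ///
    /// A sketch of the intended pattern (hypothetical `buffer: &mut Buffer`):
    ///
    /// ```ignore
    /// let parent = buffer.push_empty_transaction(Instant::now());
    /// buffer.start_transaction();
    /// buffer.edit([(0..0, "hello")]);
    /// if let Some((child, _)) = buffer.end_transaction() {
    ///     buffer.merge_transactions(child, parent);
    /// } else {
    ///     // Nothing was edited, so drop the placeholder transaction.
    ///     buffer.forget_transaction(parent);
    /// }
    /// ```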
1539 pub fn push_empty_transaction(&mut self, now: Instant) -> TransactionId {
1540 self.history
1541 .push_empty_transaction(self.version.clone(), now, &mut self.lamport_clock)
1542 }
1543
1544 pub fn edited_ranges_for_transaction_id<D>(
1545 &self,
1546 transaction_id: TransactionId,
1547 ) -> impl '_ + Iterator<Item = Range<D>>
1548 where
1549 D: TextDimension,
1550 {
1551 self.history
1552 .transaction(transaction_id)
1553 .into_iter()
1554 .flat_map(|transaction| self.edited_ranges_for_transaction(transaction))
1555 }
1556
1557 pub fn edited_ranges_for_edit_ids<'a, D>(
1558 &'a self,
1559 edit_ids: impl IntoIterator<Item = &'a clock::Lamport>,
1560 ) -> impl 'a + Iterator<Item = Range<D>>
1561 where
1562 D: TextDimension,
1563 {
        // Get the offset ranges of the fragments affected by the given edits.
1565 let mut cursor = self
1566 .fragments
1567 .cursor::<Dimensions<Option<&Locator>, usize>>(&None);
1568 let offset_ranges = self
1569 .fragment_ids_for_edits(edit_ids.into_iter())
1570 .into_iter()
1571 .filter_map(move |fragment_id| {
1572 cursor.seek_forward(&Some(fragment_id), Bias::Left);
1573 let fragment = cursor.item()?;
1574 let start_offset = cursor.start().1;
1575 let end_offset = start_offset + if fragment.visible { fragment.len } else { 0 };
1576 Some(start_offset..end_offset)
1577 });
1578
        // Combine adjacent ranges.
1580 let mut prev_range: Option<Range<usize>> = None;
1581 let disjoint_ranges = offset_ranges
1582 .map(Some)
1583 .chain([None])
1584 .filter_map(move |range| {
1585 if let Some((range, prev_range)) = range.as_ref().zip(prev_range.as_mut())
1586 && prev_range.end == range.start
1587 {
1588 prev_range.end = range.end;
1589 return None;
1590 }
1591 let result = prev_range.clone();
1592 prev_range = range;
1593 result
1594 });
1595
        // Convert to the desired text dimension.
1597 let mut position = D::zero(());
1598 let mut rope_cursor = self.visible_text.cursor(0);
1599 disjoint_ranges.map(move |range| {
1600 position.add_assign(&rope_cursor.summary(range.start));
1601 let start = position;
1602 position.add_assign(&rope_cursor.summary(range.end));
1603 let end = position;
1604 start..end
1605 })
1606 }
1607
1608 pub fn edited_ranges_for_transaction<'a, D>(
1609 &'a self,
1610 transaction: &'a Transaction,
1611 ) -> impl 'a + Iterator<Item = Range<D>>
1612 where
1613 D: TextDimension,
1614 {
1615 self.edited_ranges_for_edit_ids(&transaction.edit_ids)
1616 }
1617
1618 pub fn subscribe(&mut self) -> Subscription {
1619 self.subscriptions.subscribe()
1620 }
1621
1622 pub fn wait_for_edits<It: IntoIterator<Item = clock::Lamport>>(
1623 &mut self,
1624 edit_ids: It,
1625 ) -> impl 'static + Future<Output = Result<()>> + use<It> {
1626 let mut futures = Vec::new();
1627 for edit_id in edit_ids {
1628 if !self.version.observed(edit_id) {
1629 let (tx, rx) = oneshot::channel();
1630 self.edit_id_resolvers.entry(edit_id).or_default().push(tx);
1631 futures.push(rx);
1632 }
1633 }
1634
1635 async move {
1636 for mut future in futures {
1637 if future.recv().await.is_none() {
1638 anyhow::bail!("gave up waiting for edits");
1639 }
1640 }
1641 Ok(())
1642 }
1643 }
1644
1645 pub fn wait_for_anchors<It: IntoIterator<Item = Anchor>>(
1646 &mut self,
1647 anchors: It,
1648 ) -> impl 'static + Future<Output = Result<()>> + use<It> {
1649 let mut futures = Vec::new();
1650 for anchor in anchors {
1651 if !self.version.observed(anchor.timestamp)
1652 && anchor != Anchor::MAX
1653 && anchor != Anchor::MIN
1654 {
1655 let (tx, rx) = oneshot::channel();
1656 self.edit_id_resolvers
1657 .entry(anchor.timestamp)
1658 .or_default()
1659 .push(tx);
1660 futures.push(rx);
1661 }
1662 }
1663
1664 async move {
1665 for mut future in futures {
1666 if future.recv().await.is_none() {
1667 anyhow::bail!("gave up waiting for anchors");
1668 }
1669 }
1670 Ok(())
1671 }
1672 }
1673
1674 pub fn wait_for_version(
1675 &mut self,
1676 version: clock::Global,
1677 ) -> impl Future<Output = Result<()>> + use<> {
1678 let mut rx = None;
1679 if !self.snapshot.version.observed_all(&version) {
1680 let channel = oneshot::channel();
1681 self.wait_for_version_txs.push((version, channel.0));
1682 rx = Some(channel.1);
1683 }
1684 async move {
1685 if let Some(mut rx) = rx
1686 && rx.recv().await.is_none()
1687 {
1688 anyhow::bail!("gave up waiting for version");
1689 }
1690 Ok(())
1691 }
1692 }
1693
1694 pub fn give_up_waiting(&mut self) {
1695 self.edit_id_resolvers.clear();
1696 self.wait_for_version_txs.clear();
1697 }
1698
1699 fn resolve_edit(&mut self, edit_id: clock::Lamport) {
1700 for mut tx in self
1701 .edit_id_resolvers
1702 .remove(&edit_id)
1703 .into_iter()
1704 .flatten()
1705 {
1706 tx.try_send(()).ok();
1707 }
1708 }
1709}
1710
1711#[cfg(any(test, feature = "test-support"))]
1712impl Buffer {
1713 #[track_caller]
1714 pub fn edit_via_marked_text(&mut self, marked_string: &str) {
1715 let edits = self.edits_for_marked_text(marked_string);
1716 self.edit(edits);
1717 }
1718
1719 #[track_caller]
1720 pub fn edits_for_marked_text(&self, marked_string: &str) -> Vec<(Range<usize>, String)> {
1721 let old_text = self.text();
1722 let (new_text, mut ranges) = util::test::marked_text_ranges(marked_string, false);
1723 if ranges.is_empty() {
1724 ranges.push(0..new_text.len());
1725 }
1726
1727 assert_eq!(
1728 old_text[..ranges[0].start],
1729 new_text[..ranges[0].start],
1730 "invalid edit"
1731 );
1732
1733 let mut delta = 0;
1734 let mut edits = Vec::new();
1735 let mut ranges = ranges.into_iter().peekable();
1736
1737 while let Some(inserted_range) = ranges.next() {
1738 let new_start = inserted_range.start;
1739 let old_start = (new_start as isize - delta) as usize;
1740
1741 let following_text = if let Some(next_range) = ranges.peek() {
1742 &new_text[inserted_range.end..next_range.start]
1743 } else {
1744 &new_text[inserted_range.end..]
1745 };
1746
1747 let inserted_len = inserted_range.len();
1748 let deleted_len = old_text[old_start..]
1749 .find(following_text)
1750 .expect("invalid edit");
1751
1752 let old_range = old_start..old_start + deleted_len;
1753 edits.push((old_range, new_text[inserted_range].to_string()));
1754 delta += inserted_len as isize - deleted_len as isize;
1755 }
1756
1757 assert_eq!(
1758 old_text.len() as isize + delta,
1759 new_text.len() as isize,
1760 "invalid edit"
1761 );
1762
1763 edits
1764 }
1765
1766 pub fn check_invariants(&self) {
1767 // Ensure every fragment is ordered by locator in the fragment tree and corresponds
1768 // to an insertion fragment in the insertions tree.
1769 let mut prev_fragment_id = Locator::min();
1770 for fragment in self.snapshot.fragments.items(&None) {
1771 assert!(fragment.id > prev_fragment_id);
1772 prev_fragment_id = fragment.id.clone();
1773
1774 let insertion_fragment = self
1775 .snapshot
1776 .insertions
1777 .get(
1778 &InsertionFragmentKey {
1779 timestamp: fragment.timestamp,
1780 split_offset: fragment.insertion_offset,
1781 },
1782 (),
1783 )
1784 .unwrap();
1785 assert_eq!(
1786 insertion_fragment.fragment_id, fragment.id,
1787 "fragment: {:?}\ninsertion: {:?}",
1788 fragment, insertion_fragment
1789 );
1790 }
1791
1792 let mut cursor = self.snapshot.fragments.cursor::<Option<&Locator>>(&None);
1793 for insertion_fragment in self.snapshot.insertions.cursor::<()>(()) {
1794 cursor.seek(&Some(&insertion_fragment.fragment_id), Bias::Left);
1795 let fragment = cursor.item().unwrap();
1796 assert_eq!(insertion_fragment.fragment_id, fragment.id);
1797 assert_eq!(insertion_fragment.split_offset, fragment.insertion_offset);
1798 }
1799
1800 let fragment_summary = self.snapshot.fragments.summary();
1801 assert_eq!(
1802 fragment_summary.text.visible,
1803 self.snapshot.visible_text.len()
1804 );
1805 assert_eq!(
1806 fragment_summary.text.deleted,
1807 self.snapshot.deleted_text.len()
1808 );
1809
1810 assert!(!self.text().contains("\r\n"));
1811 }
1812
1813 pub fn set_group_interval(&mut self, group_interval: Duration) {
1814 self.history.group_interval = group_interval;
1815 }
1816
1817 pub fn random_byte_range(&self, start_offset: usize, rng: &mut impl rand::Rng) -> Range<usize> {
1818 let end = self.clip_offset(rng.random_range(start_offset..=self.len()), Bias::Right);
1819 let start = self.clip_offset(rng.random_range(start_offset..=end), Bias::Right);
1820 start..end
1821 }
1822
1823 pub fn get_random_edits<T>(
1824 &self,
1825 rng: &mut T,
1826 edit_count: usize,
1827 ) -> Vec<(Range<usize>, Arc<str>)>
1828 where
1829 T: rand::Rng,
1830 {
1831 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
1832 let mut last_end = None;
1833 for _ in 0..edit_count {
1834 if last_end.is_some_and(|last_end| last_end >= self.len()) {
1835 break;
1836 }
1837 let new_start = last_end.map_or(0, |last_end| last_end + 1);
1838 let range = self.random_byte_range(new_start, rng);
1839 last_end = Some(range.end);
1840
1841 let new_text_len = rng.random_range(0..10);
1842 let new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
1843
1844 edits.push((range, new_text.into()));
1845 }
1846 edits
1847 }
1848
1849 pub fn randomly_edit<T>(
1850 &mut self,
1851 rng: &mut T,
1852 edit_count: usize,
1853 ) -> (Vec<(Range<usize>, Arc<str>)>, Operation)
1854 where
1855 T: rand::Rng,
1856 {
1857 let mut edits = self.get_random_edits(rng, edit_count);
1858 log::info!("mutating buffer {:?} with {:?}", self.replica_id, edits);
1859
1860 let op = self.edit(edits.iter().cloned());
1861 if let Operation::Edit(edit) = &op {
1862 assert_eq!(edits.len(), edit.new_text.len());
1863 for (edit, new_text) in edits.iter_mut().zip(&edit.new_text) {
1864 edit.1 = new_text.clone();
1865 }
1866 } else {
1867 unreachable!()
1868 }
1869
1870 (edits, op)
1871 }
1872
1873 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng) -> Vec<Operation> {
1874 use rand::prelude::*;
1875
1876 let mut ops = Vec::new();
1877 for _ in 0..rng.random_range(1..=5) {
1878 if let Some(entry) = self.history.undo_stack.choose(rng) {
1879 let transaction = entry.transaction.clone();
1880 log::info!(
1881 "undoing buffer {:?} transaction {:?}",
1882 self.replica_id,
1883 transaction
1884 );
1885 ops.push(self.undo_or_redo(transaction));
1886 }
1887 }
1888 ops
1889 }
1890}
1891
1892impl Deref for Buffer {
1893 type Target = BufferSnapshot;
1894
1895 fn deref(&self) -> &Self::Target {
1896 &self.snapshot
1897 }
1898}
1899
1900impl BufferSnapshot {
1901 pub fn as_rope(&self) -> &Rope {
1902 &self.visible_text
1903 }
1904
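    /// Reconstructs the buffer's text as it was at `version`, splicing together the
    /// visible and deleted text covered by the fragments that were visible at that version.
    ///
    /// A minimal usage sketch (illustrative only; `buffer` and the intervening edits are
    /// assumed to exist elsewhere):
    ///
    /// ```ignore
    /// let old_version = buffer.version().clone();
    /// // ...apply some edits to `buffer`...
    /// let old_text = buffer.rope_for_version(&old_version).to_string();
    /// ```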
1905 pub fn rope_for_version(&self, version: &clock::Global) -> Rope {
1906 let mut rope = Rope::new();
1907
1908 let mut cursor = self
1909 .fragments
1910 .filter::<_, FragmentTextSummary>(&None, move |summary| {
1911 !version.observed_all(&summary.max_version)
1912 });
1913 cursor.next();
1914
1915 let mut visible_cursor = self.visible_text.cursor(0);
1916 let mut deleted_cursor = self.deleted_text.cursor(0);
1917
1918 while let Some(fragment) = cursor.item() {
1919 if cursor.start().visible > visible_cursor.offset() {
1920 let text = visible_cursor.slice(cursor.start().visible);
1921 rope.append(text);
1922 }
1923
1924 if fragment.was_visible(version, &self.undo_map) {
1925 if fragment.visible {
1926 let text = visible_cursor.slice(cursor.end().visible);
1927 rope.append(text);
1928 } else {
1929 deleted_cursor.seek_forward(cursor.start().deleted);
1930 let text = deleted_cursor.slice(cursor.end().deleted);
1931 rope.append(text);
1932 }
1933 } else if fragment.visible {
1934 visible_cursor.seek_forward(cursor.end().visible);
1935 }
1936
1937 cursor.next();
1938 }
1939
1940 if cursor.start().visible > visible_cursor.offset() {
1941 let text = visible_cursor.slice(cursor.start().visible);
1942 rope.append(text);
1943 }
1944
1945 rope
1946 }
1947
1948 pub fn remote_id(&self) -> BufferId {
1949 self.remote_id
1950 }
1951
1952 pub fn replica_id(&self) -> ReplicaId {
1953 self.replica_id
1954 }
1955
1956 pub fn row_count(&self) -> u32 {
1957 self.max_point().row + 1
1958 }
1959
1960 pub fn len(&self) -> usize {
1961 self.visible_text.len()
1962 }
1963
1964 pub fn is_empty(&self) -> bool {
1965 self.len() == 0
1966 }
1967
1968 pub fn chars(&self) -> impl Iterator<Item = char> + '_ {
1969 self.chars_at(0)
1970 }
1971
1972 pub fn chars_for_range<T: ToOffset>(&self, range: Range<T>) -> impl Iterator<Item = char> + '_ {
1973 self.text_for_range(range).flat_map(str::chars)
1974 }
1975
1976 pub fn reversed_chars_for_range<T: ToOffset>(
1977 &self,
1978 range: Range<T>,
1979 ) -> impl Iterator<Item = char> + '_ {
1980 self.reversed_chunks_in_range(range)
1981 .flat_map(|chunk| chunk.chars().rev())
1982 }
1983
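    /// Returns whether the buffer's text starting at `position` begins with `needle`.
    ///
    /// Sketch (illustrative; `snapshot` is an assumed `BufferSnapshot`):
    ///
    /// ```ignore
    /// let starts_with_fn = snapshot.contains_str_at(0usize, "fn ");
    /// ```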
1984 pub fn contains_str_at<T>(&self, position: T, needle: &str) -> bool
1985 where
1986 T: ToOffset,
1987 {
1988 let position = position.to_offset(self);
1989 position == self.clip_offset(position, Bias::Left)
1990 && self
1991 .bytes_in_range(position..self.len())
1992 .flatten()
1993 .copied()
1994 .take(needle.len())
1995 .eq(needle.bytes())
1996 }
1997
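    /// Returns the range of text ending at `position` that case-insensitively matches a
    /// prefix of `needle`, which is useful for completion-style prefix matching.
    ///
    /// Sketch (illustrative; `snapshot` and `position` are assumed, with the text "Hel"
    /// immediately preceding `position`):
    ///
    /// ```ignore
    /// // The returned range covers the three bytes of "Hel", a prefix of "hello".
    /// let range = snapshot.common_prefix_at(position, "hello");
    /// ```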
1998 pub fn common_prefix_at<T>(&self, position: T, needle: &str) -> Range<T>
1999 where
2000 T: ToOffset + TextDimension,
2001 {
2002 let offset = position.to_offset(self);
2003 let common_prefix_len = needle
2004 .char_indices()
2005 .map(|(index, _)| index)
2006 .chain([needle.len()])
2007 .take_while(|&len| len <= offset)
2008 .filter(|&len| {
2009 let left = self
2010 .chars_for_range(offset - len..offset)
2011 .flat_map(char::to_lowercase);
2012 let right = needle[..len].chars().flat_map(char::to_lowercase);
2013 left.eq(right)
2014 })
2015 .last()
2016 .unwrap_or(0);
2017 let start_offset = offset - common_prefix_len;
2018 let start = self.text_summary_for_range(0..start_offset);
2019 start..position
2020 }
2021
2022 pub fn text(&self) -> String {
2023 self.visible_text.to_string()
2024 }
2025
2026 pub fn line_ending(&self) -> LineEnding {
2027 self.line_ending
2028 }
2029
2030 pub fn deleted_text(&self) -> String {
2031 self.deleted_text.to_string()
2032 }
2033
2034 pub fn fragments(&self) -> impl Iterator<Item = &Fragment> {
2035 self.fragments.iter()
2036 }
2037
2038 pub fn text_summary(&self) -> TextSummary {
2039 self.visible_text.summary()
2040 }
2041
2042 pub fn max_point(&self) -> Point {
2043 self.visible_text.max_point()
2044 }
2045
2046 pub fn max_point_utf16(&self) -> PointUtf16 {
2047 self.visible_text.max_point_utf16()
2048 }
2049
2050 pub fn point_to_offset(&self, point: Point) -> usize {
2051 self.visible_text.point_to_offset(point)
2052 }
2053
2054 pub fn point_utf16_to_offset(&self, point: PointUtf16) -> usize {
2055 self.visible_text.point_utf16_to_offset(point)
2056 }
2057
2058 pub fn unclipped_point_utf16_to_offset(&self, point: Unclipped<PointUtf16>) -> usize {
2059 self.visible_text.unclipped_point_utf16_to_offset(point)
2060 }
2061
2062 pub fn unclipped_point_utf16_to_point(&self, point: Unclipped<PointUtf16>) -> Point {
2063 self.visible_text.unclipped_point_utf16_to_point(point)
2064 }
2065
2066 pub fn offset_utf16_to_offset(&self, offset: OffsetUtf16) -> usize {
2067 self.visible_text.offset_utf16_to_offset(offset)
2068 }
2069
2070 pub fn offset_to_offset_utf16(&self, offset: usize) -> OffsetUtf16 {
2071 self.visible_text.offset_to_offset_utf16(offset)
2072 }
2073
2074 pub fn offset_to_point(&self, offset: usize) -> Point {
2075 self.visible_text.offset_to_point(offset)
2076 }
2077
2078 pub fn offset_to_point_utf16(&self, offset: usize) -> PointUtf16 {
2079 self.visible_text.offset_to_point_utf16(offset)
2080 }
2081
2082 pub fn point_to_point_utf16(&self, point: Point) -> PointUtf16 {
2083 self.visible_text.point_to_point_utf16(point)
2084 }
2085
2086 pub fn version(&self) -> &clock::Global {
2087 &self.version
2088 }
2089
2090 pub fn chars_at<T: ToOffset>(&self, position: T) -> impl Iterator<Item = char> + '_ {
2091 let offset = position.to_offset(self);
2092 self.visible_text.chars_at(offset)
2093 }
2094
2095 pub fn reversed_chars_at<T: ToOffset>(&self, position: T) -> impl Iterator<Item = char> + '_ {
2096 let offset = position.to_offset(self);
2097 self.visible_text.reversed_chars_at(offset)
2098 }
2099
2100 pub fn reversed_chunks_in_range<T: ToOffset>(&self, range: Range<T>) -> rope::Chunks<'_> {
2101 let range = range.start.to_offset(self)..range.end.to_offset(self);
2102 self.visible_text.reversed_chunks_in_range(range)
2103 }
2104
2105 pub fn bytes_in_range<T: ToOffset>(&self, range: Range<T>) -> rope::Bytes<'_> {
2106 let start = range.start.to_offset(self);
2107 let end = range.end.to_offset(self);
2108 self.visible_text.bytes_in_range(start..end)
2109 }
2110
2111 pub fn reversed_bytes_in_range<T: ToOffset>(&self, range: Range<T>) -> rope::Bytes<'_> {
2112 let start = range.start.to_offset(self);
2113 let end = range.end.to_offset(self);
2114 self.visible_text.reversed_bytes_in_range(start..end)
2115 }
2116
2117 pub fn text_for_range<T: ToOffset>(&self, range: Range<T>) -> Chunks<'_> {
2118 let start = range.start.to_offset(self);
2119 let end = range.end.to_offset(self);
2120 self.visible_text.chunks_in_range(start..end)
2121 }
2122
2123 pub fn line_len(&self, row: u32) -> u32 {
2124 let row_start_offset = Point::new(row, 0).to_offset(self);
2125 let row_end_offset = if row >= self.max_point().row {
2126 self.len()
2127 } else {
2128 Point::new(row + 1, 0).to_previous_offset(self)
2129 };
2130 (row_end_offset - row_start_offset) as u32
2131 }
2132
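    /// Iterates over `(row, LineIndent)` pairs for the rows covered by `row_range`,
    /// walking the rope's chunks forward.
    ///
    /// Sketch (illustrative; `snapshot` is assumed):
    ///
    /// ```ignore
    /// for (row, indent) in snapshot.line_indents_in_row_range(0..snapshot.max_point().row) {
    ///     // ...inspect `indent` for `row`...
    /// }
    /// ```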
2133 pub fn line_indents_in_row_range(
2134 &self,
2135 row_range: Range<u32>,
2136 ) -> impl Iterator<Item = (u32, LineIndent)> + '_ {
2137 let start = Point::new(row_range.start, 0).to_offset(self);
2138 let end = Point::new(row_range.end, self.line_len(row_range.end)).to_offset(self);
2139
2140 let mut chunks = self.as_rope().chunks_in_range(start..end);
2141 let mut row = row_range.start;
2142 let mut done = false;
2143 std::iter::from_fn(move || {
2144 if done {
2145 None
2146 } else {
2147 let indent = (row, LineIndent::from_chunks(&mut chunks));
2148 done = !chunks.next_line();
2149 row += 1;
2150 Some(indent)
2151 }
2152 })
2153 }
2154
    /// Returns the line indents for the given row range, exclusive of the end row, in reverse order.
2156 pub fn reversed_line_indents_in_row_range(
2157 &self,
2158 row_range: Range<u32>,
2159 ) -> impl Iterator<Item = (u32, LineIndent)> + '_ {
2160 let start = Point::new(row_range.start, 0).to_offset(self);
2161
2162 let end_point;
2163 let end;
2164 if row_range.end > row_range.start {
2165 end_point = Point::new(row_range.end - 1, self.line_len(row_range.end - 1));
2166 end = end_point.to_offset(self);
2167 } else {
2168 end_point = Point::new(row_range.start, 0);
2169 end = start;
2170 };
2171
2172 let mut chunks = self.as_rope().chunks_in_range(start..end);
2173 // Move the cursor to the start of the last line if it's not empty.
2174 chunks.seek(end);
2175 if end_point.column > 0 {
2176 chunks.prev_line();
2177 }
2178
2179 let mut row = end_point.row;
2180 let mut done = false;
2181 std::iter::from_fn(move || {
2182 if done {
2183 None
2184 } else {
2185 let initial_offset = chunks.offset();
2186 let indent = (row, LineIndent::from_chunks(&mut chunks));
2187 if chunks.offset() > initial_offset {
2188 chunks.prev_line();
2189 }
2190 done = !chunks.prev_line();
2191 if !done {
2192 row -= 1;
2193 }
2194
2195 Some(indent)
2196 }
2197 })
2198 }
2199
2200 pub fn line_indent_for_row(&self, row: u32) -> LineIndent {
2201 LineIndent::from_iter(self.chars_at(Point::new(row, 0)))
2202 }
2203
2204 pub fn is_line_blank(&self, row: u32) -> bool {
2205 self.text_for_range(Point::new(row, 0)..Point::new(row, self.line_len(row)))
2206 .all(|chunk| chunk.matches(|c: char| !c.is_whitespace()).next().is_none())
2207 }
2208
2209 pub fn text_summary_for_range<D, O: ToOffset>(&self, range: Range<O>) -> D
2210 where
2211 D: TextDimension,
2212 {
2213 self.visible_text
2214 .cursor(range.start.to_offset(self))
2215 .summary(range.end.to_offset(self))
2216 }
2217
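    /// Resolves many anchors in a single forward pass over the buffer, which is cheaper
    /// than resolving them one at a time. The internal cursors only advance, so the
    /// anchors should be supplied in ascending order.
    ///
    /// Sketch (illustrative; `snapshot` and `anchors: Vec<Anchor>` are assumed):
    ///
    /// ```ignore
    /// let points: Vec<Point> = snapshot.summaries_for_anchors::<Point, _>(&anchors).collect();
    /// ```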
2218 pub fn summaries_for_anchors<'a, D, A>(&'a self, anchors: A) -> impl 'a + Iterator<Item = D>
2219 where
2220 D: 'a + TextDimension,
2221 A: 'a + IntoIterator<Item = &'a Anchor>,
2222 {
2223 let anchors = anchors.into_iter();
2224 self.summaries_for_anchors_with_payload::<D, _, ()>(anchors.map(|a| (a, ())))
2225 .map(|d| d.0)
2226 }
2227
2228 pub fn summaries_for_anchors_with_payload<'a, D, A, T>(
2229 &'a self,
2230 anchors: A,
2231 ) -> impl 'a + Iterator<Item = (D, T)>
2232 where
2233 D: 'a + TextDimension,
2234 A: 'a + IntoIterator<Item = (&'a Anchor, T)>,
2235 {
2236 let anchors = anchors.into_iter();
2237 let mut insertion_cursor = self.insertions.cursor::<InsertionFragmentKey>(());
2238 let mut fragment_cursor = self
2239 .fragments
2240 .cursor::<Dimensions<Option<&Locator>, usize>>(&None);
2241 let mut text_cursor = self.visible_text.cursor(0);
2242 let mut position = D::zero(());
2243
2244 anchors.map(move |(anchor, payload)| {
2245 if *anchor == Anchor::MIN {
2246 return (D::zero(()), payload);
2247 } else if *anchor == Anchor::MAX {
2248 return (D::from_text_summary(&self.visible_text.summary()), payload);
2249 }
2250
2251 let anchor_key = InsertionFragmentKey {
2252 timestamp: anchor.timestamp,
2253 split_offset: anchor.offset,
2254 };
2255 insertion_cursor.seek(&anchor_key, anchor.bias);
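            // If the cursor landed on an insertion fragment past the anchor's key, or on a
            // fragment boundary for a left-biased anchor, step back to the insertion
            // fragment that actually contains the anchor.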
2256 if let Some(insertion) = insertion_cursor.item() {
2257 let comparison = sum_tree::KeyedItem::key(insertion).cmp(&anchor_key);
2258 if comparison == Ordering::Greater
2259 || (anchor.bias == Bias::Left
2260 && comparison == Ordering::Equal
2261 && anchor.offset > 0)
2262 {
2263 insertion_cursor.prev();
2264 }
2265 } else {
2266 insertion_cursor.prev();
2267 }
2268 let insertion = insertion_cursor.item().expect("invalid insertion");
2269 assert_eq!(insertion.timestamp, anchor.timestamp, "invalid insertion");
2270
2271 fragment_cursor.seek_forward(&Some(&insertion.fragment_id), Bias::Left);
2272 let fragment = fragment_cursor.item().unwrap();
2273 let mut fragment_offset = fragment_cursor.start().1;
2274 if fragment.visible {
2275 fragment_offset += anchor.offset - insertion.split_offset;
2276 }
2277
2278 position.add_assign(&text_cursor.summary(fragment_offset));
2279 (position, payload)
2280 })
2281 }
2282
2283 pub fn summary_for_anchor<D>(&self, anchor: &Anchor) -> D
2284 where
2285 D: TextDimension,
2286 {
2287 self.text_summary_for_range(0..self.offset_for_anchor(anchor))
2288 }
2289
2290 pub fn offset_for_anchor(&self, anchor: &Anchor) -> usize {
2291 if *anchor == Anchor::MIN {
2292 0
2293 } else if *anchor == Anchor::MAX {
2294 self.visible_text.len()
2295 } else {
2296 debug_assert!(anchor.buffer_id == Some(self.remote_id));
2297 let anchor_key = InsertionFragmentKey {
2298 timestamp: anchor.timestamp,
2299 split_offset: anchor.offset,
2300 };
2301 let mut insertion_cursor = self.insertions.cursor::<InsertionFragmentKey>(());
2302 insertion_cursor.seek(&anchor_key, anchor.bias);
2303 if let Some(insertion) = insertion_cursor.item() {
2304 let comparison = sum_tree::KeyedItem::key(insertion).cmp(&anchor_key);
2305 if comparison == Ordering::Greater
2306 || (anchor.bias == Bias::Left
2307 && comparison == Ordering::Equal
2308 && anchor.offset > 0)
2309 {
2310 insertion_cursor.prev();
2311 }
2312 } else {
2313 insertion_cursor.prev();
2314 }
2315
2316 let Some(insertion) = insertion_cursor
2317 .item()
2318 .filter(|insertion| insertion.timestamp == anchor.timestamp)
2319 else {
2320 panic!(
2321 "invalid anchor {:?}. buffer id: {}, version: {:?}",
2322 anchor, self.remote_id, self.version
2323 );
2324 };
2325
2326 let (start, _, item) = self
2327 .fragments
2328 .find::<Dimensions<Option<&Locator>, usize>, _>(
2329 &None,
2330 &Some(&insertion.fragment_id),
2331 Bias::Left,
2332 );
2333 let fragment = item.unwrap();
2334 let mut fragment_offset = start.1;
2335 if fragment.visible {
2336 fragment_offset += anchor.offset - insertion.split_offset;
2337 }
2338 fragment_offset
2339 }
2340 }
2341
2342 fn fragment_id_for_anchor(&self, anchor: &Anchor) -> &Locator {
2343 self.try_fragment_id_for_anchor(anchor).unwrap_or_else(|| {
2344 panic!(
2345 "invalid anchor {:?}. buffer id: {}, version: {:?}",
2346 anchor, self.remote_id, self.version,
2347 )
2348 })
2349 }
2350
2351 fn try_fragment_id_for_anchor(&self, anchor: &Anchor) -> Option<&Locator> {
2352 if *anchor == Anchor::MIN {
2353 Some(Locator::min_ref())
2354 } else if *anchor == Anchor::MAX {
2355 Some(Locator::max_ref())
2356 } else {
2357 let anchor_key = InsertionFragmentKey {
2358 timestamp: anchor.timestamp,
2359 split_offset: anchor.offset,
2360 };
2361 let mut insertion_cursor = self.insertions.cursor::<InsertionFragmentKey>(());
2362 insertion_cursor.seek(&anchor_key, anchor.bias);
2363 if let Some(insertion) = insertion_cursor.item() {
2364 let comparison = sum_tree::KeyedItem::key(insertion).cmp(&anchor_key);
2365 if comparison == Ordering::Greater
2366 || (anchor.bias == Bias::Left
2367 && comparison == Ordering::Equal
2368 && anchor.offset > 0)
2369 {
2370 insertion_cursor.prev();
2371 }
2372 } else {
2373 insertion_cursor.prev();
2374 }
2375
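            // In debug builds, a timestamp mismatch means the anchor is unresolvable;
            // release builds fall back to the nearest preceding insertion fragment.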
2376 insertion_cursor
2377 .item()
2378 .filter(|insertion| {
2379 !cfg!(debug_assertions) || insertion.timestamp == anchor.timestamp
2380 })
2381 .map(|insertion| &insertion.fragment_id)
2382 }
2383 }
2384
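    /// Anchors are logical positions that stay valid as the buffer is edited. A typical
    /// round trip (sketch; `buffer` is assumed, and the edits happen elsewhere):
    ///
    /// ```ignore
    /// let anchor = buffer.anchor_before(10usize);
    /// // ...edits are applied...
    /// let offset_now = anchor.to_offset(&buffer); // `Buffer` derefs to its current snapshot
    /// ```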
2385 pub fn anchor_before<T: ToOffset>(&self, position: T) -> Anchor {
2386 self.anchor_at(position, Bias::Left)
2387 }
2388
2389 pub fn anchor_after<T: ToOffset>(&self, position: T) -> Anchor {
2390 self.anchor_at(position, Bias::Right)
2391 }
2392
2393 pub fn anchor_at<T: ToOffset>(&self, position: T, bias: Bias) -> Anchor {
2394 self.anchor_at_offset(position.to_offset(self), bias)
2395 }
2396
2397 fn anchor_at_offset(&self, offset: usize, bias: Bias) -> Anchor {
2398 if bias == Bias::Left && offset == 0 {
2399 Anchor::MIN
2400 } else if bias == Bias::Right && offset == self.len() {
2401 Anchor::MAX
2402 } else {
2403 if offset > self.visible_text.len() {
2404 panic!("offset {} is out of bounds", offset)
2405 } else if !self.visible_text.is_char_boundary(offset) {
                // Find the character containing this offset, to include it in the panic message.
2407 let char_start = self.visible_text.floor_char_boundary(offset);
2408 // `char_start` must be less than len and a char boundary
2409 let ch = self.visible_text.chars_at(char_start).next().unwrap();
2410 let char_range = char_start..char_start + ch.len_utf8();
2411 panic!(
2412 "byte index {} is not a char boundary; it is inside {:?} (bytes {:?})",
2413 offset, ch, char_range,
2414 );
2415 }
2416 let (start, _, item) = self.fragments.find::<usize, _>(&None, &offset, bias);
2417 let fragment = item.unwrap();
2418 let overshoot = offset - start;
2419 Anchor {
2420 timestamp: fragment.timestamp,
2421 offset: fragment.insertion_offset + overshoot,
2422 bias,
2423 buffer_id: Some(self.remote_id),
2424 }
2425 }
2426 }
2427
2428 pub fn can_resolve(&self, anchor: &Anchor) -> bool {
2429 *anchor == Anchor::MIN
2430 || *anchor == Anchor::MAX
2431 || (Some(self.remote_id) == anchor.buffer_id && self.version.observed(anchor.timestamp))
2432 }
2433
2434 pub fn clip_offset(&self, offset: usize, bias: Bias) -> usize {
2435 self.visible_text.clip_offset(offset, bias)
2436 }
2437
2438 pub fn clip_point(&self, point: Point, bias: Bias) -> Point {
2439 self.visible_text.clip_point(point, bias)
2440 }
2441
2442 pub fn clip_offset_utf16(&self, offset: OffsetUtf16, bias: Bias) -> OffsetUtf16 {
2443 self.visible_text.clip_offset_utf16(offset, bias)
2444 }
2445
2446 pub fn clip_point_utf16(&self, point: Unclipped<PointUtf16>, bias: Bias) -> PointUtf16 {
2447 self.visible_text.clip_point_utf16(point, bias)
2448 }
2449
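    /// Reports the edits that transform the buffer's contents at `since` into its current
    /// contents, with coordinates expressed in the dimension `D`.
    ///
    /// Sketch (illustrative; `snapshot` and a previously captured `since` version are assumed):
    ///
    /// ```ignore
    /// let edits: Vec<Edit<usize>> = snapshot.edits_since::<usize>(&since).collect();
    /// // Each `Edit` carries `old` (a range at `since`) and `new` (a range in the current text).
    /// ```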
2450 pub fn edits_since<'a, D>(
2451 &'a self,
2452 since: &'a clock::Global,
2453 ) -> impl 'a + Iterator<Item = Edit<D>>
2454 where
2455 D: TextDimension + Ord,
2456 {
2457 self.edits_since_in_range(since, Anchor::MIN..Anchor::MAX)
2458 }
2459
2460 pub fn anchored_edits_since<'a, D>(
2461 &'a self,
2462 since: &'a clock::Global,
2463 ) -> impl 'a + Iterator<Item = (Edit<D>, Range<Anchor>)>
2464 where
2465 D: TextDimension + Ord,
2466 {
2467 self.anchored_edits_since_in_range(since, Anchor::MIN..Anchor::MAX)
2468 }
2469
2470 pub fn edits_since_in_range<'a, D>(
2471 &'a self,
2472 since: &'a clock::Global,
2473 range: Range<Anchor>,
2474 ) -> impl 'a + Iterator<Item = Edit<D>>
2475 where
2476 D: TextDimension + Ord,
2477 {
2478 self.anchored_edits_since_in_range(since, range)
2479 .map(|item| item.0)
2480 }
2481
2482 pub fn anchored_edits_since_in_range<'a, D>(
2483 &'a self,
2484 since: &'a clock::Global,
2485 range: Range<Anchor>,
2486 ) -> impl 'a + Iterator<Item = (Edit<D>, Range<Anchor>)>
2487 where
2488 D: TextDimension + Ord,
2489 {
2490 let fragments_cursor = if *since == self.version {
2491 None
2492 } else {
2493 let mut cursor = self.fragments.filter(&None, move |summary| {
2494 !since.observed_all(&summary.max_version)
2495 });
2496 cursor.next();
2497 Some(cursor)
2498 };
2499 let start_fragment_id = self.fragment_id_for_anchor(&range.start);
2500 let (start, _, item) = self
2501 .fragments
2502 .find::<Dimensions<Option<&Locator>, FragmentTextSummary>, _>(
2503 &None,
2504 &Some(start_fragment_id),
2505 Bias::Left,
2506 );
2507 let mut visible_start = start.1.visible;
2508 let mut deleted_start = start.1.deleted;
2509 if let Some(fragment) = item {
2510 let overshoot = range.start.offset - fragment.insertion_offset;
2511 if fragment.visible {
2512 visible_start += overshoot;
2513 } else {
2514 deleted_start += overshoot;
2515 }
2516 }
2517 let end_fragment_id = self.fragment_id_for_anchor(&range.end);
2518
2519 Edits {
2520 visible_cursor: self.visible_text.cursor(visible_start),
2521 deleted_cursor: self.deleted_text.cursor(deleted_start),
2522 fragments_cursor,
2523 undos: &self.undo_map,
2524 since,
2525 old_end: D::zero(()),
2526 new_end: D::zero(()),
2527 range: (start_fragment_id, range.start.offset)..(end_fragment_id, range.end.offset),
2528 buffer_id: self.remote_id,
2529 }
2530 }
2531
2532 pub fn has_edits_since_in_range(&self, since: &clock::Global, range: Range<Anchor>) -> bool {
2533 if *since != self.version {
2534 let start_fragment_id = self.fragment_id_for_anchor(&range.start);
2535 let end_fragment_id = self.fragment_id_for_anchor(&range.end);
2536 let mut cursor = self.fragments.filter::<_, usize>(&None, move |summary| {
2537 !since.observed_all(&summary.max_version)
2538 });
2539 cursor.next();
2540 while let Some(fragment) = cursor.item() {
2541 if fragment.id > *end_fragment_id {
2542 break;
2543 }
2544 if fragment.id > *start_fragment_id {
2545 let was_visible = fragment.was_visible(since, &self.undo_map);
2546 let is_visible = fragment.visible;
2547 if was_visible != is_visible {
2548 return true;
2549 }
2550 }
2551 cursor.next();
2552 }
2553 }
2554 false
2555 }
2556
2557 pub fn has_edits_since(&self, since: &clock::Global) -> bool {
2558 if *since != self.version {
2559 let mut cursor = self.fragments.filter::<_, usize>(&None, move |summary| {
2560 !since.observed_all(&summary.max_version)
2561 });
2562 cursor.next();
2563 while let Some(fragment) = cursor.item() {
2564 let was_visible = fragment.was_visible(since, &self.undo_map);
2565 let is_visible = fragment.visible;
2566 if was_visible != is_visible {
2567 return true;
2568 }
2569 cursor.next();
2570 }
2571 }
2572 false
2573 }
2574
2575 pub fn range_to_version(&self, range: Range<usize>, version: &clock::Global) -> Range<usize> {
2576 let mut offsets = self.offsets_to_version([range.start, range.end], version);
2577 offsets.next().unwrap()..offsets.next().unwrap()
2578 }
2579
2580 /// Converts the given sequence of offsets into their corresponding offsets
2581 /// at a prior version of this buffer.
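    ///
    /// The offsets are expected in ascending order, since the underlying edit iterator is
    /// only advanced forward.
    ///
    /// Sketch (illustrative; `snapshot` and `old_version` are assumed):
    ///
    /// ```ignore
    /// let mut old_offsets = snapshot.offsets_to_version([5usize, 42], &old_version);
    /// let old_5 = old_offsets.next().unwrap();
    /// let old_42 = old_offsets.next().unwrap();
    /// ```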
2582 pub fn offsets_to_version<'a>(
2583 &'a self,
2584 offsets: impl 'a + IntoIterator<Item = usize>,
2585 version: &'a clock::Global,
2586 ) -> impl 'a + Iterator<Item = usize> {
2587 let mut edits = self.edits_since(version).peekable();
2588 let mut last_old_end = 0;
2589 let mut last_new_end = 0;
2590 offsets.into_iter().map(move |new_offset| {
2591 while let Some(edit) = edits.peek() {
2592 if edit.new.start > new_offset {
2593 break;
2594 }
2595
2596 if edit.new.end <= new_offset {
2597 last_new_end = edit.new.end;
2598 last_old_end = edit.old.end;
2599 edits.next();
2600 continue;
2601 }
2602
2603 let overshoot = new_offset - edit.new.start;
2604 return (edit.old.start + overshoot).min(edit.old.end);
2605 }
2606
2607 last_old_end + new_offset.saturating_sub(last_new_end)
2608 })
2609 }
2610
    /// Visually annotates a position or range with the `Debug` representation of a value. The
    /// call site of this function is used as the key; previous annotations with the same key
    /// are removed.
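    ///
    /// Sketch (illustrative; only available under `debug_assertions`, and `snapshot` is
    /// assumed):
    ///
    /// ```ignore
    /// snapshot.debug(&(4usize..10), "why this range was chosen");
    /// ```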
2613 #[cfg(debug_assertions)]
2614 #[track_caller]
2615 pub fn debug<R, V>(&self, ranges: &R, value: V)
2616 where
2617 R: debug::ToDebugRanges,
2618 V: std::fmt::Debug,
2619 {
2620 self.debug_with_key(std::panic::Location::caller(), ranges, value);
2621 }
2622
2623 /// Visually annotates a position or range with the `Debug` representation of a value. Previous
2624 /// debug annotations with the same key will be removed. The key is also used to determine the
2625 /// annotation's color.
2626 #[cfg(debug_assertions)]
2627 pub fn debug_with_key<K, R, V>(&self, key: &K, ranges: &R, value: V)
2628 where
2629 K: std::hash::Hash + 'static,
2630 R: debug::ToDebugRanges,
2631 V: std::fmt::Debug,
2632 {
2633 let ranges = ranges
2634 .to_debug_ranges(self)
2635 .into_iter()
2636 .map(|range| self.anchor_after(range.start)..self.anchor_before(range.end))
2637 .collect();
2638 debug::GlobalDebugRanges::with_locked(|debug_ranges| {
2639 debug_ranges.insert(key, ranges, format!("{value:?}").into());
2640 });
2641 }
2642}
2643
2644struct RopeBuilder<'a> {
2645 old_visible_cursor: rope::Cursor<'a>,
2646 old_deleted_cursor: rope::Cursor<'a>,
2647 new_visible: Rope,
2648 new_deleted: Rope,
2649}
2650
2651impl<'a> RopeBuilder<'a> {
2652 fn new(old_visible_cursor: rope::Cursor<'a>, old_deleted_cursor: rope::Cursor<'a>) -> Self {
2653 Self {
2654 old_visible_cursor,
2655 old_deleted_cursor,
2656 new_visible: Rope::new(),
2657 new_deleted: Rope::new(),
2658 }
2659 }
2660
2661 fn append(&mut self, len: FragmentTextSummary) {
2662 self.push(len.visible, true, true);
2663 self.push(len.deleted, false, false);
2664 }
2665
2666 fn push_fragment(&mut self, fragment: &Fragment, was_visible: bool) {
2667 debug_assert!(fragment.len > 0);
2668 self.push(fragment.len, was_visible, fragment.visible)
2669 }
2670
2671 fn push(&mut self, len: usize, was_visible: bool, is_visible: bool) {
2672 let text = if was_visible {
2673 self.old_visible_cursor
2674 .slice(self.old_visible_cursor.offset() + len)
2675 } else {
2676 self.old_deleted_cursor
2677 .slice(self.old_deleted_cursor.offset() + len)
2678 };
2679 if is_visible {
2680 self.new_visible.append(text);
2681 } else {
2682 self.new_deleted.append(text);
2683 }
2684 }
2685
2686 fn push_str(&mut self, text: &str) {
2687 self.new_visible.push(text);
2688 }
2689
2690 fn finish(mut self) -> (Rope, Rope) {
2691 self.new_visible.append(self.old_visible_cursor.suffix());
2692 self.new_deleted.append(self.old_deleted_cursor.suffix());
2693 (self.new_visible, self.new_deleted)
2694 }
2695}
2696
2697impl<D: TextDimension + Ord, F: FnMut(&FragmentSummary) -> bool> Iterator for Edits<'_, D, F> {
2698 type Item = (Edit<D>, Range<Anchor>);
2699
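    /// Walks the fragments that changed since `since`, coalescing adjacent insertions and
    /// deletions into a single `Edit`, and yields the pending edit once a gap of unchanged
    /// text (or the end of the range) is reached.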
2700 fn next(&mut self) -> Option<Self::Item> {
2701 let mut pending_edit: Option<Self::Item> = None;
2702 let cursor = self.fragments_cursor.as_mut()?;
2703
2704 while let Some(fragment) = cursor.item() {
2705 if fragment.id < *self.range.start.0 {
2706 cursor.next();
2707 continue;
2708 } else if fragment.id > *self.range.end.0 {
2709 break;
2710 }
2711
2712 if cursor.start().visible > self.visible_cursor.offset() {
2713 let summary = self.visible_cursor.summary(cursor.start().visible);
2714 self.old_end.add_assign(&summary);
2715 self.new_end.add_assign(&summary);
2716 }
2717
2718 if pending_edit
2719 .as_ref()
2720 .is_some_and(|(change, _)| change.new.end < self.new_end)
2721 {
2722 break;
2723 }
2724
2725 let start_anchor = Anchor {
2726 timestamp: fragment.timestamp,
2727 offset: fragment.insertion_offset,
2728 bias: Bias::Right,
2729 buffer_id: Some(self.buffer_id),
2730 };
2731 let end_anchor = Anchor {
2732 timestamp: fragment.timestamp,
2733 offset: fragment.insertion_offset + fragment.len,
2734 bias: Bias::Left,
2735 buffer_id: Some(self.buffer_id),
2736 };
2737
2738 if !fragment.was_visible(self.since, self.undos) && fragment.visible {
2739 let mut visible_end = cursor.end().visible;
2740 if fragment.id == *self.range.end.0 {
2741 visible_end = cmp::min(
2742 visible_end,
2743 cursor.start().visible + (self.range.end.1 - fragment.insertion_offset),
2744 );
2745 }
2746
2747 let fragment_summary = self.visible_cursor.summary(visible_end);
2748 let mut new_end = self.new_end;
2749 new_end.add_assign(&fragment_summary);
2750 if let Some((edit, range)) = pending_edit.as_mut() {
2751 edit.new.end = new_end;
2752 range.end = end_anchor;
2753 } else {
2754 pending_edit = Some((
2755 Edit {
2756 old: self.old_end..self.old_end,
2757 new: self.new_end..new_end,
2758 },
2759 start_anchor..end_anchor,
2760 ));
2761 }
2762
2763 self.new_end = new_end;
2764 } else if fragment.was_visible(self.since, self.undos) && !fragment.visible {
2765 let mut deleted_end = cursor.end().deleted;
2766 if fragment.id == *self.range.end.0 {
2767 deleted_end = cmp::min(
2768 deleted_end,
2769 cursor.start().deleted + (self.range.end.1 - fragment.insertion_offset),
2770 );
2771 }
2772
2773 if cursor.start().deleted > self.deleted_cursor.offset() {
2774 self.deleted_cursor.seek_forward(cursor.start().deleted);
2775 }
2776 let fragment_summary = self.deleted_cursor.summary(deleted_end);
2777 let mut old_end = self.old_end;
2778 old_end.add_assign(&fragment_summary);
2779 if let Some((edit, range)) = pending_edit.as_mut() {
2780 edit.old.end = old_end;
2781 range.end = end_anchor;
2782 } else {
2783 pending_edit = Some((
2784 Edit {
2785 old: self.old_end..old_end,
2786 new: self.new_end..self.new_end,
2787 },
2788 start_anchor..end_anchor,
2789 ));
2790 }
2791
2792 self.old_end = old_end;
2793 }
2794
2795 cursor.next();
2796 }
2797
2798 pending_edit
2799 }
2800}
2801
2802impl Fragment {
2803 fn is_visible(&self, undos: &UndoMap) -> bool {
2804 !undos.is_undone(self.timestamp) && self.deletions.iter().all(|d| undos.is_undone(*d))
2805 }
2806
2807 fn was_visible(&self, version: &clock::Global, undos: &UndoMap) -> bool {
2808 (version.observed(self.timestamp) && !undos.was_undone(self.timestamp, version))
2809 && self
2810 .deletions
2811 .iter()
2812 .all(|d| !version.observed(*d) || undos.was_undone(*d, version))
2813 }
2814}
2815
2816impl sum_tree::Item for Fragment {
2817 type Summary = FragmentSummary;
2818
2819 fn summary(&self, _cx: &Option<clock::Global>) -> Self::Summary {
2820 let mut max_version = clock::Global::new();
2821 max_version.observe(self.timestamp);
2822 for deletion in &self.deletions {
2823 max_version.observe(*deletion);
2824 }
2825 max_version.join(&self.max_undos);
2826
2827 let mut min_insertion_version = clock::Global::new();
2828 min_insertion_version.observe(self.timestamp);
2829 let max_insertion_version = min_insertion_version.clone();
2830 if self.visible {
2831 FragmentSummary {
2832 max_id: self.id.clone(),
2833 text: FragmentTextSummary {
2834 visible: self.len,
2835 deleted: 0,
2836 },
2837 max_version,
2838 min_insertion_version,
2839 max_insertion_version,
2840 }
2841 } else {
2842 FragmentSummary {
2843 max_id: self.id.clone(),
2844 text: FragmentTextSummary {
2845 visible: 0,
2846 deleted: self.len,
2847 },
2848 max_version,
2849 min_insertion_version,
2850 max_insertion_version,
2851 }
2852 }
2853 }
2854}
2855
2856impl sum_tree::Summary for FragmentSummary {
2857 type Context<'a> = &'a Option<clock::Global>;
2858
2859 fn zero(_cx: Self::Context<'_>) -> Self {
2860 Default::default()
2861 }
2862
2863 fn add_summary(&mut self, other: &Self, _: Self::Context<'_>) {
2864 self.max_id.assign(&other.max_id);
2865 self.text.visible += &other.text.visible;
2866 self.text.deleted += &other.text.deleted;
2867 self.max_version.join(&other.max_version);
2868 self.min_insertion_version
2869 .meet(&other.min_insertion_version);
2870 self.max_insertion_version
2871 .join(&other.max_insertion_version);
2872 }
2873}
2874
2875impl Default for FragmentSummary {
2876 fn default() -> Self {
2877 FragmentSummary {
2878 max_id: Locator::min(),
2879 text: FragmentTextSummary::default(),
2880 max_version: clock::Global::new(),
2881 min_insertion_version: clock::Global::new(),
2882 max_insertion_version: clock::Global::new(),
2883 }
2884 }
2885}
2886
2887impl sum_tree::Item for InsertionFragment {
2888 type Summary = InsertionFragmentKey;
2889
2890 fn summary(&self, _cx: ()) -> Self::Summary {
2891 InsertionFragmentKey {
2892 timestamp: self.timestamp,
2893 split_offset: self.split_offset,
2894 }
2895 }
2896}
2897
2898impl sum_tree::KeyedItem for InsertionFragment {
2899 type Key = InsertionFragmentKey;
2900
2901 fn key(&self) -> Self::Key {
2902 sum_tree::Item::summary(self, ())
2903 }
2904}
2905
2906impl InsertionFragment {
2907 fn new(fragment: &Fragment) -> Self {
2908 Self {
2909 timestamp: fragment.timestamp,
2910 split_offset: fragment.insertion_offset,
2911 fragment_id: fragment.id.clone(),
2912 }
2913 }
2914
2915 fn insert_new(fragment: &Fragment) -> sum_tree::Edit<Self> {
2916 sum_tree::Edit::Insert(Self::new(fragment))
2917 }
2918}
2919
2920impl sum_tree::ContextLessSummary for InsertionFragmentKey {
2921 fn zero() -> Self {
2922 InsertionFragmentKey {
2923 timestamp: Lamport::MIN,
2924 split_offset: 0,
2925 }
2926 }
2927
2928 fn add_summary(&mut self, summary: &Self) {
2929 *self = *summary;
2930 }
2931}
2932
2933#[derive(Copy, Clone, Debug, Default, PartialEq, Eq, PartialOrd, Ord, Hash)]
2934pub struct FullOffset(pub usize);
2935
2936impl ops::AddAssign<usize> for FullOffset {
2937 fn add_assign(&mut self, rhs: usize) {
2938 self.0 += rhs;
2939 }
2940}
2941
2942impl ops::Add<usize> for FullOffset {
2943 type Output = Self;
2944
2945 fn add(mut self, rhs: usize) -> Self::Output {
2946 self += rhs;
2947 self
2948 }
2949}
2950
2951impl ops::Sub for FullOffset {
2952 type Output = usize;
2953
2954 fn sub(self, rhs: Self) -> Self::Output {
2955 self.0 - rhs.0
2956 }
2957}
2958
2959impl sum_tree::Dimension<'_, FragmentSummary> for usize {
2960 fn zero(_: &Option<clock::Global>) -> Self {
2961 Default::default()
2962 }
2963
2964 fn add_summary(&mut self, summary: &FragmentSummary, _: &Option<clock::Global>) {
2965 *self += summary.text.visible;
2966 }
2967}
2968
2969impl sum_tree::Dimension<'_, FragmentSummary> for FullOffset {
2970 fn zero(_: &Option<clock::Global>) -> Self {
2971 Default::default()
2972 }
2973
2974 fn add_summary(&mut self, summary: &FragmentSummary, _: &Option<clock::Global>) {
2975 self.0 += summary.text.visible + summary.text.deleted;
2976 }
2977}
2978
2979impl<'a> sum_tree::Dimension<'a, FragmentSummary> for Option<&'a Locator> {
2980 fn zero(_: &Option<clock::Global>) -> Self {
2981 Default::default()
2982 }
2983
2984 fn add_summary(&mut self, summary: &'a FragmentSummary, _: &Option<clock::Global>) {
2985 *self = Some(&summary.max_id);
2986 }
2987}
2988
2989impl sum_tree::SeekTarget<'_, FragmentSummary, FragmentTextSummary> for usize {
2990 fn cmp(
2991 &self,
2992 cursor_location: &FragmentTextSummary,
2993 _: &Option<clock::Global>,
2994 ) -> cmp::Ordering {
2995 Ord::cmp(self, &cursor_location.visible)
2996 }
2997}
2998
2999#[derive(Copy, Clone, Debug, Eq, PartialEq)]
3000enum VersionedFullOffset {
3001 Offset(FullOffset),
3002 Invalid,
3003}
3004
3005impl VersionedFullOffset {
3006 fn full_offset(&self) -> FullOffset {
3007 if let Self::Offset(position) = self {
3008 *position
3009 } else {
3010 panic!("invalid version")
3011 }
3012 }
3013}
3014
3015impl Default for VersionedFullOffset {
3016 fn default() -> Self {
3017 Self::Offset(Default::default())
3018 }
3019}
3020
3021impl<'a> sum_tree::Dimension<'a, FragmentSummary> for VersionedFullOffset {
3022 fn zero(_cx: &Option<clock::Global>) -> Self {
3023 Default::default()
3024 }
3025
3026 fn add_summary(&mut self, summary: &'a FragmentSummary, cx: &Option<clock::Global>) {
3027 if let Self::Offset(offset) = self {
3028 let version = cx.as_ref().unwrap();
3029 if version.observed_all(&summary.max_insertion_version) {
3030 *offset += summary.text.visible + summary.text.deleted;
3031 } else if version.observed_any(&summary.min_insertion_version) {
3032 *self = Self::Invalid;
3033 }
3034 }
3035 }
3036}
3037
3038impl sum_tree::SeekTarget<'_, FragmentSummary, Self> for VersionedFullOffset {
3039 fn cmp(&self, cursor_position: &Self, _: &Option<clock::Global>) -> cmp::Ordering {
3040 match (self, cursor_position) {
3041 (Self::Offset(a), Self::Offset(b)) => Ord::cmp(a, b),
3042 (Self::Offset(_), Self::Invalid) => cmp::Ordering::Less,
3043 (Self::Invalid, _) => unreachable!(),
3044 }
3045 }
3046}
3047
3048impl Operation {
3049 fn replica_id(&self) -> ReplicaId {
3050 operation_queue::Operation::lamport_timestamp(self).replica_id
3051 }
3052
3053 pub fn timestamp(&self) -> clock::Lamport {
3054 match self {
3055 Operation::Edit(edit) => edit.timestamp,
3056 Operation::Undo(undo) => undo.timestamp,
3057 }
3058 }
3059
3060 pub fn as_edit(&self) -> Option<&EditOperation> {
3061 match self {
3062 Operation::Edit(edit) => Some(edit),
3063 _ => None,
3064 }
3065 }
3066
3067 pub fn is_edit(&self) -> bool {
3068 matches!(self, Operation::Edit { .. })
3069 }
3070}
3071
3072impl operation_queue::Operation for Operation {
3073 fn lamport_timestamp(&self) -> clock::Lamport {
3074 match self {
3075 Operation::Edit(edit) => edit.timestamp,
3076 Operation::Undo(undo) => undo.timestamp,
3077 }
3078 }
3079}
3080
3081pub trait ToOffset {
3082 fn to_offset(&self, snapshot: &BufferSnapshot) -> usize;
    /// Returns the next offset in the buffer after this position, respecting UTF-8 character boundaries.
3084 fn to_next_offset(&self, snapshot: &BufferSnapshot) -> usize {
3085 snapshot
3086 .visible_text
3087 .ceil_char_boundary(self.to_offset(snapshot) + 1)
3088 }
    /// Returns the previous offset in the buffer before this position, respecting UTF-8 character boundaries.
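    ///
    /// Sketch (illustrative; `snapshot` is assumed). `line_len` uses this to find the
    /// end-of-line offset of the preceding row:
    ///
    /// ```ignore
    /// let end_of_row_2 = Point::new(3, 0).to_previous_offset(&snapshot);
    /// ```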
3090 fn to_previous_offset(&self, snapshot: &BufferSnapshot) -> usize {
3091 snapshot
3092 .visible_text
3093 .floor_char_boundary(self.to_offset(snapshot).saturating_sub(1))
3094 }
3095}
3096
3097impl ToOffset for Point {
3098 fn to_offset(&self, snapshot: &BufferSnapshot) -> usize {
3099 snapshot.point_to_offset(*self)
3100 }
3101}
3102
3103impl ToOffset for usize {
3104 #[track_caller]
3105 fn to_offset(&self, snapshot: &BufferSnapshot) -> usize {
3106 assert!(
3107 *self <= snapshot.len(),
3108 "offset {} is out of range, snapshot length is {}",
3109 self,
3110 snapshot.len()
3111 );
3112 *self
3113 }
3114}
3115
3116impl ToOffset for Anchor {
3117 fn to_offset(&self, snapshot: &BufferSnapshot) -> usize {
3118 snapshot.summary_for_anchor(self)
3119 }
3120}
3121
3122impl<T: ToOffset> ToOffset for &T {
3123 fn to_offset(&self, content: &BufferSnapshot) -> usize {
3124 (*self).to_offset(content)
3125 }
3126}
3127
3128impl ToOffset for PointUtf16 {
3129 fn to_offset(&self, snapshot: &BufferSnapshot) -> usize {
3130 snapshot.point_utf16_to_offset(*self)
3131 }
3132}
3133
3134impl ToOffset for Unclipped<PointUtf16> {
3135 fn to_offset(&self, snapshot: &BufferSnapshot) -> usize {
3136 snapshot.unclipped_point_utf16_to_offset(*self)
3137 }
3138}
3139
3140pub trait ToPoint {
3141 fn to_point(&self, snapshot: &BufferSnapshot) -> Point;
3142}
3143
3144impl ToPoint for Anchor {
3145 fn to_point(&self, snapshot: &BufferSnapshot) -> Point {
3146 snapshot.summary_for_anchor(self)
3147 }
3148}
3149
3150impl ToPoint for usize {
3151 fn to_point(&self, snapshot: &BufferSnapshot) -> Point {
3152 snapshot.offset_to_point(*self)
3153 }
3154}
3155
3156impl ToPoint for Point {
3157 fn to_point(&self, _: &BufferSnapshot) -> Point {
3158 *self
3159 }
3160}
3161
3162impl ToPoint for Unclipped<PointUtf16> {
3163 fn to_point(&self, snapshot: &BufferSnapshot) -> Point {
3164 snapshot.unclipped_point_utf16_to_point(*self)
3165 }
3166}
3167
3168pub trait ToPointUtf16 {
3169 fn to_point_utf16(&self, snapshot: &BufferSnapshot) -> PointUtf16;
3170}
3171
3172impl ToPointUtf16 for Anchor {
3173 fn to_point_utf16(&self, snapshot: &BufferSnapshot) -> PointUtf16 {
3174 snapshot.summary_for_anchor(self)
3175 }
3176}
3177
3178impl ToPointUtf16 for usize {
3179 fn to_point_utf16(&self, snapshot: &BufferSnapshot) -> PointUtf16 {
3180 snapshot.offset_to_point_utf16(*self)
3181 }
3182}
3183
3184impl ToPointUtf16 for PointUtf16 {
3185 fn to_point_utf16(&self, _: &BufferSnapshot) -> PointUtf16 {
3186 *self
3187 }
3188}
3189
3190impl ToPointUtf16 for Point {
3191 fn to_point_utf16(&self, snapshot: &BufferSnapshot) -> PointUtf16 {
3192 snapshot.point_to_point_utf16(*self)
3193 }
3194}
3195
3196pub trait ToOffsetUtf16 {
3197 fn to_offset_utf16(&self, snapshot: &BufferSnapshot) -> OffsetUtf16;
3198}
3199
3200impl ToOffsetUtf16 for Anchor {
3201 fn to_offset_utf16(&self, snapshot: &BufferSnapshot) -> OffsetUtf16 {
3202 snapshot.summary_for_anchor(self)
3203 }
3204}
3205
3206impl ToOffsetUtf16 for usize {
3207 fn to_offset_utf16(&self, snapshot: &BufferSnapshot) -> OffsetUtf16 {
3208 snapshot.offset_to_offset_utf16(*self)
3209 }
3210}
3211
3212impl ToOffsetUtf16 for OffsetUtf16 {
3213 fn to_offset_utf16(&self, _snapshot: &BufferSnapshot) -> OffsetUtf16 {
3214 *self
3215 }
3216}
3217
3218pub trait FromAnchor {
3219 fn from_anchor(anchor: &Anchor, snapshot: &BufferSnapshot) -> Self;
3220}
3221
3222impl FromAnchor for Anchor {
3223 fn from_anchor(anchor: &Anchor, _snapshot: &BufferSnapshot) -> Self {
3224 *anchor
3225 }
3226}
3227
3228impl FromAnchor for Point {
3229 fn from_anchor(anchor: &Anchor, snapshot: &BufferSnapshot) -> Self {
3230 snapshot.summary_for_anchor(anchor)
3231 }
3232}
3233
3234impl FromAnchor for PointUtf16 {
3235 fn from_anchor(anchor: &Anchor, snapshot: &BufferSnapshot) -> Self {
3236 snapshot.summary_for_anchor(anchor)
3237 }
3238}
3239
3240impl FromAnchor for usize {
3241 fn from_anchor(anchor: &Anchor, snapshot: &BufferSnapshot) -> Self {
3242 snapshot.summary_for_anchor(anchor)
3243 }
3244}
3245
3246#[derive(Clone, Copy, Debug, PartialEq)]
3247pub enum LineEnding {
3248 Unix,
3249 Windows,
3250}
3251
3252impl Default for LineEnding {
3253 fn default() -> Self {
3254 #[cfg(unix)]
3255 return Self::Unix;
3256
3257 #[cfg(not(unix))]
3258 return Self::Windows;
3259 }
3260}
3261
3262impl LineEnding {
3263 pub fn as_str(&self) -> &'static str {
3264 match self {
3265 LineEnding::Unix => "\n",
3266 LineEnding::Windows => "\r\n",
3267 }
3268 }
3269
3270 pub fn label(&self) -> &'static str {
3271 match self {
3272 LineEnding::Unix => "LF",
3273 LineEnding::Windows => "CRLF",
3274 }
3275 }
3276
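    /// Guesses the line ending style by scanning up to roughly the first 1000 bytes of
    /// `text` for the first newline.
    ///
    /// Sketch (illustrative):
    ///
    /// ```ignore
    /// assert_eq!(LineEnding::detect("a\r\nb"), LineEnding::Windows);
    /// assert_eq!(LineEnding::detect("a\nb"), LineEnding::Unix);
    /// ```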
3277 pub fn detect(text: &str) -> Self {
3278 let mut max_ix = cmp::min(text.len(), 1000);
3279 while !text.is_char_boundary(max_ix) {
3280 max_ix -= 1;
3281 }
3282
3283 if let Some(ix) = text[..max_ix].find(['\n']) {
3284 if ix > 0 && text.as_bytes()[ix - 1] == b'\r' {
3285 Self::Windows
3286 } else {
3287 Self::Unix
3288 }
3289 } else {
3290 Self::default()
3291 }
3292 }
3293
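    /// Rewrites `\r\n` and lone `\r` sequences to `\n` in place, leaving the string
    /// untouched if it is already normalized.
    ///
    /// Sketch (illustrative):
    ///
    /// ```ignore
    /// let mut text = String::from("a\r\nb\rc");
    /// LineEnding::normalize(&mut text);
    /// assert_eq!(text, "a\nb\nc");
    /// ```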
3294 pub fn normalize(text: &mut String) {
3295 if let Cow::Owned(replaced) = LINE_SEPARATORS_REGEX.replace_all(text, "\n") {
3296 *text = replaced;
3297 }
3298 }
3299
3300 pub fn normalize_arc(text: Arc<str>) -> Arc<str> {
3301 if let Cow::Owned(replaced) = LINE_SEPARATORS_REGEX.replace_all(&text, "\n") {
3302 replaced.into()
3303 } else {
3304 text
3305 }
3306 }
3307
3308 pub fn normalize_cow(text: Cow<str>) -> Cow<str> {
3309 if let Cow::Owned(replaced) = LINE_SEPARATORS_REGEX.replace_all(&text, "\n") {
3310 replaced.into()
3311 } else {
3312 text
3313 }
3314 }
3315}
3316
3317#[cfg(debug_assertions)]
3318pub mod debug {
3319 use super::*;
3320 use parking_lot::Mutex;
3321 use std::any::TypeId;
3322 use std::hash::{Hash, Hasher};
3323
3324 static GLOBAL_DEBUG_RANGES: Mutex<Option<GlobalDebugRanges>> = Mutex::new(None);
3325
3326 pub struct GlobalDebugRanges {
3327 pub ranges: Vec<DebugRange>,
3328 key_to_occurrence_index: HashMap<Key, usize>,
3329 next_occurrence_index: usize,
3330 }
3331
3332 pub struct DebugRange {
3333 key: Key,
3334 pub ranges: Vec<Range<Anchor>>,
3335 pub value: Arc<str>,
3336 pub occurrence_index: usize,
3337 }
3338
3339 #[derive(Debug, Clone, PartialEq, Eq, Hash)]
3340 struct Key {
3341 type_id: TypeId,
3342 hash: u64,
3343 }
3344
3345 impl GlobalDebugRanges {
3346 pub fn with_locked<R>(f: impl FnOnce(&mut Self) -> R) -> R {
3347 let mut state = GLOBAL_DEBUG_RANGES.lock();
3348 if state.is_none() {
3349 *state = Some(GlobalDebugRanges {
3350 ranges: Vec::new(),
3351 key_to_occurrence_index: HashMap::default(),
3352 next_occurrence_index: 0,
3353 });
3354 }
3355 if let Some(global_debug_ranges) = state.as_mut() {
3356 f(global_debug_ranges)
3357 } else {
3358 unreachable!()
3359 }
3360 }
3361
3362 pub fn insert<K: Hash + 'static>(
3363 &mut self,
3364 key: &K,
3365 ranges: Vec<Range<Anchor>>,
3366 value: Arc<str>,
3367 ) {
3368 let occurrence_index = *self
3369 .key_to_occurrence_index
3370 .entry(Key::new(key))
3371 .or_insert_with(|| {
3372 let occurrence_index = self.next_occurrence_index;
3373 self.next_occurrence_index += 1;
3374 occurrence_index
3375 });
3376 let key = Key::new(key);
3377 let existing = self
3378 .ranges
3379 .iter()
3380 .enumerate()
3381 .rfind(|(_, existing)| existing.key == key);
3382 if let Some((existing_ix, _)) = existing {
3383 self.ranges.remove(existing_ix);
3384 }
3385 self.ranges.push(DebugRange {
3386 ranges,
3387 key,
3388 value,
3389 occurrence_index,
3390 });
3391 }
3392
3393 pub fn remove<K: Hash + 'static>(&mut self, key: &K) {
3394 self.remove_impl(&Key::new(key));
3395 }
3396
3397 fn remove_impl(&mut self, key: &Key) {
3398 let existing = self
3399 .ranges
3400 .iter()
3401 .enumerate()
3402 .rfind(|(_, existing)| &existing.key == key);
3403 if let Some((existing_ix, _)) = existing {
3404 self.ranges.remove(existing_ix);
3405 }
3406 }
3407
3408 pub fn remove_all_with_key_type<K: 'static>(&mut self) {
3409 self.ranges
3410 .retain(|item| item.key.type_id != TypeId::of::<K>());
3411 }
3412 }
3413
3414 impl Key {
3415 fn new<K: Hash + 'static>(key: &K) -> Self {
3416 let type_id = TypeId::of::<K>();
3417 let mut hasher = collections::FxHasher::default();
3418 key.hash(&mut hasher);
3419 Key {
3420 type_id,
3421 hash: hasher.finish(),
3422 }
3423 }
3424 }
3425
3426 pub trait ToDebugRanges {
3427 fn to_debug_ranges(&self, snapshot: &BufferSnapshot) -> Vec<Range<usize>>;
3428 }
3429
3430 impl<T: ToOffset> ToDebugRanges for T {
3431 fn to_debug_ranges(&self, snapshot: &BufferSnapshot) -> Vec<Range<usize>> {
3432 [self.to_offset(snapshot)].to_debug_ranges(snapshot)
3433 }
3434 }
3435
3436 impl<T: ToOffset + Clone> ToDebugRanges for Range<T> {
3437 fn to_debug_ranges(&self, snapshot: &BufferSnapshot) -> Vec<Range<usize>> {
3438 [self.clone()].to_debug_ranges(snapshot)
3439 }
3440 }
3441
3442 impl<T: ToOffset> ToDebugRanges for Vec<T> {
3443 fn to_debug_ranges(&self, snapshot: &BufferSnapshot) -> Vec<Range<usize>> {
3444 self.as_slice().to_debug_ranges(snapshot)
3445 }
3446 }
3447
3448 impl<T: ToOffset> ToDebugRanges for Vec<Range<T>> {
3449 fn to_debug_ranges(&self, snapshot: &BufferSnapshot) -> Vec<Range<usize>> {
3450 self.as_slice().to_debug_ranges(snapshot)
3451 }
3452 }
3453
3454 impl<T: ToOffset> ToDebugRanges for [T] {
3455 fn to_debug_ranges(&self, snapshot: &BufferSnapshot) -> Vec<Range<usize>> {
3456 self.iter()
3457 .map(|item| {
3458 let offset = item.to_offset(snapshot);
3459 offset..offset
3460 })
3461 .collect()
3462 }
3463 }
3464
3465 impl<T: ToOffset> ToDebugRanges for [Range<T>] {
3466 fn to_debug_ranges(&self, snapshot: &BufferSnapshot) -> Vec<Range<usize>> {
3467 self.iter()
3468 .map(|range| range.start.to_offset(snapshot)..range.end.to_offset(snapshot))
3469 .collect()
3470 }
3471 }
3472}