1mod anchor;
2pub mod locator;
3#[cfg(any(test, feature = "test-support"))]
4pub mod network;
5pub mod operation_queue;
6mod patch;
7mod selection;
8pub mod subscription;
9#[cfg(test)]
10mod tests;
11mod undo_map;
12
13pub use anchor::*;
14use anyhow::{Context as _, Result};
15use clock::Lamport;
16pub use clock::ReplicaId;
17use collections::{HashMap, HashSet};
18use locator::Locator;
19use operation_queue::OperationQueue;
20pub use patch::Patch;
21use postage::{oneshot, prelude::*};
22
23use regex::Regex;
24pub use rope::*;
25pub use selection::*;
26use smallvec::SmallVec;
27use std::{
28 borrow::Cow,
29 cmp::{self, Ordering, Reverse},
30 fmt::Display,
31 future::Future,
32 iter::Iterator,
33 num::NonZeroU64,
34 ops::{self, Deref, Range, Sub},
35 str,
36 sync::{Arc, LazyLock},
37 time::{Duration, Instant},
38};
39pub use subscription::*;
40pub use sum_tree::Bias;
41use sum_tree::{Dimensions, FilterCursor, SumTree, TreeMap, TreeSet};
42use undo_map::UndoMap;
43use util::debug_panic;
44
45#[cfg(any(test, feature = "test-support"))]
46use util::RandomCharIter;
47
/// Matches CRLF (`\r\n`) or a lone CR (`\r`) — the line separators that get
/// rewritten during line-ending normalization.
static LINE_SEPARATORS_REGEX: LazyLock<Regex> =
    LazyLock::new(|| Regex::new(r"\r\n|\r").expect("Failed to create LINE_SEPARATORS_REGEX"));
50
/// The maximum length of a single insertion operation.
/// Fragments larger than this will be split into multiple smaller
/// fragments. This allows us to use relative `u32` offsets instead of `usize`,
/// reducing memory usage.
/// In tests the limit is tiny (16) so the splitting logic is exercised often.
const MAX_INSERTION_LEN: usize = if cfg!(test) { 16 } else { u32::MAX as usize };
56
57pub type TransactionId = clock::Lamport;
58
/// A mutable, collaboratively-editable text buffer: the current
/// [`BufferSnapshot`] plus the history and queues needed to apply local and
/// remote operations.
pub struct Buffer {
    // Current text state (visible/deleted ropes, fragment trees, version).
    snapshot: BufferSnapshot,
    // Undo/redo stacks and the log of every operation seen.
    history: History,
    // Remote operations that cannot be applied yet (causally not ready).
    deferred_ops: OperationQueue<Operation>,
    // Replicas that currently have at least one deferred operation.
    deferred_replicas: HashSet<ReplicaId>,
    pub lamport_clock: clock::Lamport,
    // Subscribers notified with an edit `Patch` whenever the buffer changes.
    subscriptions: Topic<usize>,
    // Channels resolved once a given edit id has been applied.
    edit_id_resolvers: HashMap<clock::Lamport, Vec<oneshot::Sender<()>>>,
    // Channels resolved once the buffer has observed a given version.
    wait_for_version_txs: Vec<(clock::Global, oneshot::Sender<()>)>,
}
69
/// A stable, non-zero identifier for a buffer.
/// Backed by `NonZeroU64` so `Option<BufferId>` is the same size as `BufferId`.
#[repr(transparent)]
#[derive(Clone, Copy, Debug, Hash, PartialEq, PartialOrd, Ord, Eq)]
pub struct BufferId(NonZeroU64);
73
74impl Display for BufferId {
75 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
76 write!(f, "{}", self.0)
77 }
78}
79
80impl From<NonZeroU64> for BufferId {
81 fn from(id: NonZeroU64) -> Self {
82 BufferId(id)
83 }
84}
85
86impl BufferId {
87 /// Returns Err if `id` is outside of BufferId domain.
88 pub fn new(id: u64) -> anyhow::Result<Self> {
89 let id = NonZeroU64::new(id).context("Buffer id cannot be 0.")?;
90 Ok(Self(id))
91 }
92
93 /// Increments this buffer id, returning the old value.
94 /// So that's a post-increment operator in disguise.
95 pub fn next(&mut self) -> Self {
96 let old = *self;
97 self.0 = self.0.saturating_add(1);
98 old
99 }
100
101 pub fn to_proto(self) -> u64 {
102 self.into()
103 }
104}
105
106impl From<BufferId> for u64 {
107 fn from(id: BufferId) -> Self {
108 id.0.get()
109 }
110}
111
/// An immutable snapshot of a [`Buffer`]'s state at a particular version.
#[derive(Clone)]
pub struct BufferSnapshot {
    // The buffer's visible (non-deleted) text.
    visible_text: Rope,
    // Deleted text, retained so history and old anchors stay resolvable.
    deleted_text: Rope,
    // All fragments (visible and deleted), in document order.
    fragments: SumTree<Fragment>,
    // Secondary index: fragments keyed by their insertion (timestamp + offset).
    insertions: SumTree<InsertionFragment>,
    // Mapping from edits to the insertion ranges they deleted.
    insertion_slices: TreeSet<InsertionSlice>,
    // Undo state for edits (see `UndoOperation::counts`).
    undo_map: UndoMap,
    pub version: clock::Global,
    remote_id: BufferId,
    replica_id: ReplicaId,
    line_ending: LineEnding,
}
125
/// An entry in the undo/redo stacks: a transaction plus the timing metadata
/// used to decide whether adjacent transactions should be grouped.
#[derive(Clone, Debug)]
pub struct HistoryEntry {
    transaction: Transaction,
    // When this transaction's first edit happened.
    first_edit_at: Instant,
    // When this transaction's most recent edit happened.
    last_edit_at: Instant,
    // When true, this entry must not be grouped with earlier entries.
    suppress_grouping: bool,
}
133
/// A group of edits that are undone and redone together.
#[derive(Clone, Debug)]
pub struct Transaction {
    pub id: TransactionId,
    /// Ids of the edit operations contained in this transaction.
    pub edit_ids: Vec<clock::Lamport>,
    /// The buffer version when the transaction started.
    pub start: clock::Global,
}
140
141impl Transaction {
142 pub fn merge_in(&mut self, other: Transaction) {
143 self.edit_ids.extend(other.edit_ids);
144 }
145}
146
147impl HistoryEntry {
148 pub fn transaction_id(&self) -> TransactionId {
149 self.transaction.id
150 }
151}
152
/// Undo/redo history for a buffer.
struct History {
    // The text the buffer started from.
    base_text: Rope,
    // Every operation this buffer has seen, keyed by timestamp.
    operations: TreeMap<clock::Lamport, Operation>,
    undo_stack: Vec<HistoryEntry>,
    redo_stack: Vec<HistoryEntry>,
    // Depth of nested `start_transaction` calls; only the outermost opens an entry.
    transaction_depth: usize,
    // Transactions closer together in time than this may be grouped for undo.
    group_interval: Duration,
}
161
#[derive(Clone, Debug, Eq, PartialEq)]
struct InsertionSlice {
    // The two `clock::Lamport` ids are stored field-by-field ("inlined") so
    // their replica ids can share alignment, saving 4 bytes per slice.
    // These two fields together represent `edit_id: clock::Lamport`.
    edit_id_value: clock::Seq,
    edit_id_replica_id: ReplicaId,
    // These two fields together represent `insertion_id: clock::Lamport`.
    insertion_id_value: clock::Seq,
    insertion_id_replica_id: ReplicaId,
    // Range within the insertion identified by `insertion_id`.
    range: Range<u32>,
}
173
174impl Ord for InsertionSlice {
175 fn cmp(&self, other: &Self) -> Ordering {
176 Lamport {
177 value: self.edit_id_value,
178 replica_id: self.edit_id_replica_id,
179 }
180 .cmp(&Lamport {
181 value: other.edit_id_value,
182 replica_id: other.edit_id_replica_id,
183 })
184 .then_with(|| {
185 Lamport {
186 value: self.insertion_id_value,
187 replica_id: self.insertion_id_replica_id,
188 }
189 .cmp(&Lamport {
190 value: other.insertion_id_value,
191 replica_id: other.insertion_id_replica_id,
192 })
193 })
194 .then_with(|| self.range.start.cmp(&other.range.start))
195 .then_with(|| self.range.end.cmp(&other.range.end))
196 }
197}
198
199impl PartialOrd for InsertionSlice {
200 fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
201 Some(self.cmp(other))
202 }
203}
204
205impl InsertionSlice {
206 fn from_fragment(edit_id: clock::Lamport, fragment: &Fragment) -> Self {
207 Self {
208 edit_id_value: edit_id.value,
209 edit_id_replica_id: edit_id.replica_id,
210 insertion_id_value: fragment.timestamp.value,
211 insertion_id_replica_id: fragment.timestamp.replica_id,
212 range: fragment.insertion_offset..fragment.insertion_offset + fragment.len,
213 }
214 }
215}
216
impl History {
    /// Creates an empty history over `base_text`.
    pub fn new(base_text: Rope) -> Self {
        Self {
            base_text,
            operations: Default::default(),
            undo_stack: Vec::new(),
            redo_stack: Vec::new(),
            transaction_depth: 0,
            // Don't group transactions in tests unless we opt in, because it's a footgun.
            #[cfg(any(test, feature = "test-support"))]
            group_interval: Duration::ZERO,
            #[cfg(not(any(test, feature = "test-support")))]
            group_interval: Duration::from_millis(300),
        }
    }

    /// Records an operation so it can be looked up by id later (e.g. by `push_undo`).
    fn push(&mut self, op: Operation) {
        self.operations.insert(op.timestamp(), op);
    }

    /// Begins a (possibly nested) transaction. Only the outermost call opens a
    /// new undo-stack entry and returns its id; nested calls return `None`.
    fn start_transaction(
        &mut self,
        start: clock::Global,
        now: Instant,
        clock: &mut clock::Lamport,
    ) -> Option<TransactionId> {
        self.transaction_depth += 1;
        if self.transaction_depth == 1 {
            let id = clock.tick();
            self.undo_stack.push(HistoryEntry {
                transaction: Transaction {
                    id,
                    start,
                    edit_ids: Default::default(),
                },
                first_edit_at: now,
                last_edit_at: now,
                suppress_grouping: false,
            });
            Some(id)
        } else {
            None
        }
    }

    /// Ends one level of transaction nesting. When the outermost transaction
    /// closes: an entry with no edits is discarded; otherwise the redo stack
    /// is cleared and the completed entry is returned.
    fn end_transaction(&mut self, now: Instant) -> Option<&HistoryEntry> {
        assert_ne!(self.transaction_depth, 0);
        self.transaction_depth -= 1;
        if self.transaction_depth == 0 {
            if self
                .undo_stack
                .last()
                .unwrap()
                .transaction
                .edit_ids
                .is_empty()
            {
                self.undo_stack.pop();
                None
            } else {
                self.redo_stack.clear();
                let entry = self.undo_stack.last_mut().unwrap();
                entry.last_edit_at = now;
                Some(entry)
            }
        } else {
            None
        }
    }

    /// Merges trailing undo-stack entries whose edits happened within
    /// `group_interval` of each other (stopping at any entry that suppresses
    /// grouping), returning the id of the resulting topmost transaction.
    fn group(&mut self) -> Option<TransactionId> {
        let mut count = 0;
        let mut entries = self.undo_stack.iter();
        if let Some(mut entry) = entries.next_back() {
            while let Some(prev_entry) = entries.next_back() {
                if !prev_entry.suppress_grouping
                    && entry.first_edit_at - prev_entry.last_edit_at < self.group_interval
                {
                    entry = prev_entry;
                    count += 1;
                } else {
                    break;
                }
            }
        }
        self.group_trailing(count)
    }

    /// Merges all trailing entries newer than `transaction_id` into it,
    /// stopping early at any grouping-suppressed entry.
    fn group_until(&mut self, transaction_id: TransactionId) {
        let mut count = 0;
        for entry in self.undo_stack.iter().rev() {
            if entry.transaction_id() == transaction_id {
                self.group_trailing(count);
                break;
            } else if entry.suppress_grouping {
                break;
            } else {
                count += 1;
            }
        }
    }

    /// Folds the last `n` undo-stack entries into the entry preceding them.
    /// Callers guarantee `n` is at most `undo_stack.len()`.
    fn group_trailing(&mut self, n: usize) -> Option<TransactionId> {
        let new_len = self.undo_stack.len() - n;
        let (entries_to_keep, entries_to_merge) = self.undo_stack.split_at_mut(new_len);
        if let Some(last_entry) = entries_to_keep.last_mut() {
            for entry in &*entries_to_merge {
                for edit_id in &entry.transaction.edit_ids {
                    last_entry.transaction.edit_ids.push(*edit_id);
                }
            }

            if let Some(entry) = entries_to_merge.last_mut() {
                last_entry.last_edit_at = entry.last_edit_at;
            }
        }

        self.undo_stack.truncate(new_len);
        self.undo_stack.last().map(|e| e.transaction.id)
    }

    /// Marks the newest transaction as a grouping boundary so later edits
    /// can't merge into it, and returns it.
    fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
        self.undo_stack.last_mut().map(|entry| {
            entry.transaction.edit_ids.shrink_to_fit();
            entry.suppress_grouping = true;
            &entry.transaction
        })
    }

    /// Pushes an externally-constructed transaction onto the undo stack.
    /// Must be called outside any open transaction.
    fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
        assert_eq!(self.transaction_depth, 0);
        self.undo_stack.push(HistoryEntry {
            transaction,
            first_edit_at: now,
            last_edit_at: now,
            suppress_grouping: false,
        });
    }

    /// Differs from `push_transaction` in that it does not clear the redo
    /// stack. Intended to be used to create a parent transaction to merge
    /// potential child transactions into.
    ///
    /// The caller is responsible for removing it from the undo history using
    /// `forget_transaction` if no edits are merged into it. Otherwise, if edits
    /// are merged into this transaction, the caller is responsible for ensuring
    /// the redo stack is cleared. The easiest way to ensure the redo stack is
    /// cleared is to create transactions with the usual `start_transaction` and
    /// `end_transaction` methods and merging the resulting transactions into
    /// the transaction created by this method
    fn push_empty_transaction(
        &mut self,
        start: clock::Global,
        now: Instant,
        clock: &mut clock::Lamport,
    ) -> TransactionId {
        assert_eq!(self.transaction_depth, 0);
        let id = clock.tick();
        let transaction = Transaction {
            id,
            start,
            edit_ids: Vec::new(),
        };
        self.undo_stack.push(HistoryEntry {
            transaction,
            first_edit_at: now,
            last_edit_at: now,
            suppress_grouping: false,
        });
        id
    }

    /// Records `op_id` in the current (innermost open) transaction, but only
    /// if it refers to an edit operation already stored via `push`.
    fn push_undo(&mut self, op_id: clock::Lamport) {
        assert_ne!(self.transaction_depth, 0);
        if let Some(Operation::Edit(_)) = self.operations.get(&op_id) {
            let last_transaction = self.undo_stack.last_mut().unwrap();
            last_transaction.transaction.edit_ids.push(op_id);
        }
    }

    /// Moves the newest undo entry to the redo stack and returns it.
    fn pop_undo(&mut self) -> Option<&HistoryEntry> {
        assert_eq!(self.transaction_depth, 0);
        if let Some(entry) = self.undo_stack.pop() {
            self.redo_stack.push(entry);
            self.redo_stack.last()
        } else {
            None
        }
    }

    /// Moves the newest undo entry with the given id to the redo stack and
    /// returns it, or `None` if the id isn't on the undo stack.
    fn remove_from_undo(&mut self, transaction_id: TransactionId) -> Option<&HistoryEntry> {
        assert_eq!(self.transaction_depth, 0);

        let entry_ix = self
            .undo_stack
            .iter()
            .rposition(|entry| entry.transaction.id == transaction_id)?;
        let entry = self.undo_stack.remove(entry_ix);
        self.redo_stack.push(entry);
        self.redo_stack.last()
    }

    /// Moves every undo entry from the given transaction onward (newest last)
    /// onto the redo stack, returning the moved entries.
    fn remove_from_undo_until(&mut self, transaction_id: TransactionId) -> &[HistoryEntry] {
        assert_eq!(self.transaction_depth, 0);

        let redo_stack_start_len = self.redo_stack.len();
        if let Some(entry_ix) = self
            .undo_stack
            .iter()
            .rposition(|entry| entry.transaction.id == transaction_id)
        {
            self.redo_stack
                .extend(self.undo_stack.drain(entry_ix..).rev());
        }
        &self.redo_stack[redo_stack_start_len..]
    }

    /// Removes the transaction from whichever stack holds it and returns it,
    /// dropping its undo/redo history entry entirely.
    fn forget(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
        assert_eq!(self.transaction_depth, 0);
        if let Some(entry_ix) = self
            .undo_stack
            .iter()
            .rposition(|entry| entry.transaction.id == transaction_id)
        {
            Some(self.undo_stack.remove(entry_ix).transaction)
        } else if let Some(entry_ix) = self
            .redo_stack
            .iter()
            .rposition(|entry| entry.transaction.id == transaction_id)
        {
            Some(self.redo_stack.remove(entry_ix).transaction)
        } else {
            None
        }
    }

    /// Looks up a transaction on either stack (undo first, then redo).
    fn transaction(&self, transaction_id: TransactionId) -> Option<&Transaction> {
        let entry = self
            .undo_stack
            .iter()
            .rfind(|entry| entry.transaction.id == transaction_id)
            .or_else(|| {
                self.redo_stack
                    .iter()
                    .rfind(|entry| entry.transaction.id == transaction_id)
            })?;
        Some(&entry.transaction)
    }

    /// Mutable variant of `transaction`.
    fn transaction_mut(&mut self, transaction_id: TransactionId) -> Option<&mut Transaction> {
        let entry = self
            .undo_stack
            .iter_mut()
            .rfind(|entry| entry.transaction.id == transaction_id)
            .or_else(|| {
                self.redo_stack
                    .iter_mut()
                    .rfind(|entry| entry.transaction.id == transaction_id)
            })?;
        Some(&mut entry.transaction)
    }

    /// Moves `transaction`'s edits into `destination`.
    /// NOTE(review): if `destination` is not found, the forgotten
    /// transaction's edit ids are silently dropped — confirm intentional.
    fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
        if let Some(transaction) = self.forget(transaction)
            && let Some(destination) = self.transaction_mut(destination)
        {
            destination.edit_ids.extend(transaction.edit_ids);
        }
    }

    /// Moves the newest redo entry back to the undo stack and returns it.
    fn pop_redo(&mut self) -> Option<&HistoryEntry> {
        assert_eq!(self.transaction_depth, 0);
        if let Some(entry) = self.redo_stack.pop() {
            self.undo_stack.push(entry);
            self.undo_stack.last()
        } else {
            None
        }
    }

    /// Moves every redo entry from the given transaction onward back onto the
    /// undo stack, returning the moved entries.
    fn remove_from_redo(&mut self, transaction_id: TransactionId) -> &[HistoryEntry] {
        assert_eq!(self.transaction_depth, 0);

        let undo_stack_start_len = self.undo_stack.len();
        if let Some(entry_ix) = self
            .redo_stack
            .iter()
            .rposition(|entry| entry.transaction.id == transaction_id)
        {
            self.undo_stack
                .extend(self.redo_stack.drain(entry_ix..).rev());
        }
        &self.undo_stack[undo_stack_start_len..]
    }
}
512
/// Cursor state for walking the edits made since a given version (`since`),
/// filtering fragments with `F` and reporting positions in dimension `D`.
struct Edits<'a, D: TextDimension, F: FnMut(&FragmentSummary) -> bool> {
    visible_cursor: rope::Cursor<'a>,
    deleted_cursor: rope::Cursor<'a>,
    fragments_cursor: Option<FilterCursor<'a, 'static, F, Fragment, FragmentTextSummary>>,
    undos: &'a UndoMap,
    since: &'a clock::Global,
    // Position reached so far in the old (as-of-`since`) coordinate space.
    old_end: D,
    // Position reached so far in the current coordinate space.
    new_end: D,
    range: Range<(&'a Locator, u32)>,
    buffer_id: BufferId,
}
524
/// A single contiguous edit: the `old` range it replaced and the `new` range
/// that replaced it, both expressed in coordinates of type `D`.
#[derive(Clone, Debug, Default, Eq, PartialEq)]
pub struct Edit<D> {
    pub old: Range<D>,
    pub new: Range<D>,
}
530impl<D> Edit<D>
531where
532 D: PartialEq,
533{
534 pub fn is_empty(&self) -> bool {
535 self.old.start == self.old.end && self.new.start == self.new.end
536 }
537}
538
impl<D, DDelta> Edit<D>
where
    D: Sub<D, Output = DDelta> + Copy,
{
    /// Length of the replaced (old) range.
    pub fn old_len(&self) -> DDelta {
        self.old.end - self.old.start
    }

    /// Length of the replacement (new) range.
    pub fn new_len(&self) -> DDelta {
        self.new.end - self.new.start
    }
}
551
552impl<D1, D2> Edit<(D1, D2)> {
553 pub fn flatten(self) -> (Edit<D1>, Edit<D2>) {
554 (
555 Edit {
556 old: self.old.start.0..self.old.end.0,
557 new: self.new.start.0..self.new.end.0,
558 },
559 Edit {
560 old: self.old.start.1..self.old.end.1,
561 new: self.new.start.1..self.new.end.1,
562 },
563 )
564 }
565}
566
/// A contiguous piece of a single insertion, positioned in the document by `id`.
#[derive(Eq, PartialEq, Clone, Debug)]
struct Fragment {
    // Locator ordering this fragment within the fragment tree.
    id: Locator,
    // Timestamp of the insertion this fragment came from.
    timestamp: clock::Lamport,
    // Offset of this fragment within its original insertion.
    insertion_offset: u32,
    // Length of this fragment.
    len: u32,
    // False once this fragment has been deleted.
    visible: bool,
    // The edits that deleted this fragment, if any.
    deletions: SmallVec<[clock::Lamport; 2]>,
    max_undos: clock::Global,
}
577
/// Aggregate data maintained by the fragment `SumTree` for each subtree.
#[derive(Eq, PartialEq, Clone, Debug)]
struct FragmentSummary {
    // Total visible/deleted text lengths in the subtree.
    text: FragmentTextSummary,
    // Greatest fragment locator in the subtree.
    max_id: Locator,
    max_version: clock::Global,
    min_insertion_version: clock::Global,
    max_insertion_version: clock::Global,
}
586
/// Running totals of visible and deleted text lengths across fragments.
#[derive(Copy, Default, Clone, Debug, PartialEq, Eq)]
struct FragmentTextSummary {
    visible: usize,
    deleted: usize,
}
592
593impl<'a> sum_tree::Dimension<'a, FragmentSummary> for FragmentTextSummary {
594 fn zero(_: &Option<clock::Global>) -> Self {
595 Default::default()
596 }
597
598 fn add_summary(&mut self, summary: &'a FragmentSummary, _: &Option<clock::Global>) {
599 self.visible += summary.text.visible;
600 self.deleted += summary.text.deleted;
601 }
602}
603
/// Secondary-index entry mapping a point within an insertion (timestamp +
/// split offset) back to the locator of the fragment containing it.
#[derive(Eq, PartialEq, Clone, Debug)]
struct InsertionFragment {
    timestamp: clock::Lamport,
    split_offset: u32,
    fragment_id: Locator,
}
610
/// Ordering key for [`InsertionFragment`]: insertion timestamp, then offset.
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)]
struct InsertionFragmentKey {
    timestamp: clock::Lamport,
    split_offset: u32,
}
616
/// An operation that can be applied to a buffer and replicated to other replicas.
#[derive(Clone, Debug, Eq, PartialEq)]
pub enum Operation {
    /// Insertion and/or deletion of text.
    Edit(EditOperation),
    /// An undo of previously-applied edits.
    Undo(UndoOperation),
}
622
/// A replicated edit: ranges to replace and their replacement text.
#[derive(Clone, Debug, Eq, PartialEq)]
pub struct EditOperation {
    /// Lamport timestamp identifying this edit.
    pub timestamp: clock::Lamport,
    /// The buffer version this edit was based on.
    pub version: clock::Global,
    /// The replaced ranges, as full offsets (visible plus deleted text).
    pub ranges: Vec<Range<FullOffset>>,
    /// Replacement text for each range, in the same order as `ranges`.
    pub new_text: Vec<Arc<str>>,
}
630
/// A replicated undo of one or more earlier edits.
#[derive(Clone, Debug, Eq, PartialEq)]
pub struct UndoOperation {
    pub timestamp: clock::Lamport,
    pub version: clock::Global,
    /// Per-edit undo counts keyed by the edit's id.
    pub counts: HashMap<clock::Lamport, u32>,
}
637
/// Stores information about the indentation of a line (tabs and spaces).
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub struct LineIndent {
    /// Number of leading tab characters.
    pub tabs: u32,
    /// Number of leading space characters.
    pub spaces: u32,
    /// True when the line contains nothing but whitespace.
    pub line_blank: bool,
}
645
646impl LineIndent {
647 pub fn from_chunks(chunks: &mut Chunks) -> Self {
648 let mut tabs = 0;
649 let mut spaces = 0;
650 let mut line_blank = true;
651
652 'outer: while let Some(chunk) = chunks.peek() {
653 for ch in chunk.chars() {
654 if ch == '\t' {
655 tabs += 1;
656 } else if ch == ' ' {
657 spaces += 1;
658 } else {
659 if ch != '\n' {
660 line_blank = false;
661 }
662 break 'outer;
663 }
664 }
665
666 chunks.next();
667 }
668
669 Self {
670 tabs,
671 spaces,
672 line_blank,
673 }
674 }
675
676 /// Constructs a new `LineIndent` which only contains spaces.
677 pub fn spaces(spaces: u32) -> Self {
678 Self {
679 tabs: 0,
680 spaces,
681 line_blank: true,
682 }
683 }
684
685 /// Constructs a new `LineIndent` which only contains tabs.
686 pub fn tabs(tabs: u32) -> Self {
687 Self {
688 tabs,
689 spaces: 0,
690 line_blank: true,
691 }
692 }
693
694 /// Indicates whether the line is empty.
695 pub fn is_line_empty(&self) -> bool {
696 self.tabs == 0 && self.spaces == 0 && self.line_blank
697 }
698
699 /// Indicates whether the line is blank (contains only whitespace).
700 pub fn is_line_blank(&self) -> bool {
701 self.line_blank
702 }
703
704 /// Returns the number of indentation characters (tabs or spaces).
705 pub fn raw_len(&self) -> u32 {
706 self.tabs + self.spaces
707 }
708
709 /// Returns the number of indentation characters (tabs or spaces), taking tab size into account.
710 pub fn len(&self, tab_size: u32) -> u32 {
711 self.tabs * tab_size + self.spaces
712 }
713}
714
715impl From<&str> for LineIndent {
716 fn from(value: &str) -> Self {
717 Self::from_iter(value.chars())
718 }
719}
720
721impl FromIterator<char> for LineIndent {
722 fn from_iter<T: IntoIterator<Item = char>>(chars: T) -> Self {
723 let mut tabs = 0;
724 let mut spaces = 0;
725 let mut line_blank = true;
726 for c in chars {
727 if c == '\t' {
728 tabs += 1;
729 } else if c == ' ' {
730 spaces += 1;
731 } else {
732 if c != '\n' {
733 line_blank = false;
734 }
735 break;
736 }
737 }
738 Self {
739 tabs,
740 spaces,
741 line_blank,
742 }
743 }
744}
745
746impl Buffer {
    /// Creates a buffer from `base_text`, detecting its line ending style and
    /// normalizing the text before construction.
    pub fn new(replica_id: ReplicaId, remote_id: BufferId, base_text: impl Into<String>) -> Buffer {
        let mut base_text = base_text.into();
        let line_ending = LineEnding::detect(&base_text);
        LineEnding::normalize(&mut base_text);
        Self::new_normalized(replica_id, remote_id, line_ending, Rope::from(&*base_text))
    }
753
    /// Creates a buffer from text whose line endings are already normalized.
    /// The base text becomes one or more initial fragments, each at most
    /// `MAX_INSERTION_LEN` bytes and split on char boundaries.
    pub fn new_normalized(
        replica_id: ReplicaId,
        remote_id: BufferId,
        line_ending: LineEnding,
        normalized: Rope,
    ) -> Buffer {
        let history = History::new(normalized);
        let mut fragments = SumTree::new(&None);
        let mut insertions = SumTree::default();

        let mut lamport_clock = clock::Lamport::new(replica_id);
        let mut version = clock::Global::new();

        let visible_text = history.base_text.clone();
        if !visible_text.is_empty() {
            // NOTE(review): the base-text insertion is stamped with
            // `ReplicaId::LOCAL` rather than `replica_id` — presumably so
            // every replica derives identical initial fragments; confirm.
            let insertion_timestamp = clock::Lamport::new(ReplicaId::LOCAL);
            lamport_clock.observe(insertion_timestamp);
            version.observe(insertion_timestamp);

            let mut insertion_offset: u32 = 0;
            let mut text_offset: usize = 0;
            let mut prev_locator = Locator::min();

            while text_offset < visible_text.len() {
                // Cap each fragment at MAX_INSERTION_LEN, backing off to a
                // char boundary unless we're taking the final remainder.
                let target_end = visible_text.len().min(text_offset + MAX_INSERTION_LEN);
                let chunk_end = if target_end == visible_text.len() {
                    target_end
                } else {
                    visible_text.floor_char_boundary(target_end)
                };
                let chunk_len = chunk_end - text_offset;

                let fragment_id = Locator::between(&prev_locator, &Locator::max());
                let fragment = Fragment {
                    id: fragment_id.clone(),
                    timestamp: insertion_timestamp,
                    insertion_offset,
                    len: chunk_len as u32,
                    visible: true,
                    deletions: Default::default(),
                    max_undos: Default::default(),
                };
                insertions.push(InsertionFragment::new(&fragment), ());
                fragments.push(fragment, &None);

                prev_locator = fragment_id;
                insertion_offset += chunk_len as u32;
                text_offset = chunk_end;
            }
        }

        Buffer {
            snapshot: BufferSnapshot {
                replica_id,
                remote_id,
                visible_text,
                deleted_text: Rope::new(),
                line_ending,
                fragments,
                insertions,
                version,
                undo_map: Default::default(),
                insertion_slices: Default::default(),
            },
            history,
            deferred_ops: OperationQueue::new(),
            deferred_replicas: HashSet::default(),
            lamport_clock,
            subscriptions: Default::default(),
            edit_id_resolvers: Default::default(),
            wait_for_version_txs: Default::default(),
        }
    }
827
828 pub fn version(&self) -> clock::Global {
829 self.version.clone()
830 }
831
    /// Borrows the buffer's current snapshot.
    pub fn snapshot(&self) -> &BufferSnapshot {
        &self.snapshot
    }
835
    /// Consumes the buffer, yielding its snapshot.
    pub fn into_snapshot(self) -> BufferSnapshot {
        self.snapshot
    }
839
    /// Creates an independent buffer starting from this buffer's current
    /// snapshot, with a fresh history and the branch-local replica id.
    pub fn branch(&self) -> Self {
        Self {
            snapshot: self.snapshot.clone(),
            history: History::new(self.base_text().clone()),
            deferred_ops: OperationQueue::new(),
            deferred_replicas: HashSet::default(),
            lamport_clock: clock::Lamport::new(ReplicaId::LOCAL_BRANCH),
            subscriptions: Default::default(),
            edit_id_resolvers: Default::default(),
            wait_for_version_txs: Default::default(),
        }
    }
852
    /// The replica id this buffer stamps its local operations with.
    pub fn replica_id(&self) -> ReplicaId {
        self.lamport_clock.replica_id
    }
856
    /// The buffer's globally-shared id.
    pub fn remote_id(&self) -> BufferId {
        self.remote_id
    }
860
    /// Number of remote operations waiting to become causally ready.
    pub fn deferred_ops_len(&self) -> usize {
        self.deferred_ops.len()
    }
864
    /// The time window within which adjacent transactions may be grouped for undo.
    pub fn transaction_group_interval(&self) -> Duration {
        self.history.group_interval
    }
868
    /// Applies a set of local edits inside an implicit transaction, records
    /// them in history/version, and returns the resulting operation for
    /// replication.
    ///
    /// NOTE(review): `edits` must be non-empty — `apply_local_edit` unwraps
    /// the first edit and would panic on an empty iterator.
    pub fn edit<R, I, S, T>(&mut self, edits: R) -> Operation
    where
        R: IntoIterator<IntoIter = I>,
        I: ExactSizeIterator<Item = (Range<S>, T)>,
        S: ToOffset,
        T: Into<Arc<str>>,
    {
        let edits = edits
            .into_iter()
            .map(|(range, new_text)| (range, new_text.into()));

        self.start_transaction();
        let timestamp = self.lamport_clock.tick();
        let operation = Operation::Edit(self.apply_local_edit(edits, timestamp));

        self.history.push(operation.clone());
        self.history.push_undo(operation.timestamp());
        self.snapshot.version.observe(operation.timestamp());
        self.end_transaction();
        operation
    }
890
    /// Applies local edits by rebuilding the fragment tree and the visible /
    /// deleted ropes in a single left-to-right pass, returning the
    /// `EditOperation` to broadcast. Edits are consumed in order; their ranges
    /// are assumed sorted and non-overlapping (the fragment cursor only moves
    /// forward).
    ///
    /// NOTE(review): the iterator must be non-empty — `edits.peek().unwrap()`
    /// below panics otherwise.
    fn apply_local_edit<S: ToOffset, T: Into<Arc<str>>>(
        &mut self,
        edits: impl ExactSizeIterator<Item = (Range<S>, T)>,
        timestamp: clock::Lamport,
    ) -> EditOperation {
        let mut edits_patch = Patch::default();
        let mut edit_op = EditOperation {
            timestamp,
            version: self.version(),
            ranges: Vec::with_capacity(edits.len()),
            new_text: Vec::with_capacity(edits.len()),
        };
        let mut new_insertions = Vec::new();
        let mut insertion_offset: u32 = 0;
        let mut insertion_slices = Vec::new();

        // Resolve each edit's range to offsets in the current snapshot.
        let mut edits = edits
            .map(|(range, new_text)| (range.to_offset(&*self), new_text))
            .peekable();

        let mut new_ropes =
            RopeBuilder::new(self.visible_text.cursor(0), self.deleted_text.cursor(0));
        let mut old_fragments = self.fragments.cursor::<FragmentTextSummary>(&None);
        // Reuse, untouched, every fragment that ends before the first edit.
        let mut new_fragments = old_fragments.slice(&edits.peek().unwrap().0.start, Bias::Right);
        new_ropes.append(new_fragments.summary().text);

        let mut fragment_start = old_fragments.start().visible;
        for (range, new_text) in edits {
            let new_text = LineEnding::normalize_arc(new_text.into());
            let fragment_end = old_fragments.end().visible;

            // If the current fragment ends before this range, then jump ahead to the first fragment
            // that extends past the start of this range, reusing any intervening fragments.
            if fragment_end < range.start {
                // If the current fragment has been partially consumed, then consume the rest of it
                // and advance to the next fragment before slicing.
                if fragment_start > old_fragments.start().visible {
                    if fragment_end > fragment_start {
                        let mut suffix = old_fragments.item().unwrap().clone();
                        suffix.len = (fragment_end - fragment_start) as u32;
                        suffix.insertion_offset +=
                            (fragment_start - old_fragments.start().visible) as u32;
                        new_insertions.push(InsertionFragment::insert_new(&suffix));
                        new_ropes.push_fragment(&suffix, suffix.visible);
                        new_fragments.push(suffix, &None);
                    }
                    old_fragments.next();
                }

                let slice = old_fragments.slice(&range.start, Bias::Right);
                new_ropes.append(slice.summary().text);
                new_fragments.append(slice, &None);
                fragment_start = old_fragments.start().visible;
            }

            // Record the range start in full-offset space (visible + deleted).
            let full_range_start = FullOffset(range.start + old_fragments.start().deleted);

            // Preserve any portion of the current fragment that precedes this range.
            if fragment_start < range.start {
                let mut prefix = old_fragments.item().unwrap().clone();
                prefix.len = (range.start - fragment_start) as u32;
                prefix.insertion_offset += (fragment_start - old_fragments.start().visible) as u32;
                prefix.id = Locator::between(&new_fragments.summary().max_id, &prefix.id);
                new_insertions.push(InsertionFragment::insert_new(&prefix));
                new_ropes.push_fragment(&prefix, prefix.visible);
                new_fragments.push(prefix, &None);
                fragment_start = range.start;
            }

            // Insert the new text before any existing fragments within the range.
            if !new_text.is_empty() {
                let new_start = new_fragments.summary().text.visible;

                let next_fragment_id = old_fragments
                    .item()
                    .map_or(Locator::max_ref(), |old_fragment| &old_fragment.id);
                Self::push_fragments_for_insertion(
                    new_text.as_ref(),
                    timestamp,
                    &mut insertion_offset,
                    &mut new_fragments,
                    &mut new_insertions,
                    &mut insertion_slices,
                    &mut new_ropes,
                    next_fragment_id,
                    timestamp,
                );
                edits_patch.push(Edit {
                    old: fragment_start..fragment_start,
                    new: new_start..new_start + new_text.len(),
                });
            }

            // Advance through every fragment that intersects this range, marking the intersecting
            // portions as deleted.
            while fragment_start < range.end {
                let fragment = old_fragments.item().unwrap();
                let fragment_end = old_fragments.end().visible;
                let mut intersection = fragment.clone();
                let intersection_end = cmp::min(range.end, fragment_end);
                if fragment.visible {
                    intersection.len = (intersection_end - fragment_start) as u32;
                    intersection.insertion_offset +=
                        (fragment_start - old_fragments.start().visible) as u32;
                    intersection.id =
                        Locator::between(&new_fragments.summary().max_id, &intersection.id);
                    intersection.deletions.push(timestamp);
                    intersection.visible = false;
                }
                if intersection.len > 0 {
                    if fragment.visible && !intersection.visible {
                        let new_start = new_fragments.summary().text.visible;
                        edits_patch.push(Edit {
                            old: fragment_start..intersection_end,
                            new: new_start..new_start,
                        });
                        insertion_slices
                            .push(InsertionSlice::from_fragment(timestamp, &intersection));
                    }
                    new_insertions.push(InsertionFragment::insert_new(&intersection));
                    new_ropes.push_fragment(&intersection, fragment.visible);
                    new_fragments.push(intersection, &None);
                    fragment_start = intersection_end;
                }
                if fragment_end <= range.end {
                    old_fragments.next();
                }
            }

            let full_range_end = FullOffset(range.end + old_fragments.start().deleted);
            edit_op.ranges.push(full_range_start..full_range_end);
            edit_op.new_text.push(new_text);
        }

        // If the current fragment has been partially consumed, then consume the rest of it
        // and advance to the next fragment before slicing.
        if fragment_start > old_fragments.start().visible {
            let fragment_end = old_fragments.end().visible;
            if fragment_end > fragment_start {
                let mut suffix = old_fragments.item().unwrap().clone();
                suffix.len = (fragment_end - fragment_start) as u32;
                suffix.insertion_offset += (fragment_start - old_fragments.start().visible) as u32;
                new_insertions.push(InsertionFragment::insert_new(&suffix));
                new_ropes.push_fragment(&suffix, suffix.visible);
                new_fragments.push(suffix, &None);
            }
            old_fragments.next();
        }

        // Reuse all fragments after the last edit, then commit the new state.
        let suffix = old_fragments.suffix();
        new_ropes.append(suffix.summary().text);
        new_fragments.append(suffix, &None);
        let (visible_text, deleted_text) = new_ropes.finish();
        drop(old_fragments);

        self.snapshot.fragments = new_fragments;
        self.snapshot.insertions.edit(new_insertions, ());
        self.snapshot.visible_text = visible_text;
        self.snapshot.deleted_text = deleted_text;
        self.subscriptions.publish_mut(&edits_patch);
        self.snapshot.insertion_slices.extend(insertion_slices);
        edit_op
    }
1054
    /// Overrides the line-ending style recorded in the snapshot.
    pub fn set_line_ending(&mut self, line_ending: LineEnding) {
        self.snapshot.line_ending = line_ending;
    }
1058
    /// Records and applies a batch of (typically remote) operations. Ops that
    /// are not yet causally ready are deferred; afterwards any newly-ready
    /// deferred ops are flushed.
    pub fn apply_ops<I: IntoIterator<Item = Operation>>(&mut self, ops: I) {
        let mut deferred_ops = Vec::new();
        for op in ops {
            self.history.push(op.clone());
            if self.can_apply_op(&op) {
                self.apply_op(op);
            } else {
                self.deferred_replicas.insert(op.replica_id());
                deferred_ops.push(op);
            }
        }
        self.deferred_ops.insert(deferred_ops);
        self.flush_deferred_ops();
    }
1073
    /// Applies a single causally-ready operation (skipping ones already
    /// observed), then resolves any `wait_for_version` channels the new
    /// version satisfies.
    fn apply_op(&mut self, op: Operation) {
        match op {
            Operation::Edit(edit) => {
                if !self.version.observed(edit.timestamp) {
                    self.apply_remote_edit(
                        &edit.version,
                        &edit.ranges,
                        &edit.new_text,
                        edit.timestamp,
                    );
                    self.snapshot.version.observe(edit.timestamp);
                    self.lamport_clock.observe(edit.timestamp);
                    self.resolve_edit(edit.timestamp);
                }
            }
            Operation::Undo(undo) => {
                if !self.version.observed(undo.timestamp) {
                    self.apply_undo(&undo);
                    self.snapshot.version.observe(undo.timestamp);
                    self.lamport_clock.observe(undo.timestamp);
                }
            }
        }
        // Wake any waiters whose target version is now fully observed.
        self.wait_for_version_txs.retain_mut(|(version, tx)| {
            if self.snapshot.version().observed_all(version) {
                tx.try_send(()).ok();
                false
            } else {
                true
            }
        });
    }
1106
    /// Integrates an edit that originated on another replica.
    ///
    /// `ranges` are expressed in the full coordinate space (visible plus
    /// deleted text) as of `version` — the version the editing replica had
    /// observed when it produced the edit. The fragment tree is rebuilt by
    /// splicing new fragments around the affected ranges; concurrent
    /// insertions at the same position are ordered by lamport timestamp.
    fn apply_remote_edit(
        &mut self,
        version: &clock::Global,
        ranges: &[Range<FullOffset>],
        new_text: &[Arc<str>],
        timestamp: clock::Lamport,
    ) {
        if ranges.is_empty() {
            return;
        }

        let edits = ranges.iter().zip(new_text.iter());
        let mut edits_patch = Patch::default();
        let mut insertion_slices = Vec::new();
        // Seek using the coordinate space the editing replica saw.
        let cx = Some(version.clone());
        let mut new_insertions = Vec::new();
        let mut insertion_offset: u32 = 0;
        let mut new_ropes =
            RopeBuilder::new(self.visible_text.cursor(0), self.deleted_text.cursor(0));
        let mut old_fragments = self
            .fragments
            .cursor::<Dimensions<VersionedFullOffset, usize>>(&cx);
        let mut new_fragments =
            old_fragments.slice(&VersionedFullOffset::Offset(ranges[0].start), Bias::Left);
        new_ropes.append(new_fragments.summary().text);

        let mut fragment_start = old_fragments.start().0.full_offset();
        for (range, new_text) in edits {
            let fragment_end = old_fragments.end().0.full_offset();

            // If the current fragment ends before this range, then jump ahead to the first fragment
            // that extends past the start of this range, reusing any intervening fragments.
            if fragment_end < range.start {
                // If the current fragment has been partially consumed, then consume the rest of it
                // and advance to the next fragment before slicing.
                if fragment_start > old_fragments.start().0.full_offset() {
                    if fragment_end > fragment_start {
                        let mut suffix = old_fragments.item().unwrap().clone();
                        suffix.len = (fragment_end.0 - fragment_start.0) as u32;
                        suffix.insertion_offset +=
                            (fragment_start - old_fragments.start().0.full_offset()) as u32;
                        new_insertions.push(InsertionFragment::insert_new(&suffix));
                        new_ropes.push_fragment(&suffix, suffix.visible);
                        new_fragments.push(suffix, &None);
                    }
                    old_fragments.next();
                }

                let slice =
                    old_fragments.slice(&VersionedFullOffset::Offset(range.start), Bias::Left);
                new_ropes.append(slice.summary().text);
                new_fragments.append(slice, &None);
                fragment_start = old_fragments.start().0.full_offset();
            }

            // If we are at the end of a non-concurrent fragment, advance to the next one.
            let fragment_end = old_fragments.end().0.full_offset();
            if fragment_end == range.start && fragment_end > fragment_start {
                let mut fragment = old_fragments.item().unwrap().clone();
                fragment.len = (fragment_end.0 - fragment_start.0) as u32;
                fragment.insertion_offset +=
                    (fragment_start - old_fragments.start().0.full_offset()) as u32;
                new_insertions.push(InsertionFragment::insert_new(&fragment));
                new_ropes.push_fragment(&fragment, fragment.visible);
                new_fragments.push(fragment, &None);
                old_fragments.next();
                fragment_start = old_fragments.start().0.full_offset();
            }

            // Skip over insertions that are concurrent to this edit, but have a lower lamport
            // timestamp.
            while let Some(fragment) = old_fragments.item() {
                if fragment_start == range.start && fragment.timestamp > timestamp {
                    new_ropes.push_fragment(fragment, fragment.visible);
                    new_fragments.push(fragment.clone(), &None);
                    old_fragments.next();
                    debug_assert_eq!(fragment_start, range.start);
                } else {
                    break;
                }
            }
            debug_assert!(fragment_start <= range.start);

            // Preserve any portion of the current fragment that precedes this range.
            if fragment_start < range.start {
                let mut prefix = old_fragments.item().unwrap().clone();
                prefix.len = (range.start.0 - fragment_start.0) as u32;
                prefix.insertion_offset +=
                    (fragment_start - old_fragments.start().0.full_offset()) as u32;
                prefix.id = Locator::between(&new_fragments.summary().max_id, &prefix.id);
                new_insertions.push(InsertionFragment::insert_new(&prefix));
                fragment_start = range.start;
                new_ropes.push_fragment(&prefix, prefix.visible);
                new_fragments.push(prefix, &None);
            }

            // Insert the new text before any existing fragments within the range.
            if !new_text.is_empty() {
                let mut old_start = old_fragments.start().1;
                if old_fragments.item().is_some_and(|f| f.visible) {
                    old_start += fragment_start.0 - old_fragments.start().0.full_offset().0;
                }
                let new_start = new_fragments.summary().text.visible;
                let next_fragment_id = old_fragments
                    .item()
                    .map_or(Locator::max_ref(), |old_fragment| &old_fragment.id);
                Self::push_fragments_for_insertion(
                    new_text,
                    timestamp,
                    &mut insertion_offset,
                    &mut new_fragments,
                    &mut new_insertions,
                    &mut insertion_slices,
                    &mut new_ropes,
                    next_fragment_id,
                    timestamp,
                );
                edits_patch.push(Edit {
                    old: old_start..old_start,
                    new: new_start..new_start + new_text.len(),
                });
            }

            // Advance through every fragment that intersects this range, marking the intersecting
            // portions as deleted.
            while fragment_start < range.end {
                let fragment = old_fragments.item().unwrap();
                let fragment_end = old_fragments.end().0.full_offset();
                let mut intersection = fragment.clone();
                let intersection_end = cmp::min(range.end, fragment_end);
                // Only delete text the editing replica had actually observed;
                // concurrent insertions it hadn't seen are left intact.
                if version.observed(fragment.timestamp) {
                    intersection.len = (intersection_end.0 - fragment_start.0) as u32;
                    intersection.insertion_offset +=
                        (fragment_start - old_fragments.start().0.full_offset()) as u32;
                    intersection.id =
                        Locator::between(&new_fragments.summary().max_id, &intersection.id);
                    if fragment.was_visible(version, &self.undo_map) {
                        intersection.deletions.push(timestamp);
                        intersection.visible = false;
                        insertion_slices
                            .push(InsertionSlice::from_fragment(timestamp, &intersection));
                    }
                }
                if intersection.len > 0 {
                    if fragment.visible && !intersection.visible {
                        let old_start = old_fragments.start().1
                            + (fragment_start.0 - old_fragments.start().0.full_offset().0);
                        let new_start = new_fragments.summary().text.visible;
                        edits_patch.push(Edit {
                            old: old_start..old_start + intersection.len as usize,
                            new: new_start..new_start,
                        });
                    }
                    new_insertions.push(InsertionFragment::insert_new(&intersection));
                    new_ropes.push_fragment(&intersection, fragment.visible);
                    new_fragments.push(intersection, &None);
                    fragment_start = intersection_end;
                }
                if fragment_end <= range.end {
                    old_fragments.next();
                }
            }
        }

        // If the current fragment has been partially consumed, then consume the rest of it
        // and advance to the next fragment before slicing.
        if fragment_start > old_fragments.start().0.full_offset() {
            let fragment_end = old_fragments.end().0.full_offset();
            if fragment_end > fragment_start {
                let mut suffix = old_fragments.item().unwrap().clone();
                suffix.len = (fragment_end.0 - fragment_start.0) as u32;
                suffix.insertion_offset +=
                    (fragment_start - old_fragments.start().0.full_offset()) as u32;
                new_insertions.push(InsertionFragment::insert_new(&suffix));
                new_ropes.push_fragment(&suffix, suffix.visible);
                new_fragments.push(suffix, &None);
            }
            old_fragments.next();
        }

        // Reuse all remaining fragments untouched, then commit the rebuilt
        // trees and ropes to the snapshot and notify subscribers.
        let suffix = old_fragments.suffix();
        new_ropes.append(suffix.summary().text);
        new_fragments.append(suffix, &None);
        let (visible_text, deleted_text) = new_ropes.finish();
        drop(old_fragments);

        self.snapshot.fragments = new_fragments;
        self.snapshot.visible_text = visible_text;
        self.snapshot.deleted_text = deleted_text;
        self.snapshot.insertions.edit(new_insertions, ());
        self.snapshot.insertion_slices.extend(insertion_slices);
        self.subscriptions.publish_mut(&edits_patch)
    }
1300
    /// Appends fragments for newly inserted text, splitting the text into
    /// chunks of at most `MAX_INSERTION_LEN` bytes (split points snapped back
    /// to char boundaries) so fragment-relative offsets fit in a `u32`.
    ///
    /// Advances `insertion_offset`, records each chunk in the fragment and
    /// insertion trees plus the undo-bookkeeping slice list, and finally
    /// pushes the whole text onto the visible rope.
    fn push_fragments_for_insertion(
        new_text: &str,
        timestamp: clock::Lamport,
        insertion_offset: &mut u32,
        new_fragments: &mut SumTree<Fragment>,
        new_insertions: &mut Vec<sum_tree::Edit<InsertionFragment>>,
        insertion_slices: &mut Vec<InsertionSlice>,
        new_ropes: &mut RopeBuilder,
        next_fragment_id: &Locator,
        edit_timestamp: clock::Lamport,
    ) {
        let mut text_offset = 0;
        while text_offset < new_text.len() {
            let target_end = new_text.len().min(text_offset + MAX_INSERTION_LEN);
            let chunk_end = if target_end == new_text.len() {
                target_end
            } else {
                // Never split inside a multi-byte character.
                new_text.floor_char_boundary(target_end)
            };
            // Guard against a zero-length chunk; bail rather than loop forever.
            if chunk_end == text_offset {
                break;
            }
            let chunk_len = chunk_end - text_offset;

            let fragment = Fragment {
                id: Locator::between(&new_fragments.summary().max_id, next_fragment_id),
                timestamp,
                insertion_offset: *insertion_offset,
                len: chunk_len as u32,
                deletions: Default::default(),
                max_undos: Default::default(),
                visible: true,
            };
            insertion_slices.push(InsertionSlice::from_fragment(edit_timestamp, &fragment));
            new_insertions.push(InsertionFragment::insert_new(&fragment));
            new_fragments.push(fragment, &None);

            *insertion_offset += chunk_len as u32;
            text_offset = chunk_end;
        }
        new_ropes.push_str(new_text);
    }
1343
    /// Returns the locators of every fragment that was inserted or deleted by
    /// the given edits, sorted in fragment order.
    fn fragment_ids_for_edits<'a>(
        &'a self,
        edit_ids: impl Iterator<Item = &'a clock::Lamport>,
    ) -> Vec<&'a Locator> {
        // Get all of the insertion slices changed by the given edits.
        let mut insertion_slices = Vec::new();
        for edit_id in edit_ids {
            // Probe with a key that sorts at the start of this edit's slices.
            let insertion_slice = InsertionSlice {
                edit_id_value: edit_id.value,
                edit_id_replica_id: edit_id.replica_id,
                insertion_id_value: Lamport::MIN.value,
                insertion_id_replica_id: Lamport::MIN.replica_id,
                range: 0..0,
            };
            let slices = self
                .snapshot
                .insertion_slices
                .iter_from(&insertion_slice)
                .take_while(|slice| {
                    Lamport {
                        value: slice.edit_id_value,
                        replica_id: slice.edit_id_replica_id,
                    } == *edit_id
                });
            insertion_slices.extend(slices)
        }
        // Order by insertion id, then by start ascending / end descending so
        // enclosing slices come before the slices they contain.
        insertion_slices.sort_unstable_by_key(|s| {
            (
                Lamport {
                    value: s.insertion_id_value,
                    replica_id: s.insertion_id_replica_id,
                },
                s.range.start,
                Reverse(s.range.end),
            )
        });

        // Get all of the fragments corresponding to these insertion slices.
        let mut fragment_ids = Vec::new();
        let mut insertions_cursor = self.insertions.cursor::<InsertionFragmentKey>(());
        for insertion_slice in &insertion_slices {
            let insertion_id = Lamport {
                value: insertion_slice.insertion_id_value,
                replica_id: insertion_slice.insertion_id_replica_id,
            };
            // Only seek when the cursor isn't already positioned inside this
            // slice, so overlapping slices don't rewind the cursor.
            if insertion_id != insertions_cursor.start().timestamp
                || insertion_slice.range.start > insertions_cursor.start().split_offset
            {
                insertions_cursor.seek_forward(
                    &InsertionFragmentKey {
                        timestamp: insertion_id,
                        split_offset: insertion_slice.range.start,
                    },
                    Bias::Left,
                );
            }
            while let Some(item) = insertions_cursor.item() {
                if item.timestamp != insertion_id || item.split_offset >= insertion_slice.range.end
                {
                    break;
                }
                fragment_ids.push(&item.fragment_id);
                insertions_cursor.next();
            }
        }
        fragment_ids.sort_unstable();
        fragment_ids
    }
1412
    /// Applies an undo (or redo) operation: records its undo counts in the
    /// undo map, then rebuilds the fragment tree and ropes, toggling the
    /// visibility of every fragment touched by the undone edits.
    fn apply_undo(&mut self, undo: &UndoOperation) {
        self.snapshot.undo_map.insert(undo);

        let mut edits = Patch::default();
        let mut old_fragments = self
            .fragments
            .cursor::<Dimensions<Option<&Locator>, usize>>(&None);
        let mut new_fragments = SumTree::new(&None);
        let mut new_ropes =
            RopeBuilder::new(self.visible_text.cursor(0), self.deleted_text.cursor(0));

        for fragment_id in self.fragment_ids_for_edits(undo.counts.keys()) {
            // Copy everything between the previous affected fragment and this
            // one unchanged.
            let preceding_fragments = old_fragments.slice(&Some(fragment_id), Bias::Left);
            new_ropes.append(preceding_fragments.summary().text);
            new_fragments.append(preceding_fragments, &None);

            if let Some(fragment) = old_fragments.item() {
                let mut fragment = fragment.clone();
                let fragment_was_visible = fragment.visible;

                // Recompute visibility from the updated undo map.
                fragment.visible = fragment.is_visible(&self.undo_map);
                fragment.max_undos.observe(undo.timestamp);

                let old_start = old_fragments.start().1;
                let new_start = new_fragments.summary().text.visible;
                if fragment_was_visible && !fragment.visible {
                    edits.push(Edit {
                        old: old_start..old_start + fragment.len as usize,
                        new: new_start..new_start,
                    });
                } else if !fragment_was_visible && fragment.visible {
                    edits.push(Edit {
                        old: old_start..old_start,
                        new: new_start..new_start + fragment.len as usize,
                    });
                }
                new_ropes.push_fragment(&fragment, fragment_was_visible);
                new_fragments.push(fragment, &None);

                old_fragments.next();
            }
        }

        let suffix = old_fragments.suffix();
        new_ropes.append(suffix.summary().text);
        new_fragments.append(suffix, &None);

        drop(old_fragments);
        let (visible_text, deleted_text) = new_ropes.finish();
        self.snapshot.fragments = new_fragments;
        self.snapshot.visible_text = visible_text;
        self.snapshot.deleted_text = deleted_text;
        self.subscriptions.publish_mut(&edits);
    }
1467
1468 fn flush_deferred_ops(&mut self) {
1469 self.deferred_replicas.clear();
1470 let mut deferred_ops = Vec::new();
1471 for op in self.deferred_ops.drain().iter().cloned() {
1472 if self.can_apply_op(&op) {
1473 self.apply_op(op);
1474 } else {
1475 self.deferred_replicas.insert(op.replica_id());
1476 deferred_ops.push(op);
1477 }
1478 }
1479 self.deferred_ops.insert(deferred_ops);
1480 }
1481
1482 fn can_apply_op(&self, op: &Operation) -> bool {
1483 if self.deferred_replicas.contains(&op.replica_id()) {
1484 false
1485 } else {
1486 self.version.observed_all(match op {
1487 Operation::Edit(edit) => &edit.version,
1488 Operation::Undo(undo) => &undo.version,
1489 })
1490 }
1491 }
1492
    /// Whether any operations are still waiting on unobserved causal
    /// dependencies before they can be applied.
    pub fn has_deferred_ops(&self) -> bool {
        !self.deferred_ops.is_empty()
    }

    /// The history entry that would be undone next, if any.
    pub fn peek_undo_stack(&self) -> Option<&HistoryEntry> {
        self.history.undo_stack.last()
    }

    /// The history entry that would be redone next, if any.
    pub fn peek_redo_stack(&self) -> Option<&HistoryEntry> {
        self.history.redo_stack.last()
    }

    /// Starts a transaction timestamped with the current instant.
    /// See `start_transaction_at`.
    pub fn start_transaction(&mut self) -> Option<TransactionId> {
        self.start_transaction_at(Instant::now())
    }

    /// Starts a transaction with an explicit start time, returning the id of
    /// the newly started transaction when the history creates one.
    pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
        self.history
            .start_transaction(self.version.clone(), now, &mut self.lamport_clock)
    }
1513
1514 pub fn end_transaction(&mut self) -> Option<(TransactionId, clock::Global)> {
1515 self.end_transaction_at(Instant::now())
1516 }
1517
1518 pub fn end_transaction_at(&mut self, now: Instant) -> Option<(TransactionId, clock::Global)> {
1519 if let Some(entry) = self.history.end_transaction(now) {
1520 let since = entry.transaction.start.clone();
1521 let id = self.history.group().unwrap();
1522 Some((id, since))
1523 } else {
1524 None
1525 }
1526 }
1527
    /// Prevents the most recent transaction from being grouped with
    /// subsequent ones, returning it if it exists.
    pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
        self.history.finalize_last_transaction()
    }

    /// Groups history entries back to (and including) `transaction_id` into
    /// a single undoable entry.
    pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
        self.history.group_until(transaction_id);
    }

    /// The text this buffer was created with, before any edits.
    pub fn base_text(&self) -> &Rope {
        &self.history.base_text
    }

    /// All operations recorded in this buffer's history, keyed by their
    /// lamport timestamp.
    pub fn operations(&self) -> &TreeMap<clock::Lamport, Operation> {
        &self.history.operations
    }
1543
1544 pub fn undo(&mut self) -> Option<(TransactionId, Operation)> {
1545 if let Some(entry) = self.history.pop_undo() {
1546 let transaction = entry.transaction.clone();
1547 let transaction_id = transaction.id;
1548 let op = self.undo_or_redo(transaction);
1549 Some((transaction_id, op))
1550 } else {
1551 None
1552 }
1553 }
1554
1555 pub fn undo_transaction(&mut self, transaction_id: TransactionId) -> Option<Operation> {
1556 let transaction = self
1557 .history
1558 .remove_from_undo(transaction_id)?
1559 .transaction
1560 .clone();
1561 Some(self.undo_or_redo(transaction))
1562 }
1563
1564 pub fn undo_to_transaction(&mut self, transaction_id: TransactionId) -> Vec<Operation> {
1565 let transactions = self
1566 .history
1567 .remove_from_undo_until(transaction_id)
1568 .iter()
1569 .map(|entry| entry.transaction.clone())
1570 .collect::<Vec<_>>();
1571
1572 transactions
1573 .into_iter()
1574 .map(|transaction| self.undo_or_redo(transaction))
1575 .collect()
1576 }
1577
    /// Removes a transaction from the history without undoing it, returning
    /// the removed transaction if it existed.
    pub fn forget_transaction(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
        self.history.forget(transaction_id)
    }

    /// Looks up a transaction in the history by id.
    pub fn get_transaction(&self, transaction_id: TransactionId) -> Option<&Transaction> {
        self.history.transaction(transaction_id)
    }

    /// Merges the transaction `transaction` into `destination` within the
    /// history.
    pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
        self.history.merge_transactions(transaction, destination);
    }
1589
1590 pub fn redo(&mut self) -> Option<(TransactionId, Operation)> {
1591 if let Some(entry) = self.history.pop_redo() {
1592 let transaction = entry.transaction.clone();
1593 let transaction_id = transaction.id;
1594 let op = self.undo_or_redo(transaction);
1595 Some((transaction_id, op))
1596 } else {
1597 None
1598 }
1599 }
1600
1601 pub fn redo_to_transaction(&mut self, transaction_id: TransactionId) -> Vec<Operation> {
1602 let transactions = self
1603 .history
1604 .remove_from_redo(transaction_id)
1605 .iter()
1606 .map(|entry| entry.transaction.clone())
1607 .collect::<Vec<_>>();
1608
1609 transactions
1610 .into_iter()
1611 .map(|transaction| self.undo_or_redo(transaction))
1612 .collect()
1613 }
1614
1615 fn undo_or_redo(&mut self, transaction: Transaction) -> Operation {
1616 let mut counts = HashMap::default();
1617 for edit_id in transaction.edit_ids {
1618 counts.insert(edit_id, self.undo_map.undo_count(edit_id).saturating_add(1));
1619 }
1620
1621 let operation = self.undo_operations(counts);
1622 self.history.push(operation.clone());
1623 operation
1624 }
1625
    /// Builds an undo operation from explicit per-edit undo counts, applies
    /// it locally, and returns it for broadcast to other replicas.
    pub fn undo_operations(&mut self, counts: HashMap<clock::Lamport, u32>) -> Operation {
        let timestamp = self.lamport_clock.tick();
        // Capture the version *before* observing the new timestamp: it
        // describes what this undo was based on.
        let version = self.version();
        self.snapshot.version.observe(timestamp);
        let undo = UndoOperation {
            timestamp,
            version,
            counts,
        };
        self.apply_undo(&undo);
        Operation::Undo(undo)
    }

    /// Pushes an already-built transaction onto the undo history.
    pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
        self.history.push_transaction(transaction, now);
    }
1642
    /// Differs from `push_transaction` in that it does not clear the redo
    /// stack. Intended to be used to create a parent transaction to merge
    /// potential child transactions into.
    ///
    /// The caller is responsible for removing it from the undo history using
    /// `forget_transaction` if no edits are merged into it. Otherwise, if edits
    /// are merged into this transaction, the caller is responsible for ensuring
    /// the redo stack is cleared. The easiest way to ensure the redo stack is
    /// cleared is to create transactions with the usual `start_transaction` and
    /// `end_transaction` methods and merging the resulting transactions into
    /// the transaction created by this method.
    pub fn push_empty_transaction(&mut self, now: Instant) -> TransactionId {
        self.history
            .push_empty_transaction(self.version.clone(), now, &mut self.lamport_clock)
    }
1660
    /// Yields the current ranges covered by the edits of the transaction with
    /// the given id, in the text dimension `D` (offsets, points, etc.).
    /// Yields nothing if the transaction is not in the history.
    pub fn edited_ranges_for_transaction_id<D>(
        &self,
        transaction_id: TransactionId,
    ) -> impl '_ + Iterator<Item = Range<D>>
    where
        D: TextDimension,
    {
        self.history
            .transaction(transaction_id)
            .into_iter()
            .flat_map(|transaction| self.edited_ranges_for_transaction(transaction))
    }
1673
    /// Yields the ranges currently occupied by the text of the given edits,
    /// merging adjacent ranges and converting them to dimension `D`.
    ///
    /// Works in three lazy stages: fragment lookup → offset ranges,
    /// coalescing of adjacent ranges, then conversion via a rope cursor.
    pub fn edited_ranges_for_edit_ids<'a, D>(
        &'a self,
        edit_ids: impl IntoIterator<Item = &'a clock::Lamport>,
    ) -> impl 'a + Iterator<Item = Range<D>>
    where
        D: TextDimension,
    {
        // get fragment ranges
        let mut cursor = self
            .fragments
            .cursor::<Dimensions<Option<&Locator>, usize>>(&None);
        let offset_ranges = self
            .fragment_ids_for_edits(edit_ids.into_iter())
            .into_iter()
            .filter_map(move |fragment_id| {
                cursor.seek_forward(&Some(fragment_id), Bias::Left);
                let fragment = cursor.item()?;
                let start_offset = cursor.start().1;
                // Deleted fragments contribute an empty range at their position.
                let end_offset = start_offset
                    + if fragment.visible {
                        fragment.len as usize
                    } else {
                        0
                    };
                Some(start_offset..end_offset)
            });

        // combine adjacent ranges
        let mut prev_range: Option<Range<usize>> = None;
        let disjoint_ranges = offset_ranges
            .map(Some)
            .chain([None])
            .filter_map(move |range| {
                if let Some((range, prev_range)) = range.as_ref().zip(prev_range.as_mut())
                    && prev_range.end == range.start
                {
                    prev_range.end = range.end;
                    return None;
                }
                let result = prev_range.clone();
                prev_range = range;
                result
            });

        // convert to the desired text dimension.
        let mut position = D::zero(());
        let mut rope_cursor = self.visible_text.cursor(0);
        disjoint_ranges.map(move |range| {
            position.add_assign(&rope_cursor.summary(range.start));
            let start = position;
            position.add_assign(&rope_cursor.summary(range.end));
            let end = position;
            start..end
        })
    }
1729
    /// Yields the current ranges covered by a transaction's edits in
    /// dimension `D`. See `edited_ranges_for_edit_ids`.
    pub fn edited_ranges_for_transaction<'a, D>(
        &'a self,
        transaction: &'a Transaction,
    ) -> impl 'a + Iterator<Item = Range<D>>
    where
        D: TextDimension,
    {
        self.edited_ranges_for_edit_ids(&transaction.edit_ids)
    }

    /// Registers a new subscriber that will receive a patch for future edits.
    pub fn subscribe(&mut self) -> Subscription<usize> {
        self.subscriptions.subscribe()
    }
1743
1744 pub fn wait_for_edits<It: IntoIterator<Item = clock::Lamport>>(
1745 &mut self,
1746 edit_ids: It,
1747 ) -> impl 'static + Future<Output = Result<()>> + use<It> {
1748 let mut futures = Vec::new();
1749 for edit_id in edit_ids {
1750 if !self.version.observed(edit_id) {
1751 let (tx, rx) = oneshot::channel();
1752 self.edit_id_resolvers.entry(edit_id).or_default().push(tx);
1753 futures.push(rx);
1754 }
1755 }
1756
1757 async move {
1758 for mut future in futures {
1759 if future.recv().await.is_none() {
1760 anyhow::bail!("gave up waiting for edits");
1761 }
1762 }
1763 Ok(())
1764 }
1765 }
1766
1767 pub fn wait_for_anchors<It: IntoIterator<Item = Anchor>>(
1768 &mut self,
1769 anchors: It,
1770 ) -> impl 'static + Future<Output = Result<()>> + use<It> {
1771 let mut futures = Vec::new();
1772 for anchor in anchors {
1773 if !self.version.observed(anchor.timestamp()) && !anchor.is_max() && !anchor.is_min() {
1774 let (tx, rx) = oneshot::channel();
1775 self.edit_id_resolvers
1776 .entry(anchor.timestamp())
1777 .or_default()
1778 .push(tx);
1779 futures.push(rx);
1780 }
1781 }
1782
1783 async move {
1784 for mut future in futures {
1785 if future.recv().await.is_none() {
1786 anyhow::bail!("gave up waiting for anchors");
1787 }
1788 }
1789 Ok(())
1790 }
1791 }
1792
1793 pub fn wait_for_version(
1794 &mut self,
1795 version: clock::Global,
1796 ) -> impl Future<Output = Result<()>> + use<> {
1797 let mut rx = None;
1798 if !self.snapshot.version.observed_all(&version) {
1799 let channel = oneshot::channel();
1800 self.wait_for_version_txs.push((version, channel.0));
1801 rx = Some(channel.1);
1802 }
1803 async move {
1804 if let Some(mut rx) = rx
1805 && rx.recv().await.is_none()
1806 {
1807 anyhow::bail!("gave up waiting for version");
1808 }
1809 Ok(())
1810 }
1811 }
1812
    /// Cancels all pending `wait_for_*` futures; dropping their senders makes
    /// those futures resolve with an error.
    pub fn give_up_waiting(&mut self) {
        self.edit_id_resolvers.clear();
        self.wait_for_version_txs.clear();
    }
1817
1818 fn resolve_edit(&mut self, edit_id: clock::Lamport) {
1819 for mut tx in self
1820 .edit_id_resolvers
1821 .remove(&edit_id)
1822 .into_iter()
1823 .flatten()
1824 {
1825 tx.try_send(()).ok();
1826 }
1827 }
1828}
1829
1830#[cfg(any(test, feature = "test-support"))]
1831impl Buffer {
1832 #[track_caller]
1833 pub fn edit_via_marked_text(&mut self, marked_string: &str) {
1834 let edits = self.edits_for_marked_text(marked_string);
1835 self.edit(edits);
1836 }
1837
    /// Computes the edits that transform the buffer's current text into the
    /// text encoded by `marked_string`, where markers delimit the inserted
    /// ranges (an unmarked string is treated as one insertion spanning the
    /// whole text). Panics if the current text cannot reach the marked text
    /// via those insertions and the implied deletions.
    #[track_caller]
    pub fn edits_for_marked_text(&self, marked_string: &str) -> Vec<(Range<usize>, String)> {
        let old_text = self.text();
        let (new_text, mut ranges) = util::test::marked_text_ranges(marked_string, false);
        if ranges.is_empty() {
            ranges.push(0..new_text.len());
        }

        // Text before the first insertion must be unchanged.
        assert_eq!(
            old_text[..ranges[0].start],
            new_text[..ranges[0].start],
            "invalid edit"
        );

        // `delta` tracks how far new-text offsets have drifted from old-text
        // offsets due to earlier insertions/deletions.
        let mut delta = 0;
        let mut edits = Vec::new();
        let mut ranges = ranges.into_iter().peekable();

        while let Some(inserted_range) = ranges.next() {
            let new_start = inserted_range.start;
            let old_start = (new_start as isize - delta) as usize;

            // The unchanged text between this insertion and the next anchors
            // where the deleted span ends in the old text.
            let following_text = if let Some(next_range) = ranges.peek() {
                &new_text[inserted_range.end..next_range.start]
            } else {
                &new_text[inserted_range.end..]
            };

            let inserted_len = inserted_range.len();
            let deleted_len = old_text[old_start..]
                .find(following_text)
                .expect("invalid edit");

            let old_range = old_start..old_start + deleted_len;
            edits.push((old_range, new_text[inserted_range].to_string()));
            delta += inserted_len as isize - deleted_len as isize;
        }

        // Sanity check: total length change matches the accumulated delta.
        assert_eq!(
            old_text.len() as isize + delta,
            new_text.len() as isize,
            "invalid edit"
        );

        edits
    }
1884
    /// Asserts internal consistency between the fragment tree, the insertion
    /// tree, and the visible/deleted ropes. Test-support only; panics on any
    /// violated invariant.
    pub fn check_invariants(&self) {
        // Ensure every fragment is ordered by locator in the fragment tree and corresponds
        // to an insertion fragment in the insertions tree.
        let mut prev_fragment_id = Locator::min();
        for fragment in self.snapshot.fragments.items(&None) {
            assert!(fragment.id > prev_fragment_id);
            prev_fragment_id = fragment.id.clone();

            let insertion_fragment = self
                .snapshot
                .insertions
                .get(
                    &InsertionFragmentKey {
                        timestamp: fragment.timestamp,
                        split_offset: fragment.insertion_offset,
                    },
                    (),
                )
                .unwrap();
            assert_eq!(
                insertion_fragment.fragment_id, fragment.id,
                "fragment: {:?}\ninsertion: {:?}",
                fragment, insertion_fragment
            );
        }

        // The reverse mapping must hold too: every insertion fragment points
        // at a fragment with matching timestamp/offset.
        let mut cursor = self.snapshot.fragments.cursor::<Option<&Locator>>(&None);
        for insertion_fragment in self.snapshot.insertions.cursor::<()>(()) {
            cursor.seek(&Some(&insertion_fragment.fragment_id), Bias::Left);
            let fragment = cursor.item().unwrap();
            assert_eq!(insertion_fragment.fragment_id, fragment.id);
            assert_eq!(insertion_fragment.split_offset, fragment.insertion_offset);
        }

        // Rope lengths must agree with the fragment tree's summary.
        let fragment_summary = self.snapshot.fragments.summary();
        assert_eq!(
            fragment_summary.text.visible,
            self.snapshot.visible_text.len()
        );
        assert_eq!(
            fragment_summary.text.deleted,
            self.snapshot.deleted_text.len()
        );

        // Line endings are normalized to `\n` internally.
        assert!(!self.text().contains("\r\n"));
    }
1931
    /// Overrides how close together in time transactions must be to be
    /// grouped into a single undo entry.
    pub fn set_group_interval(&mut self, group_interval: Duration) {
        self.history.group_interval = group_interval;
    }

    /// Picks a random byte range starting at or after `start_offset`, with
    /// both endpoints clipped to valid positions.
    pub fn random_byte_range(&self, start_offset: usize, rng: &mut impl rand::Rng) -> Range<usize> {
        let end = self.clip_offset(rng.random_range(start_offset..=self.len()), Bias::Right);
        let start = self.clip_offset(rng.random_range(start_offset..=end), Bias::Right);
        start..end
    }
1941
1942 pub fn get_random_edits<T>(
1943 &self,
1944 rng: &mut T,
1945 edit_count: usize,
1946 ) -> Vec<(Range<usize>, Arc<str>)>
1947 where
1948 T: rand::Rng,
1949 {
1950 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
1951 let mut last_end = None;
1952 for _ in 0..edit_count {
1953 if last_end.is_some_and(|last_end| last_end >= self.len()) {
1954 break;
1955 }
1956 let new_start = last_end.map_or(0, |last_end| last_end + 1);
1957 let range = self.random_byte_range(new_start, rng);
1958 last_end = Some(range.end);
1959
1960 let new_text_len = rng.random_range(0..10);
1961 let new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
1962
1963 edits.push((range, new_text.into()));
1964 }
1965 edits
1966 }
1967
    /// Applies `edit_count` random edits to the buffer, returning the edits
    /// (updated with the text actually inserted) and the resulting operation.
    pub fn randomly_edit<T>(
        &mut self,
        rng: &mut T,
        edit_count: usize,
    ) -> (Vec<(Range<usize>, Arc<str>)>, Operation)
    where
        T: rand::Rng,
    {
        let mut edits = self.get_random_edits(rng, edit_count);
        log::info!("mutating buffer {:?} with {:?}", self.replica_id, edits);

        let op = self.edit(edits.iter().cloned());
        // Mirror back the (possibly normalized) text stored in the operation
        // so the returned edits match what was actually applied.
        if let Operation::Edit(edit) = &op {
            assert_eq!(edits.len(), edit.new_text.len());
            for (edit, new_text) in edits.iter_mut().zip(&edit.new_text) {
                edit.1 = new_text.clone();
            }
        } else {
            unreachable!()
        }

        (edits, op)
    }

    /// Undoes between one and five randomly chosen transactions from the
    /// undo stack, returning the generated operations.
    pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng) -> Vec<Operation> {
        use rand::prelude::*;

        let mut ops = Vec::new();
        for _ in 0..rng.random_range(1..=5) {
            if let Some(entry) = self.history.undo_stack.choose(rng) {
                let transaction = entry.transaction.clone();
                log::info!(
                    "undoing buffer {:?} transaction {:?}",
                    self.replica_id,
                    transaction
                );
                ops.push(self.undo_or_redo(transaction));
            }
        }
        ops
    }
2009}
2010
/// A `Buffer` transparently exposes the read-only API of its current
/// snapshot.
impl Deref for Buffer {
    type Target = BufferSnapshot;

    fn deref(&self) -> &Self::Target {
        &self.snapshot
    }
}
2018
2019impl BufferSnapshot {
    /// The buffer's visible (non-deleted) text as a rope.
    pub fn as_rope(&self) -> &Rope {
        &self.visible_text
    }
2023
    /// Reconstructs the buffer's full text as it existed at `version`.
    ///
    /// Uses a filter cursor that visits only fragments changed relative to
    /// `version`; runs of untouched fragments between them are copied
    /// wholesale from the visible rope.
    pub fn rope_for_version(&self, version: &clock::Global) -> Rope {
        let mut rope = Rope::new();

        let mut cursor = self
            .fragments
            .filter::<_, FragmentTextSummary>(&None, move |summary| {
                !version.observed_all(&summary.max_version)
            });
        cursor.next();

        let mut visible_cursor = self.visible_text.cursor(0);
        let mut deleted_cursor = self.deleted_text.cursor(0);

        while let Some(fragment) = cursor.item() {
            // Copy the unchanged visible text preceding this fragment.
            if cursor.start().visible > visible_cursor.offset() {
                let text = visible_cursor.slice(cursor.start().visible);
                rope.append(text);
            }

            if fragment.was_visible(version, &self.undo_map) {
                // Text visible at `version` lives in the visible rope if it's
                // still visible, otherwise in the deleted rope.
                if fragment.visible {
                    let text = visible_cursor.slice(cursor.end().visible);
                    rope.append(text);
                } else {
                    deleted_cursor.seek_forward(cursor.start().deleted);
                    let text = deleted_cursor.slice(cursor.end().deleted);
                    rope.append(text);
                }
            } else if fragment.visible {
                // Inserted after `version`: skip it without emitting text.
                visible_cursor.seek_forward(cursor.end().visible);
            }

            cursor.next();
        }

        // Copy any trailing unchanged visible text.
        if cursor.start().visible > visible_cursor.offset() {
            let text = visible_cursor.slice(cursor.start().visible);
            rope.append(text);
        }

        rope
    }
2066
    /// The buffer's identifier, shared by all replicas of this buffer.
    pub fn remote_id(&self) -> BufferId {
        self.remote_id
    }

    /// The id of the replica that produced this snapshot.
    pub fn replica_id(&self) -> ReplicaId {
        self.replica_id
    }

    /// The number of lines in the visible text (rows are zero-based).
    pub fn row_count(&self) -> u32 {
        self.max_point().row + 1
    }

    /// The length of the visible text in bytes.
    pub fn len(&self) -> usize {
        self.visible_text.len()
    }

    /// Whether the visible text is empty.
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }

    /// Iterates over all characters of the visible text.
    pub fn chars(&self) -> impl Iterator<Item = char> + '_ {
        self.chars_at(0)
    }

    /// Iterates over the characters within `range`.
    pub fn chars_for_range<T: ToOffset>(&self, range: Range<T>) -> impl Iterator<Item = char> + '_ {
        self.text_for_range(range).flat_map(str::chars)
    }

    /// Iterates over the characters within `range` in reverse order.
    pub fn reversed_chars_for_range<T: ToOffset>(
        &self,
        range: Range<T>,
    ) -> impl Iterator<Item = char> + '_ {
        self.reversed_chunks_in_range(range)
            .flat_map(|chunk| chunk.chars().rev())
    }
2102
2103 pub fn contains_str_at<T>(&self, position: T, needle: &str) -> bool
2104 where
2105 T: ToOffset,
2106 {
2107 let position = position.to_offset(self);
2108 position == self.clip_offset(position, Bias::Left)
2109 && self
2110 .bytes_in_range(position..self.len())
2111 .flatten()
2112 .copied()
2113 .take(needle.len())
2114 .eq(needle.bytes())
2115 }
2116
    /// Returns the range of the longest prefix of `needle` that matches,
    /// case-insensitively, the text ending at `position`. The returned range
    /// ends at `position`; when nothing matches it is empty (start == end).
    pub fn common_prefix_at<T>(&self, position: T, needle: &str) -> Range<T>
    where
        T: ToOffset + TextDimension,
    {
        let offset = position.to_offset(self);
        // Candidate prefix lengths are the byte indices of `needle`'s char
        // boundaries plus its full length, capped by the text available
        // before `offset`. Both sides are lowercased via `char::to_lowercase`
        // before comparison, and the last (longest) matching candidate wins.
        let common_prefix_len = needle
            .char_indices()
            .map(|(index, _)| index)
            .chain([needle.len()])
            .take_while(|&len| len <= offset)
            .filter(|&len| {
                let left = self
                    .chars_for_range(offset - len..offset)
                    .flat_map(char::to_lowercase);
                let right = needle[..len].chars().flat_map(char::to_lowercase);
                left.eq(right)
            })
            .last()
            .unwrap_or(0);
        let start_offset = offset - common_prefix_len;
        let start = self.text_summary_for_range(0..start_offset);
        start..position
    }
2140
    /// Returns the entire visible text as a `String`.
    pub fn text(&self) -> String {
        self.visible_text.to_string()
    }

    /// The line-ending style recorded for this buffer.
    pub fn line_ending(&self) -> LineEnding {
        self.line_ending
    }

    /// Returns the concatenation of all deleted text as a `String`.
    pub fn deleted_text(&self) -> String {
        self.deleted_text.to_string()
    }

    /// Aggregated summary of the entire visible text.
    pub fn text_summary(&self) -> TextSummary {
        self.visible_text.summary()
    }

    /// The point just past the last character of the visible text.
    pub fn max_point(&self) -> Point {
        self.visible_text.max_point()
    }

    /// The UTF-16 point just past the last character of the visible text.
    pub fn max_point_utf16(&self) -> PointUtf16 {
        self.visible_text.max_point_utf16()
    }

    /// Converts a point to a byte offset in the visible text.
    pub fn point_to_offset(&self, point: Point) -> usize {
        self.visible_text.point_to_offset(point)
    }

    /// Converts a point to a UTF-16 offset in the visible text.
    pub fn point_to_offset_utf16(&self, point: Point) -> OffsetUtf16 {
        self.visible_text.point_to_offset_utf16(point)
    }

    /// Converts a UTF-16 point to a UTF-16 offset.
    pub fn point_utf16_to_offset_utf16(&self, point: PointUtf16) -> OffsetUtf16 {
        self.visible_text.point_utf16_to_offset_utf16(point)
    }

    /// Converts a UTF-16 point to a byte offset.
    pub fn point_utf16_to_offset(&self, point: PointUtf16) -> usize {
        self.visible_text.point_utf16_to_offset(point)
    }

    /// Converts a possibly-invalid UTF-16 point to a byte offset.
    pub fn unclipped_point_utf16_to_offset(&self, point: Unclipped<PointUtf16>) -> usize {
        self.visible_text.unclipped_point_utf16_to_offset(point)
    }

    /// Converts a possibly-invalid UTF-16 point to a point.
    pub fn unclipped_point_utf16_to_point(&self, point: Unclipped<PointUtf16>) -> Point {
        self.visible_text.unclipped_point_utf16_to_point(point)
    }

    /// Converts a UTF-16 offset to a byte offset.
    pub fn offset_utf16_to_offset(&self, offset: OffsetUtf16) -> usize {
        self.visible_text.offset_utf16_to_offset(offset)
    }

    /// Converts a byte offset to a UTF-16 offset.
    pub fn offset_to_offset_utf16(&self, offset: usize) -> OffsetUtf16 {
        self.visible_text.offset_to_offset_utf16(offset)
    }

    /// Converts a byte offset to a point.
    pub fn offset_to_point(&self, offset: usize) -> Point {
        self.visible_text.offset_to_point(offset)
    }

    /// Converts a byte offset to a UTF-16 point.
    pub fn offset_to_point_utf16(&self, offset: usize) -> PointUtf16 {
        self.visible_text.offset_to_point_utf16(offset)
    }

    /// Converts a point to a UTF-16 point.
    pub fn point_to_point_utf16(&self, point: Point) -> PointUtf16 {
        self.visible_text.point_to_point_utf16(point)
    }

    /// Converts a UTF-16 point to a point.
    pub fn point_utf16_to_point(&self, point: PointUtf16) -> Point {
        self.visible_text.point_utf16_to_point(point)
    }

    /// The version vector describing the operations reflected in this snapshot.
    pub fn version(&self) -> &clock::Global {
        &self.version
    }

    /// Iterates over characters of the visible text starting at `position`.
    pub fn chars_at<T: ToOffset>(&self, position: T) -> impl Iterator<Item = char> + '_ {
        let offset = position.to_offset(self);
        self.visible_text.chars_at(offset)
    }

    /// Iterates over characters before `position`, moving backwards.
    pub fn reversed_chars_at<T: ToOffset>(&self, position: T) -> impl Iterator<Item = char> + '_ {
        let offset = position.to_offset(self);
        self.visible_text.reversed_chars_at(offset)
    }

    /// Iterates over the text chunks within `range` in reverse order.
    pub fn reversed_chunks_in_range<T: ToOffset>(&self, range: Range<T>) -> rope::Chunks<'_> {
        let range = range.start.to_offset(self)..range.end.to_offset(self);
        self.visible_text.reversed_chunks_in_range(range)
    }

    /// Iterates over the raw bytes of the visible text within `range`.
    pub fn bytes_in_range<T: ToOffset>(&self, range: Range<T>) -> rope::Bytes<'_> {
        let start = range.start.to_offset(self);
        let end = range.end.to_offset(self);
        self.visible_text.bytes_in_range(start..end)
    }

    /// Iterates over the raw bytes within `range` in reverse order.
    pub fn reversed_bytes_in_range<T: ToOffset>(&self, range: Range<T>) -> rope::Bytes<'_> {
        let start = range.start.to_offset(self);
        let end = range.end.to_offset(self);
        self.visible_text.reversed_bytes_in_range(start..end)
    }

    /// Iterates over the text chunks of the visible text within `range`.
    pub fn text_for_range<T: ToOffset>(&self, range: Range<T>) -> Chunks<'_> {
        let start = range.start.to_offset(self);
        let end = range.end.to_offset(self);
        self.visible_text.chunks_in_range(start..end)
    }
2249
2250 pub fn line_len(&self, row: u32) -> u32 {
2251 let row_start_offset = Point::new(row, 0).to_offset(self);
2252 let row_end_offset = if row >= self.max_point().row {
2253 self.len()
2254 } else {
2255 Point::new(row + 1, 0).to_previous_offset(self)
2256 };
2257 (row_end_offset - row_start_offset) as u32
2258 }
2259
2260 /// A function to convert character offsets from e.g. user's `go.mod:22:33` input into byte-offset Point columns.
2261 pub fn point_from_external_input(&self, row: u32, characters: u32) -> Point {
2262 const MAX_BYTES_IN_UTF_8: u32 = 4;
2263
2264 let row = row.min(self.max_point().row);
2265 let start = Point::new(row, 0);
2266 let end = self.clip_point(
2267 Point::new(
2268 row,
2269 characters
2270 .saturating_mul(MAX_BYTES_IN_UTF_8)
2271 .saturating_add(1),
2272 ),
2273 Bias::Right,
2274 );
2275 let range = start..end;
2276 let mut point = range.start;
2277 let mut remaining_columns = characters;
2278
2279 for chunk in self.text_for_range(range) {
2280 for character in chunk.chars() {
2281 if remaining_columns == 0 {
2282 return point;
2283 }
2284 remaining_columns -= 1;
2285 point.column += character.len_utf8() as u32;
2286 }
2287 }
2288 point
2289 }
2290
    /// Yields `(row, LineIndent)` for each row in `row_range`, including the
    /// end row (the chunk range extends to the end of `row_range.end`'s line).
    pub fn line_indents_in_row_range(
        &self,
        row_range: Range<u32>,
    ) -> impl Iterator<Item = (u32, LineIndent)> + '_ {
        let start = Point::new(row_range.start, 0).to_offset(self);
        let end = Point::new(row_range.end, self.line_len(row_range.end)).to_offset(self);

        let mut chunks = self.as_rope().chunks_in_range(start..end);
        let mut row = row_range.start;
        let mut done = false;
        std::iter::from_fn(move || {
            if done {
                None
            } else {
                // Consume the current line's indentation, then advance to the
                // next line; when there is no next line, finish after yielding
                // this row.
                let indent = (row, LineIndent::from_chunks(&mut chunks));
                done = !chunks.next_line();
                row += 1;
                Some(indent)
            }
        })
    }
2312
    /// Returns the line indents in the given row range, exclusive of end row, in reversed order.
    pub fn reversed_line_indents_in_row_range(
        &self,
        row_range: Range<u32>,
    ) -> impl Iterator<Item = (u32, LineIndent)> + '_ {
        let start = Point::new(row_range.start, 0).to_offset(self);

        let end_point;
        let end;
        if row_range.end > row_range.start {
            // The last reported row is `row_range.end - 1`; include its full line.
            end_point = Point::new(row_range.end - 1, self.line_len(row_range.end - 1));
            end = end_point.to_offset(self);
        } else {
            // Empty row range: produce an empty chunk range at the start.
            end_point = Point::new(row_range.start, 0);
            end = start;
        };

        let mut chunks = self.as_rope().chunks_in_range(start..end);
        // Move the cursor to the start of the last line if it's not empty.
        chunks.seek(end);
        if end_point.column > 0 {
            chunks.prev_line();
        }

        let mut row = end_point.row;
        let mut done = false;
        std::iter::from_fn(move || {
            if done {
                None
            } else {
                let initial_offset = chunks.offset();
                let indent = (row, LineIndent::from_chunks(&mut chunks));
                // `from_chunks` may have advanced within the line; rewind to
                // the line start before stepping back to the previous line.
                if chunks.offset() > initial_offset {
                    chunks.prev_line();
                }
                done = !chunks.prev_line();
                if !done {
                    row -= 1;
                }

                Some(indent)
            }
        })
    }
2357
    /// Computes the indentation of the given row from its leading characters.
    pub fn line_indent_for_row(&self, row: u32) -> LineIndent {
        LineIndent::from_iter(self.chars_at(Point::new(row, 0)))
    }
2361
2362 pub fn is_line_blank(&self, row: u32) -> bool {
2363 self.text_for_range(Point::new(row, 0)..Point::new(row, self.line_len(row)))
2364 .all(|chunk| chunk.matches(|c: char| !c.is_whitespace()).next().is_none())
2365 }
2366
    /// Computes a text dimension (e.g. byte offset, `Point`) summarizing the
    /// visible text within `range`.
    pub fn text_summary_for_range<D, O: ToOffset>(&self, range: Range<O>) -> D
    where
        D: TextDimension,
    {
        self.visible_text
            .cursor(range.start.to_offset(self))
            .summary(range.end.to_offset(self))
    }

    /// Resolves each anchor to a text dimension.
    ///
    /// NOTE(review): the underlying cursors only seek forward, so anchors are
    /// presumably expected in increasing order — confirm against callers.
    pub fn summaries_for_anchors<'a, D, A>(&'a self, anchors: A) -> impl 'a + Iterator<Item = D>
    where
        D: 'a + TextDimension,
        A: 'a + IntoIterator<Item = &'a Anchor>,
    {
        let anchors = anchors.into_iter();
        self.summaries_for_anchors_with_payload::<D, _, ()>(anchors.map(|a| (a, ())))
            .map(|d| d.0)
    }
2385
    /// Resolves each `(anchor, payload)` pair to a `(dimension, payload)`
    /// pair. Both the fragment cursor and the text cursor only seek forward,
    /// so anchors must be supplied in increasing order.
    pub fn summaries_for_anchors_with_payload<'a, D, A, T>(
        &'a self,
        anchors: A,
    ) -> impl 'a + Iterator<Item = (D, T)>
    where
        D: 'a + TextDimension,
        A: 'a + IntoIterator<Item = (&'a Anchor, T)>,
    {
        let anchors = anchors.into_iter();
        let mut fragment_cursor = self
            .fragments
            .cursor::<Dimensions<Option<&Locator>, usize>>(&None);
        let mut text_cursor = self.visible_text.cursor(0);
        // Accumulates the dimension up to the most recently resolved anchor.
        let mut position = D::zero(());

        anchors.map(move |(anchor, payload)| {
            // Min/max sentinels resolve without consulting the trees.
            if anchor.is_min() {
                return (D::zero(()), payload);
            } else if anchor.is_max() {
                return (D::from_text_summary(&self.visible_text.summary()), payload);
            }

            let Some(insertion) = self.try_find_fragment(anchor) else {
                panic!(
                    "invalid insertion for buffer {}@{:?} with anchor {:?}",
                    self.remote_id(),
                    self.version,
                    anchor
                );
            };
            // TODO verbose debug because we are seeing is_max return false unexpectedly,
            // remove this once that is understood and fixed
            assert_eq!(
                insertion.timestamp,
                anchor.timestamp(),
                "invalid insertion for buffer {}@{:?}. anchor: {:?}, {:?}, {:?}, {:?}, {:?}. timestamp: {:?}, offset: {:?}, bias: {:?}",
                self.remote_id(),
                self.version,
                anchor.timestamp_replica_id,
                anchor.timestamp_value,
                anchor.offset,
                anchor.bias,
                anchor.buffer_id,
                anchor.timestamp() == clock::Lamport::MAX,
                anchor.offset == u32::MAX,
                anchor.bias == Bias::Right,
            );

            // Locate the fragment in the fragment tree to learn its offset in
            // the visible text, then add the anchor's offset within it.
            fragment_cursor.seek_forward(&Some(&insertion.fragment_id), Bias::Left);
            let fragment = fragment_cursor.item().unwrap();
            let mut fragment_offset = fragment_cursor.start().1;
            if fragment.visible {
                fragment_offset += (anchor.offset - insertion.split_offset) as usize;
            }

            position.add_assign(&text_cursor.summary(fragment_offset));
            (position, payload)
        })
    }
2445
    /// Resolves a single anchor to a text dimension by summarizing the
    /// visible text up to the anchor's offset.
    pub fn summary_for_anchor<D>(&self, anchor: &Anchor) -> D
    where
        D: TextDimension,
    {
        self.text_summary_for_range(0..self.offset_for_anchor(anchor))
    }
2452
    /// Resolves an anchor to a byte offset in this snapshot's visible text.
    ///
    /// Panics (via `panic_bad_anchor`) when the anchor no longer matches any
    /// insertion; debug builds additionally assert that it belongs to this
    /// buffer and that its timestamp has been observed.
    pub fn offset_for_anchor(&self, anchor: &Anchor) -> usize {
        if anchor.is_min() {
            0
        } else if anchor.is_max() {
            self.visible_text.len()
        } else {
            debug_assert_eq!(anchor.buffer_id, Some(self.remote_id));
            debug_assert!(
                self.version.observed(anchor.timestamp()),
                "Anchor timestamp {:?} not observed by buffer {:?}",
                anchor.timestamp(),
                self.version
            );
            let item = self.try_find_fragment(anchor);
            let Some(insertion) =
                item.filter(|insertion| insertion.timestamp == anchor.timestamp())
            else {
                self.panic_bad_anchor(anchor);
            };

            // Locate the fragment in the fragment tree to learn its offset in
            // the visible text.
            let (start, _, item) = self
                .fragments
                .find::<Dimensions<Option<&Locator>, usize>, _>(
                    &None,
                    &Some(&insertion.fragment_id),
                    Bias::Left,
                );
            let fragment = item.unwrap();
            let mut fragment_offset = start.1;
            if fragment.visible {
                // Add the anchor's position within the insertion, relative to
                // where this fragment was split off.
                fragment_offset += (anchor.offset - insertion.split_offset) as usize;
            }
            fragment_offset
        }
    }
2488
    /// Panics with a diagnostic explaining why `anchor` could not be resolved
    /// against this snapshot. Marked `#[cold]` to keep the resolution happy
    /// path's code layout tight.
    #[cold]
    fn panic_bad_anchor(&self, anchor: &Anchor) -> ! {
        if anchor.buffer_id.is_some_and(|id| id != self.remote_id) {
            panic!(
                "invalid anchor - buffer id does not match: anchor {anchor:?}; buffer id: {}, version: {:?}",
                self.remote_id, self.version
            );
        } else if !self.version.observed(anchor.timestamp()) {
            panic!(
                "invalid anchor - snapshot has not observed lamport: {:?}; version: {:?}",
                anchor, self.version
            );
        } else {
            panic!(
                "invalid anchor {:?}. buffer id: {}, version: {:?}",
                anchor, self.remote_id, self.version
            );
        }
    }
2508
    /// Like `try_fragment_id_for_anchor`, but panics with a diagnostic when
    /// resolution fails.
    fn fragment_id_for_anchor(&self, anchor: &Anchor) -> &Locator {
        self.try_fragment_id_for_anchor(anchor)
            .unwrap_or_else(|| self.panic_bad_anchor(anchor))
    }

    /// Resolves an anchor to the locator of the fragment that contains it.
    fn try_fragment_id_for_anchor(&self, anchor: &Anchor) -> Option<&Locator> {
        if anchor.is_min() {
            Some(Locator::min_ref())
        } else if anchor.is_max() {
            Some(Locator::max_ref())
        } else {
            let item = self.try_find_fragment(anchor);
            // The timestamp consistency check is only performed in debug
            // builds; release builds accept whatever insertion was found.
            item.filter(|insertion| {
                !cfg!(debug_assertions) || insertion.timestamp == anchor.timestamp()
            })
            .map(|insertion| &insertion.fragment_id)
        }
    }
2527
    /// Looks up the insertion record containing `anchor`, keyed by the
    /// anchor's insertion timestamp and split offset. Returns `None` only
    /// when no candidate insertion exists at all.
    fn try_find_fragment(&self, anchor: &Anchor) -> Option<&InsertionFragment> {
        let anchor_key = InsertionFragmentKey {
            timestamp: anchor.timestamp(),
            split_offset: anchor.offset,
        };
        match self.insertions.find_with_prev::<InsertionFragmentKey, _>(
            (),
            &anchor_key,
            anchor.bias,
        ) {
            (_, _, Some((prev, insertion))) => {
                let comparison = sum_tree::KeyedItem::key(insertion).cmp(&anchor_key);
                // The found insertion may start past the anchor, or exactly at
                // it for a left-biased anchor with a non-zero offset; in those
                // cases the anchor belongs to the previous insertion record.
                if comparison == Ordering::Greater
                    || (anchor.bias == Bias::Left
                        && comparison == Ordering::Equal
                        && anchor.offset > 0)
                {
                    prev
                } else {
                    Some(insertion)
                }
            }
            // Sought past the end of the tree; fall back to the last record.
            _ => self.insertions.last(),
        }
    }
2553
    /// Returns an anchor range for the given input position range that is anchored to the text in the range.
    pub fn anchor_range_around<T: ToOffset>(&self, position: Range<T>) -> Range<Anchor> {
        self.anchor_after(position.start)..self.anchor_before(position.end)
    }

    /// Returns an anchor range for the given input position range that is anchored to the text before and after.
    pub fn anchor_range_between<T: ToOffset>(&self, position: Range<T>) -> Range<Anchor> {
        self.anchor_before(position.start)..self.anchor_after(position.end)
    }

    /// Returns an anchor for the given input position that is anchored to the text before the position.
    pub fn anchor_before<T: ToOffset>(&self, position: T) -> Anchor {
        self.anchor_at(position, Bias::Left)
    }

    /// Returns an anchor for the given input position that is anchored to the text after the position.
    pub fn anchor_after<T: ToOffset>(&self, position: T) -> Anchor {
        self.anchor_at(position, Bias::Right)
    }

    /// Returns an anchor at the given position with an explicit bias.
    pub fn anchor_at<T: ToOffset>(&self, position: T, bias: Bias) -> Anchor {
        self.anchor_at_offset(position.to_offset(self), bias)
    }
2577
    /// Creates an anchor at `offset` with the given bias. Offsets that do not
    /// fall on a char boundary are snapped in the direction of the bias.
    fn anchor_at_offset(&self, mut offset: usize, bias: Bias) -> Anchor {
        if bias == Bias::Left && offset == 0 {
            Anchor::min_for_buffer(self.remote_id)
        } else if bias == Bias::Right
            && ((!cfg!(debug_assertions) && offset >= self.len()) || offset == self.len())
        {
            // Release builds treat any offset at or past the end as the max
            // anchor; debug builds only accept an exact end-of-buffer offset,
            // so out-of-range offsets reach the debug_panic! below instead.
            Anchor::max_for_buffer(self.remote_id)
        } else {
            if !self
                .visible_text
                .assert_char_boundary::<{ cfg!(debug_assertions) }>(offset)
            {
                // Snap to a valid char boundary in the bias direction.
                offset = match bias {
                    Bias::Left => self.visible_text.floor_char_boundary(offset),
                    Bias::Right => self.visible_text.ceil_char_boundary(offset),
                };
            }
            let (start, _, item) = self.fragments.find::<usize, _>(&None, &offset, bias);
            let Some(fragment) = item else {
                // We got a bad offset, likely out of bounds
                debug_panic!(
                    "Failed to find fragment at offset {} (len: {})",
                    offset,
                    self.len()
                );
                return Anchor::max_for_buffer(self.remote_id);
            };
            // Express the offset relative to the containing insertion.
            let overshoot = offset - start;
            Anchor::new(
                fragment.timestamp,
                fragment.insertion_offset + overshoot as u32,
                bias,
                Some(self.remote_id),
            )
        }
    }
2614
    /// Whether `anchor` can be resolved against this snapshot: it must be a
    /// min/max sentinel, or belong to this buffer with an observed timestamp.
    pub fn can_resolve(&self, anchor: &Anchor) -> bool {
        anchor.is_min()
            || anchor.is_max()
            || (Some(self.remote_id) == anchor.buffer_id
                && self.version.observed(anchor.timestamp()))
    }

    /// Clamps `offset` to a valid position, moving in the bias direction.
    pub fn clip_offset(&self, offset: usize, bias: Bias) -> usize {
        self.visible_text.clip_offset(offset, bias)
    }

    /// Clamps `point` to a valid position, moving in the bias direction.
    pub fn clip_point(&self, point: Point, bias: Bias) -> Point {
        self.visible_text.clip_point(point, bias)
    }

    /// Clamps a UTF-16 offset to a valid position.
    pub fn clip_offset_utf16(&self, offset: OffsetUtf16, bias: Bias) -> OffsetUtf16 {
        self.visible_text.clip_offset_utf16(offset, bias)
    }

    /// Clamps a possibly-invalid UTF-16 point to a valid position.
    pub fn clip_point_utf16(&self, point: Unclipped<PointUtf16>, bias: Bias) -> PointUtf16 {
        self.visible_text.clip_point_utf16(point, bias)
    }
2637
    /// Iterates over all edits made since the given version.
    pub fn edits_since<'a, D>(
        &'a self,
        since: &'a clock::Global,
    ) -> impl 'a + Iterator<Item = Edit<D>>
    where
        D: TextDimension + Ord,
    {
        self.edits_since_in_range(since, Anchor::MIN..Anchor::MAX)
    }

    /// Like `edits_since`, but also yields the anchor range of each edit.
    pub fn anchored_edits_since<'a, D>(
        &'a self,
        since: &'a clock::Global,
    ) -> impl 'a + Iterator<Item = (Edit<D>, Range<Anchor>)>
    where
        D: TextDimension + Ord,
    {
        self.anchored_edits_since_in_range(since, Anchor::MIN..Anchor::MAX)
    }

    /// Iterates over the edits made since `since` that intersect the given
    /// anchor range.
    pub fn edits_since_in_range<'a, D>(
        &'a self,
        since: &'a clock::Global,
        range: Range<Anchor>,
    ) -> impl 'a + Iterator<Item = Edit<D>>
    where
        D: TextDimension + Ord,
    {
        self.anchored_edits_since_in_range(since, range)
            .map(|item| item.0)
    }
2669
    /// Iterates over the edits made since `since` that intersect `range`,
    /// yielding each edit together with the anchor range it covers.
    pub fn anchored_edits_since_in_range<'a, D>(
        &'a self,
        since: &'a clock::Global,
        range: Range<Anchor>,
    ) -> impl 'a + Iterator<Item = (Edit<D>, Range<Anchor>)>
    where
        D: TextDimension + Ord,
    {
        // Fast path: nothing has changed since `since`.
        if *since == self.version {
            return None.into_iter().flatten();
        }
        // Only descend into subtrees containing operations `since` has not
        // observed.
        let mut cursor = self.fragments.filter(&None, move |summary| {
            !since.observed_all(&summary.max_version)
        });
        cursor.next();
        let fragments_cursor = Some(cursor);
        let start_fragment_id = self.fragment_id_for_anchor(&range.start);
        let (start, _, item) = self
            .fragments
            .find::<Dimensions<Option<&Locator>, FragmentTextSummary>, _>(
                &None,
                &Some(start_fragment_id),
                Bias::Left,
            );
        let mut visible_start = start.1.visible;
        let mut deleted_start = start.1.deleted;
        if let Some(fragment) = item {
            // Advance the starting offsets by the anchor's position within
            // the fragment it points into.
            let overshoot = (range.start.offset - fragment.insertion_offset) as usize;
            if fragment.visible {
                visible_start += overshoot;
            } else {
                deleted_start += overshoot;
            }
        }
        let end_fragment_id = self.fragment_id_for_anchor(&range.end);

        Some(Edits {
            visible_cursor: self.visible_text.cursor(visible_start),
            deleted_cursor: self.deleted_text.cursor(deleted_start),
            fragments_cursor,
            undos: &self.undo_map,
            since,
            old_end: D::zero(()),
            new_end: D::zero(()),
            range: (start_fragment_id, range.start.offset)..(end_fragment_id, range.end.offset),
            buffer_id: self.remote_id,
        })
        .into_iter()
        .flatten()
    }
2720
2721 pub fn has_edits_since_in_range(&self, since: &clock::Global, range: Range<Anchor>) -> bool {
2722 if *since != self.version {
2723 let start_fragment_id = self.fragment_id_for_anchor(&range.start);
2724 let end_fragment_id = self.fragment_id_for_anchor(&range.end);
2725 let mut cursor = self.fragments.filter::<_, usize>(&None, move |summary| {
2726 !since.observed_all(&summary.max_version)
2727 });
2728 cursor.next();
2729 while let Some(fragment) = cursor.item() {
2730 if fragment.id > *end_fragment_id {
2731 break;
2732 }
2733 if fragment.id > *start_fragment_id {
2734 let was_visible = fragment.was_visible(since, &self.undo_map);
2735 let is_visible = fragment.visible;
2736 if was_visible != is_visible {
2737 return true;
2738 }
2739 }
2740 cursor.next();
2741 }
2742 }
2743 false
2744 }
2745
2746 pub fn has_edits_since(&self, since: &clock::Global) -> bool {
2747 if *since != self.version {
2748 let mut cursor = self.fragments.filter::<_, usize>(&None, move |summary| {
2749 !since.observed_all(&summary.max_version)
2750 });
2751 cursor.next();
2752 while let Some(fragment) = cursor.item() {
2753 let was_visible = fragment.was_visible(since, &self.undo_map);
2754 let is_visible = fragment.visible;
2755 if was_visible != is_visible {
2756 return true;
2757 }
2758 cursor.next();
2759 }
2760 }
2761 false
2762 }
2763
2764 pub fn range_to_version(&self, range: Range<usize>, version: &clock::Global) -> Range<usize> {
2765 let mut offsets = self.offsets_to_version([range.start, range.end], version);
2766 offsets.next().unwrap()..offsets.next().unwrap()
2767 }
2768
    /// Converts the given sequence of offsets into their corresponding offsets
    /// at a prior version of this buffer.
    ///
    /// NOTE(review): the edit walk below only moves forward, so the input
    /// offsets are presumably expected in non-decreasing order — confirm
    /// against callers.
    pub fn offsets_to_version<'a>(
        &'a self,
        offsets: impl 'a + IntoIterator<Item = usize>,
        version: &'a clock::Global,
    ) -> impl 'a + Iterator<Item = usize> {
        let mut edits = self.edits_since(version).peekable();
        let mut last_old_end = 0;
        let mut last_new_end = 0;
        offsets.into_iter().map(move |new_offset| {
            while let Some(edit) = edits.peek() {
                // This edit begins after the offset; later edits can't matter.
                if edit.new.start > new_offset {
                    break;
                }

                // This edit ends at or before the offset; record its
                // endpoints and consume it.
                if edit.new.end <= new_offset {
                    last_new_end = edit.new.end;
                    last_old_end = edit.old.end;
                    edits.next();
                    continue;
                }

                // The offset falls inside this edit: map it into the old
                // range, clamped to the edit's old end.
                let overshoot = new_offset - edit.new.start;
                return (edit.old.start + overshoot).min(edit.old.end);
            }

            // The offset lies in unchanged text after the last relevant edit.
            last_old_end + new_offset.saturating_sub(last_new_end)
        })
    }
2799
    /// Visually annotates a position or range with the `Debug` representation of a value. The
    /// callsite of this function is used as a key - previous annotations will be removed.
    #[cfg(debug_assertions)]
    #[track_caller]
    pub fn debug<R, V>(&self, ranges: &R, value: V)
    where
        R: debug::ToDebugRanges,
        V: std::fmt::Debug,
    {
        // `#[track_caller]` makes `Location::caller()` identify our caller,
        // giving each callsite a stable annotation key.
        self.debug_with_key(std::panic::Location::caller(), ranges, value);
    }

    /// Visually annotates a position or range with the `Debug` representation of a value. Previous
    /// debug annotations with the same key will be removed. The key is also used to determine the
    /// annotation's color.
    #[cfg(debug_assertions)]
    pub fn debug_with_key<K, R, V>(&self, key: &K, ranges: &R, value: V)
    where
        K: std::hash::Hash + 'static,
        R: debug::ToDebugRanges,
        V: std::fmt::Debug,
    {
        // Convert positions to anchors so annotations follow later edits.
        let ranges = ranges
            .to_debug_ranges(self)
            .into_iter()
            .map(|range| self.anchor_after(range.start)..self.anchor_before(range.end))
            .collect();
        debug::GlobalDebugRanges::with_locked(|debug_ranges| {
            debug_ranges.insert(key, ranges, format!("{value:?}").into());
        });
    }
2831}
2832
/// Builds a new `(visible, deleted)` rope pair by copying spans out of
/// cursors over the previous visible and deleted ropes.
struct RopeBuilder<'a> {
    // Cursor over the previous visible text.
    old_visible_cursor: rope::Cursor<'a>,
    // Cursor over the previous deleted text.
    old_deleted_cursor: rope::Cursor<'a>,
    new_visible: Rope,
    new_deleted: Rope,
}
2839
impl<'a> RopeBuilder<'a> {
    fn new(old_visible_cursor: rope::Cursor<'a>, old_deleted_cursor: rope::Cursor<'a>) -> Self {
        Self {
            old_visible_cursor,
            old_deleted_cursor,
            new_visible: Rope::new(),
            new_deleted: Rope::new(),
        }
    }

    /// Copies `len.visible` bytes of old visible text and `len.deleted` bytes
    /// of old deleted text, preserving each span's classification.
    fn append(&mut self, len: FragmentTextSummary) {
        self.push(len.visible, true, true);
        self.push(len.deleted, false, false);
    }

    /// Copies one fragment's text, reading from the old rope indicated by
    /// `was_visible` and writing to the rope matching its current visibility.
    fn push_fragment(&mut self, fragment: &Fragment, was_visible: bool) {
        debug_assert!(fragment.len > 0);
        self.push(fragment.len as usize, was_visible, fragment.visible)
    }

    /// Copies `len` bytes from the old visible or deleted rope (per
    /// `was_visible`) into the new visible or deleted rope (per `is_visible`).
    fn push(&mut self, len: usize, was_visible: bool, is_visible: bool) {
        let text = if was_visible {
            self.old_visible_cursor
                .slice(self.old_visible_cursor.offset() + len)
        } else {
            self.old_deleted_cursor
                .slice(self.old_deleted_cursor.offset() + len)
        };
        if is_visible {
            self.new_visible.append(text);
        } else {
            self.new_deleted.append(text);
        }
    }

    /// Appends newly inserted text to the new visible rope.
    fn push_str(&mut self, text: &str) {
        self.new_visible.push(text);
    }

    /// Flushes the remainder of both old ropes and returns the rebuilt
    /// `(visible, deleted)` pair.
    fn finish(mut self) -> (Rope, Rope) {
        self.new_visible.append(self.old_visible_cursor.suffix());
        self.new_deleted.append(self.old_deleted_cursor.suffix());
        (self.new_visible, self.new_deleted)
    }
}
2885
impl<D: TextDimension + Ord, F: FnMut(&FragmentSummary) -> bool> Iterator for Edits<'_, D, F> {
    type Item = (Edit<D>, Range<Anchor>);

    /// Produces the next edit between `since` and the current version,
    /// walking fragments whose visibility changed and coalescing adjacent
    /// changes into a single `Edit`.
    fn next(&mut self) -> Option<Self::Item> {
        let mut pending_edit: Option<Self::Item> = None;
        let cursor = self.fragments_cursor.as_mut()?;

        while let Some(fragment) = cursor.item() {
            // Skip fragments before the requested range; stop after it.
            if fragment.id < *self.range.start.0 {
                cursor.next();
                continue;
            } else if fragment.id > *self.range.end.0 {
                break;
            }

            // Advance both position trackers over the unchanged visible text
            // between the previous fragment and this one.
            if cursor.start().visible > self.visible_cursor.offset() {
                let summary = self.visible_cursor.summary(cursor.start().visible);
                self.old_end.add_assign(&summary);
                self.new_end.add_assign(&summary);
            }

            // A gap after the pending edit ends the current coalescing run;
            // yield what we have and resume here on the next call.
            if pending_edit
                .as_ref()
                .is_some_and(|(change, _)| change.new.end < self.new_end)
            {
                break;
            }

            let start_anchor = Anchor::new(
                fragment.timestamp,
                fragment.insertion_offset,
                Bias::Right,
                Some(self.buffer_id),
            );
            let end_anchor = Anchor::new(
                fragment.timestamp,
                fragment.insertion_offset + fragment.len,
                Bias::Left,
                Some(self.buffer_id),
            );

            if !fragment.was_visible(self.since, self.undos) && fragment.visible {
                // Insertion: the fragment is newly visible.
                let mut visible_end = cursor.end().visible;
                if fragment.id == *self.range.end.0 {
                    // Clamp to the requested end offset within the last fragment.
                    visible_end = cmp::min(
                        visible_end,
                        cursor.start().visible
                            + (self.range.end.1 - fragment.insertion_offset) as usize,
                    );
                }

                let fragment_summary = self.visible_cursor.summary(visible_end);
                let mut new_end = self.new_end;
                new_end.add_assign(&fragment_summary);
                if let Some((edit, range)) = pending_edit.as_mut() {
                    // Extend the pending edit rather than starting a new one.
                    edit.new.end = new_end;
                    range.end = end_anchor;
                } else {
                    pending_edit = Some((
                        Edit {
                            old: self.old_end..self.old_end,
                            new: self.new_end..new_end,
                        },
                        start_anchor..end_anchor,
                    ));
                }

                self.new_end = new_end;
            } else if fragment.was_visible(self.since, self.undos) && !fragment.visible {
                // Deletion: the fragment was visible at `since` but is not now.
                let mut deleted_end = cursor.end().deleted;
                if fragment.id == *self.range.end.0 {
                    deleted_end = cmp::min(
                        deleted_end,
                        cursor.start().deleted
                            + (self.range.end.1 - fragment.insertion_offset) as usize,
                    );
                }

                if cursor.start().deleted > self.deleted_cursor.offset() {
                    self.deleted_cursor.seek_forward(cursor.start().deleted);
                }
                let fragment_summary = self.deleted_cursor.summary(deleted_end);
                let mut old_end = self.old_end;
                old_end.add_assign(&fragment_summary);
                if let Some((edit, range)) = pending_edit.as_mut() {
                    edit.old.end = old_end;
                    range.end = end_anchor;
                } else {
                    pending_edit = Some((
                        Edit {
                            old: self.old_end..old_end,
                            new: self.new_end..self.new_end,
                        },
                        start_anchor..end_anchor,
                    ));
                }

                self.old_end = old_end;
            }

            cursor.next();
        }

        pending_edit
    }
}
2992
impl Fragment {
    /// Whether this fragment is currently visible: its insertion has not been
    /// undone, and every deletion affecting it has been undone.
    fn is_visible(&self, undos: &UndoMap) -> bool {
        !undos.is_undone(self.timestamp) && self.deletions.iter().all(|d| undos.is_undone(*d))
    }

    /// Whether this fragment was visible as of `version`: the insertion had
    /// been observed and not undone at that version, and every deletion was
    /// either unobserved or undone at that version.
    fn was_visible(&self, version: &clock::Global, undos: &UndoMap) -> bool {
        (version.observed(self.timestamp) && !undos.was_undone(self.timestamp, version))
            && self
                .deletions
                .iter()
                .all(|d| !version.observed(*d) || undos.was_undone(*d, version))
    }
}
3006
3007impl sum_tree::Item for Fragment {
3008 type Summary = FragmentSummary;
3009
3010 fn summary(&self, _cx: &Option<clock::Global>) -> Self::Summary {
3011 let mut max_version = clock::Global::new();
3012 max_version.observe(self.timestamp);
3013 for deletion in &self.deletions {
3014 max_version.observe(*deletion);
3015 }
3016 max_version.join(&self.max_undos);
3017
3018 let mut min_insertion_version = clock::Global::new();
3019 min_insertion_version.observe(self.timestamp);
3020 let max_insertion_version = min_insertion_version.clone();
3021 if self.visible {
3022 FragmentSummary {
3023 max_id: self.id.clone(),
3024 text: FragmentTextSummary {
3025 visible: self.len as usize,
3026 deleted: 0,
3027 },
3028 max_version,
3029 min_insertion_version,
3030 max_insertion_version,
3031 }
3032 } else {
3033 FragmentSummary {
3034 max_id: self.id.clone(),
3035 text: FragmentTextSummary {
3036 visible: 0,
3037 deleted: self.len as usize,
3038 },
3039 max_version,
3040 min_insertion_version,
3041 max_insertion_version,
3042 }
3043 }
3044 }
3045}
3046
impl sum_tree::Summary for FragmentSummary {
    type Context<'a> = &'a Option<clock::Global>;

    fn zero(_cx: Self::Context<'_>) -> Self {
        Default::default()
    }

    /// Combines two summaries: the right-hand `max_id` is adopted, text
    /// lengths add, and the version vectors are joined (met, for the minimum
    /// insertion version).
    fn add_summary(&mut self, other: &Self, _: Self::Context<'_>) {
        self.max_id.assign(&other.max_id);
        self.text.visible += &other.text.visible;
        self.text.deleted += &other.text.deleted;
        self.max_version.join(&other.max_version);
        self.min_insertion_version
            .meet(&other.min_insertion_version);
        self.max_insertion_version
            .join(&other.max_insertion_version);
    }
}
3065
impl Default for FragmentSummary {
    /// The empty summary: minimum locator, zero text lengths, and empty
    /// version vectors.
    fn default() -> Self {
        FragmentSummary {
            max_id: Locator::min(),
            text: FragmentTextSummary::default(),
            max_version: clock::Global::new(),
            min_insertion_version: clock::Global::new(),
            max_insertion_version: clock::Global::new(),
        }
    }
}
3077
impl sum_tree::Item for InsertionFragment {
    type Summary = InsertionFragmentKey;

    /// An insertion fragment's summary is simply its key: the insertion
    /// timestamp and the split offset within that insertion.
    fn summary(&self, _cx: ()) -> Self::Summary {
        InsertionFragmentKey {
            timestamp: self.timestamp,
            split_offset: self.split_offset,
        }
    }
}

impl sum_tree::KeyedItem for InsertionFragment {
    type Key = InsertionFragmentKey;

    fn key(&self) -> Self::Key {
        sum_tree::Item::summary(self, ())
    }
}
3096
impl InsertionFragment {
    /// Builds the insertion-index record for a fragment, keyed by insertion
    /// timestamp and the fragment's offset within that insertion.
    fn new(fragment: &Fragment) -> Self {
        Self {
            timestamp: fragment.timestamp,
            split_offset: fragment.insertion_offset,
            fragment_id: fragment.id.clone(),
        }
    }

    /// Convenience for inserting a fragment's record into the insertions tree.
    fn insert_new(fragment: &Fragment) -> sum_tree::Edit<Self> {
        sum_tree::Edit::Insert(Self::new(fragment))
    }
}
3110
impl sum_tree::ContextLessSummary for InsertionFragmentKey {
    fn zero() -> Self {
        InsertionFragmentKey {
            timestamp: Lamport::MIN,
            split_offset: 0,
        }
    }

    /// Keys accumulate left-to-right, so the rightmost key in a subtree
    /// simply replaces the accumulator.
    fn add_summary(&mut self, summary: &Self) {
        *self = *summary;
    }
}
3123
/// A byte offset that counts both visible and deleted text (see the
/// `Dimension` impl, which accumulates `visible + deleted`).
#[derive(Copy, Clone, Debug, Default, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct FullOffset(pub usize);

impl ops::AddAssign<usize> for FullOffset {
    fn add_assign(&mut self, rhs: usize) {
        self.0 += rhs;
    }
}

impl ops::Add<usize> for FullOffset {
    type Output = Self;

    fn add(mut self, rhs: usize) -> Self::Output {
        self += rhs;
        self
    }
}

impl ops::Sub for FullOffset {
    // Subtracting two full offsets yields a plain byte distance.
    type Output = usize;

    fn sub(self, rhs: Self) -> Self::Output {
        self.0 - rhs.0
    }
}
3149
3150impl sum_tree::Dimension<'_, FragmentSummary> for usize {
3151 fn zero(_: &Option<clock::Global>) -> Self {
3152 Default::default()
3153 }
3154
3155 fn add_summary(&mut self, summary: &FragmentSummary, _: &Option<clock::Global>) {
3156 *self += summary.text.visible;
3157 }
3158}
3159
3160impl sum_tree::Dimension<'_, FragmentSummary> for FullOffset {
3161 fn zero(_: &Option<clock::Global>) -> Self {
3162 Default::default()
3163 }
3164
3165 fn add_summary(&mut self, summary: &FragmentSummary, _: &Option<clock::Global>) {
3166 self.0 += summary.text.visible + summary.text.deleted;
3167 }
3168}
3169
/// Tracks the locator of the last fragment consumed so far; `max_id` is
/// presumably the rightmost locator in the summarized subtree — so the
/// running value is always the locator of the most recent fragment.
impl<'a> sum_tree::Dimension<'a, FragmentSummary> for Option<&'a Locator> {
    fn zero(_: &Option<clock::Global>) -> Self {
        Default::default()
    }

    fn add_summary(&mut self, summary: &'a FragmentSummary, _: &Option<clock::Global>) {
        *self = Some(&summary.max_id);
    }
}

/// Allows seeking through the fragment tree by an offset into the *visible*
/// text only.
impl sum_tree::SeekTarget<'_, FragmentSummary, FragmentTextSummary> for usize {
    fn cmp(
        &self,
        cursor_location: &FragmentTextSummary,
        _: &Option<clock::Global>,
    ) -> cmp::Ordering {
        // Path-qualified to avoid ambiguity with `SeekTarget::cmp` itself.
        Ord::cmp(self, &cursor_location.visible)
    }
}
3189
3190#[derive(Copy, Clone, Debug, Eq, PartialEq)]
3191enum VersionedFullOffset {
3192 Offset(FullOffset),
3193 Invalid,
3194}
3195
3196impl VersionedFullOffset {
3197 fn full_offset(&self) -> FullOffset {
3198 if let Self::Offset(position) = self {
3199 *position
3200 } else {
3201 panic!("invalid version")
3202 }
3203 }
3204}
3205
3206impl Default for VersionedFullOffset {
3207 fn default() -> Self {
3208 Self::Offset(Default::default())
3209 }
3210}
3211
/// Accumulates the full offset (visible + deleted bytes) of fragments, but
/// only counting fragments whose insertions exist in the version supplied as
/// the dimension context.
impl<'a> sum_tree::Dimension<'a, FragmentSummary> for VersionedFullOffset {
    fn zero(_cx: &Option<clock::Global>) -> Self {
        Default::default()
    }

    fn add_summary(&mut self, summary: &'a FragmentSummary, cx: &Option<clock::Global>) {
        if let Self::Offset(offset) = self {
            // The context must carry the version being measured against.
            let version = cx.as_ref().unwrap();
            if version.observed_all(&summary.max_insertion_version) {
                // Every insertion in this subtree exists in `version`, so the
                // whole span counts toward the offset.
                *offset += summary.text.visible + summary.text.deleted;
            } else if version.observed_any(&summary.min_insertion_version) {
                // The subtree mixes observed and unobserved insertions; a
                // single total can't be computed at this granularity.
                *self = Self::Invalid;
            }
            // Otherwise none of the subtree's insertions are visible in
            // `version`, so it contributes nothing.
        }
    }
}

/// Orders a versioned-offset seek target against the cursor's accumulated
/// position. The seek target itself must always be a valid `Offset`.
impl sum_tree::SeekTarget<'_, FragmentSummary, Self> for VersionedFullOffset {
    fn cmp(&self, cursor_position: &Self, _: &Option<clock::Global>) -> cmp::Ordering {
        match (self, cursor_position) {
            (Self::Offset(a), Self::Offset(b)) => Ord::cmp(a, b),
            // An invalid cursor position sorts after every valid target.
            (Self::Offset(_), Self::Invalid) => cmp::Ordering::Less,
            // Seek targets are built from concrete offsets, never `Invalid`.
            (Self::Invalid, _) => unreachable!(),
        }
    }
}
3238
3239impl Operation {
3240 fn replica_id(&self) -> ReplicaId {
3241 operation_queue::Operation::lamport_timestamp(self).replica_id
3242 }
3243
3244 pub fn timestamp(&self) -> clock::Lamport {
3245 match self {
3246 Operation::Edit(edit) => edit.timestamp,
3247 Operation::Undo(undo) => undo.timestamp,
3248 }
3249 }
3250
3251 pub fn as_edit(&self) -> Option<&EditOperation> {
3252 match self {
3253 Operation::Edit(edit) => Some(edit),
3254 _ => None,
3255 }
3256 }
3257
3258 pub fn is_edit(&self) -> bool {
3259 matches!(self, Operation::Edit { .. })
3260 }
3261}
3262
3263impl operation_queue::Operation for Operation {
3264 fn lamport_timestamp(&self) -> clock::Lamport {
3265 match self {
3266 Operation::Edit(edit) => edit.timestamp,
3267 Operation::Undo(undo) => undo.timestamp,
3268 }
3269 }
3270}
3271
3272pub trait ToOffset {
3273 fn to_offset(&self, snapshot: &BufferSnapshot) -> usize;
3274 /// Turns this point into the next offset in the buffer that comes after this, respecting utf8 boundaries.
3275 fn to_next_offset(&self, snapshot: &BufferSnapshot) -> usize {
3276 snapshot
3277 .visible_text
3278 .ceil_char_boundary(self.to_offset(snapshot) + 1)
3279 }
3280 /// Turns this point into the previous offset in the buffer that comes before this, respecting utf8 boundaries.
3281 fn to_previous_offset(&self, snapshot: &BufferSnapshot) -> usize {
3282 snapshot
3283 .visible_text
3284 .floor_char_boundary(self.to_offset(snapshot).saturating_sub(1))
3285 }
3286}
3287
impl ToOffset for Point {
    /// Converts a row/column `Point` to its byte offset via the snapshot.
    #[inline]
    fn to_offset(&self, snapshot: &BufferSnapshot) -> usize {
        snapshot.point_to_offset(*self)
    }
}
3294
3295impl ToOffset for usize {
3296 #[track_caller]
3297 fn to_offset(&self, snapshot: &BufferSnapshot) -> usize {
3298 if !snapshot
3299 .as_rope()
3300 .assert_char_boundary::<{ cfg!(debug_assertions) }>(*self)
3301 {
3302 snapshot.as_rope().floor_char_boundary(*self)
3303 } else {
3304 *self
3305 }
3306 }
3307}
3308
impl ToOffset for Anchor {
    /// Resolves the anchor to its current byte offset in `snapshot`.
    #[inline]
    fn to_offset(&self, snapshot: &BufferSnapshot) -> usize {
        snapshot.summary_for_anchor(self)
    }
}

/// Blanket impl so references to convertible positions convert too.
impl<T: ToOffset> ToOffset for &T {
    #[inline]
    fn to_offset(&self, content: &BufferSnapshot) -> usize {
        (*self).to_offset(content)
    }
}

impl ToOffset for PointUtf16 {
    /// Converts a UTF-16 row/column position to a byte offset.
    #[inline]
    fn to_offset(&self, snapshot: &BufferSnapshot) -> usize {
        snapshot.point_utf16_to_offset(*self)
    }
}

impl ToOffset for Unclipped<PointUtf16> {
    /// Converts an `Unclipped` UTF-16 position to a byte offset; the
    /// snapshot handles positions that lie outside the buffer contents.
    #[inline]
    fn to_offset(&self, snapshot: &BufferSnapshot) -> usize {
        snapshot.unclipped_point_utf16_to_offset(*self)
    }
}
3336
/// Conversion of a buffer position into a row/column [`Point`].
pub trait ToPoint {
    fn to_point(&self, snapshot: &BufferSnapshot) -> Point;
}

impl ToPoint for Anchor {
    /// Resolves the anchor to its current row/column position.
    #[inline]
    fn to_point(&self, snapshot: &BufferSnapshot) -> Point {
        snapshot.summary_for_anchor(self)
    }
}

impl ToPoint for usize {
    /// Interprets the value as a byte offset and converts it to row/column.
    #[inline]
    fn to_point(&self, snapshot: &BufferSnapshot) -> Point {
        snapshot.offset_to_point(*self)
    }
}

impl ToPoint for Point {
    /// Identity conversion.
    #[inline]
    fn to_point(&self, _: &BufferSnapshot) -> Point {
        *self
    }
}

impl ToPoint for Unclipped<PointUtf16> {
    /// Converts an `Unclipped` UTF-16 position to a row/column point.
    #[inline]
    fn to_point(&self, snapshot: &BufferSnapshot) -> Point {
        snapshot.unclipped_point_utf16_to_point(*self)
    }
}
3368
/// Conversion of a buffer position into a UTF-16 row/column [`PointUtf16`].
pub trait ToPointUtf16 {
    fn to_point_utf16(&self, snapshot: &BufferSnapshot) -> PointUtf16;
}

impl ToPointUtf16 for Anchor {
    /// Resolves the anchor to its current UTF-16 row/column position.
    #[inline]
    fn to_point_utf16(&self, snapshot: &BufferSnapshot) -> PointUtf16 {
        snapshot.summary_for_anchor(self)
    }
}

impl ToPointUtf16 for usize {
    /// Interprets the value as a byte offset and converts it to UTF-16
    /// row/column.
    #[inline]
    fn to_point_utf16(&self, snapshot: &BufferSnapshot) -> PointUtf16 {
        snapshot.offset_to_point_utf16(*self)
    }
}

impl ToPointUtf16 for PointUtf16 {
    /// Identity conversion.
    #[inline]
    fn to_point_utf16(&self, _: &BufferSnapshot) -> PointUtf16 {
        *self
    }
}

impl ToPointUtf16 for Point {
    /// Converts a UTF-8 row/column position to its UTF-16 equivalent.
    #[inline]
    fn to_point_utf16(&self, snapshot: &BufferSnapshot) -> PointUtf16 {
        snapshot.point_to_point_utf16(*self)
    }
}
3400
/// Conversion of a buffer position into a UTF-16 code-unit offset.
pub trait ToOffsetUtf16 {
    fn to_offset_utf16(&self, snapshot: &BufferSnapshot) -> OffsetUtf16;
}

impl ToOffsetUtf16 for Anchor {
    /// Resolves the anchor to its current UTF-16 offset.
    #[inline]
    fn to_offset_utf16(&self, snapshot: &BufferSnapshot) -> OffsetUtf16 {
        snapshot.summary_for_anchor(self)
    }
}

impl ToOffsetUtf16 for usize {
    /// Interprets the value as a byte offset and converts it to UTF-16 code
    /// units.
    #[inline]
    fn to_offset_utf16(&self, snapshot: &BufferSnapshot) -> OffsetUtf16 {
        snapshot.offset_to_offset_utf16(*self)
    }
}

impl ToOffsetUtf16 for OffsetUtf16 {
    /// Identity conversion.
    #[inline]
    fn to_offset_utf16(&self, _snapshot: &BufferSnapshot) -> OffsetUtf16 {
        *self
    }
}
3425
/// Construction of a concrete position type from an [`Anchor`] — the inverse
/// direction of the `To*` conversion traits, used by generic code that
/// resolves anchors into whichever coordinate type the caller requested.
pub trait FromAnchor {
    fn from_anchor(anchor: &Anchor, snapshot: &BufferSnapshot) -> Self;
}

impl FromAnchor for Anchor {
    /// Identity conversion.
    #[inline]
    fn from_anchor(anchor: &Anchor, _snapshot: &BufferSnapshot) -> Self {
        *anchor
    }
}

impl FromAnchor for Point {
    /// Resolves the anchor to a row/column position.
    #[inline]
    fn from_anchor(anchor: &Anchor, snapshot: &BufferSnapshot) -> Self {
        snapshot.summary_for_anchor(anchor)
    }
}

impl FromAnchor for PointUtf16 {
    /// Resolves the anchor to a UTF-16 row/column position.
    #[inline]
    fn from_anchor(anchor: &Anchor, snapshot: &BufferSnapshot) -> Self {
        snapshot.summary_for_anchor(anchor)
    }
}

impl FromAnchor for usize {
    /// Resolves the anchor to a byte offset.
    #[inline]
    fn from_anchor(anchor: &Anchor, snapshot: &BufferSnapshot) -> Self {
        snapshot.summary_for_anchor(anchor)
    }
}
3457
/// The sequence of characters used to terminate lines in the buffer's
/// on-disk representation.
///
/// `Eq` and `Hash` are derived in addition to `PartialEq`: equality on this
/// fieldless enum is total, and the extra derives let it be used as a map key
/// or in exhaustive comparisons.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub enum LineEnding {
    /// `"\n"` (LF).
    Unix,
    /// `"\r\n"` (CRLF).
    Windows,
}
3463
3464impl Default for LineEnding {
3465 fn default() -> Self {
3466 #[cfg(unix)]
3467 return Self::Unix;
3468
3469 #[cfg(not(unix))]
3470 return Self::Windows;
3471 }
3472}
3473
3474impl LineEnding {
3475 pub fn as_str(&self) -> &'static str {
3476 match self {
3477 LineEnding::Unix => "\n",
3478 LineEnding::Windows => "\r\n",
3479 }
3480 }
3481
3482 pub fn label(&self) -> &'static str {
3483 match self {
3484 LineEnding::Unix => "LF",
3485 LineEnding::Windows => "CRLF",
3486 }
3487 }
3488
3489 pub fn detect(text: &str) -> Self {
3490 let mut max_ix = cmp::min(text.len(), 1000);
3491 while !text.is_char_boundary(max_ix) {
3492 max_ix -= 1;
3493 }
3494
3495 if let Some(ix) = text[..max_ix].find(['\n']) {
3496 if ix > 0 && text.as_bytes()[ix - 1] == b'\r' {
3497 Self::Windows
3498 } else {
3499 Self::Unix
3500 }
3501 } else {
3502 Self::default()
3503 }
3504 }
3505
3506 pub fn normalize(text: &mut String) {
3507 if let Cow::Owned(replaced) = LINE_SEPARATORS_REGEX.replace_all(text, "\n") {
3508 *text = replaced;
3509 }
3510 }
3511
3512 pub fn normalize_arc(text: Arc<str>) -> Arc<str> {
3513 if let Cow::Owned(replaced) = LINE_SEPARATORS_REGEX.replace_all(&text, "\n") {
3514 replaced.into()
3515 } else {
3516 text
3517 }
3518 }
3519
3520 pub fn normalize_cow(text: Cow<str>) -> Cow<str> {
3521 if let Cow::Owned(replaced) = LINE_SEPARATORS_REGEX.replace_all(&text, "\n") {
3522 replaced.into()
3523 } else {
3524 text
3525 }
3526 }
3527}
3528
/// Iterates over the text of `rope` in chunks, with every line separator
/// rewritten to `line_ending`.
///
/// Relies on `str::lines()`, which splits on `\n` and strips a trailing `\r`,
/// so both `\n` and `\r\n` in the source are replaced. NOTE(review): if a
/// rope chunk boundary fell between a `\r` and its `\n`, that `\r` would be
/// emitted verbatim — presumably `Rope` chunking never splits a `\r\n` pair;
/// verify against `Rope`'s chunk-boundary guarantees.
pub fn chunks_with_line_ending(rope: &Rope, line_ending: LineEnding) -> impl Iterator<Item = &str> {
    rope.chunks().flat_map(move |chunk| {
        // Becomes true once the first line of this chunk has been emitted;
        // every subsequent line is then preceded by the separator.
        let mut newline = false;
        // `lines()` drops a trailing newline, so re-append the separator when
        // the chunk ends with one.
        let end_with_newline = chunk.ends_with('\n').then_some(line_ending.as_str());
        chunk
            .lines()
            .flat_map(move |line| {
                let ending = if newline {
                    Some(line_ending.as_str())
                } else {
                    None
                };
                newline = true;
                ending.into_iter().chain([line])
            })
            .chain(end_with_newline)
    })
}
3547
3548#[cfg(debug_assertions)]
3549pub mod debug {
3550 use super::*;
3551 use parking_lot::Mutex;
3552 use std::any::TypeId;
3553 use std::hash::{Hash, Hasher};
3554
3555 static GLOBAL_DEBUG_RANGES: Mutex<Option<GlobalDebugRanges>> = Mutex::new(None);
3556
3557 pub struct GlobalDebugRanges {
3558 pub ranges: Vec<DebugRange>,
3559 key_to_occurrence_index: HashMap<Key, usize>,
3560 next_occurrence_index: usize,
3561 }
3562
3563 pub struct DebugRange {
3564 key: Key,
3565 pub ranges: Vec<Range<Anchor>>,
3566 pub value: Arc<str>,
3567 pub occurrence_index: usize,
3568 }
3569
3570 #[derive(Debug, Clone, PartialEq, Eq, Hash)]
3571 struct Key {
3572 type_id: TypeId,
3573 hash: u64,
3574 }
3575
3576 impl GlobalDebugRanges {
3577 pub fn with_locked<R>(f: impl FnOnce(&mut Self) -> R) -> R {
3578 let mut state = GLOBAL_DEBUG_RANGES.lock();
3579 if state.is_none() {
3580 *state = Some(GlobalDebugRanges {
3581 ranges: Vec::new(),
3582 key_to_occurrence_index: HashMap::default(),
3583 next_occurrence_index: 0,
3584 });
3585 }
3586 if let Some(global_debug_ranges) = state.as_mut() {
3587 f(global_debug_ranges)
3588 } else {
3589 unreachable!()
3590 }
3591 }
3592
3593 pub fn insert<K: Hash + 'static>(
3594 &mut self,
3595 key: &K,
3596 ranges: Vec<Range<Anchor>>,
3597 value: Arc<str>,
3598 ) {
3599 let occurrence_index = *self
3600 .key_to_occurrence_index
3601 .entry(Key::new(key))
3602 .or_insert_with(|| {
3603 let occurrence_index = self.next_occurrence_index;
3604 self.next_occurrence_index += 1;
3605 occurrence_index
3606 });
3607 let key = Key::new(key);
3608 let existing = self
3609 .ranges
3610 .iter()
3611 .enumerate()
3612 .rfind(|(_, existing)| existing.key == key);
3613 if let Some((existing_ix, _)) = existing {
3614 self.ranges.remove(existing_ix);
3615 }
3616 self.ranges.push(DebugRange {
3617 ranges,
3618 key,
3619 value,
3620 occurrence_index,
3621 });
3622 }
3623
3624 pub fn remove<K: Hash + 'static>(&mut self, key: &K) {
3625 self.remove_impl(&Key::new(key));
3626 }
3627
3628 fn remove_impl(&mut self, key: &Key) {
3629 let existing = self
3630 .ranges
3631 .iter()
3632 .enumerate()
3633 .rfind(|(_, existing)| &existing.key == key);
3634 if let Some((existing_ix, _)) = existing {
3635 self.ranges.remove(existing_ix);
3636 }
3637 }
3638
3639 pub fn remove_all_with_key_type<K: 'static>(&mut self) {
3640 self.ranges
3641 .retain(|item| item.key.type_id != TypeId::of::<K>());
3642 }
3643 }
3644
3645 impl Key {
3646 fn new<K: Hash + 'static>(key: &K) -> Self {
3647 let type_id = TypeId::of::<K>();
3648 let mut hasher = collections::FxHasher::default();
3649 key.hash(&mut hasher);
3650 Key {
3651 type_id,
3652 hash: hasher.finish(),
3653 }
3654 }
3655 }
3656
3657 pub trait ToDebugRanges {
3658 fn to_debug_ranges(&self, snapshot: &BufferSnapshot) -> Vec<Range<usize>>;
3659 }
3660
3661 impl<T: ToOffset> ToDebugRanges for T {
3662 fn to_debug_ranges(&self, snapshot: &BufferSnapshot) -> Vec<Range<usize>> {
3663 [self.to_offset(snapshot)].to_debug_ranges(snapshot)
3664 }
3665 }
3666
3667 impl<T: ToOffset + Clone> ToDebugRanges for Range<T> {
3668 fn to_debug_ranges(&self, snapshot: &BufferSnapshot) -> Vec<Range<usize>> {
3669 [self.clone()].to_debug_ranges(snapshot)
3670 }
3671 }
3672
3673 impl<T: ToOffset> ToDebugRanges for Vec<T> {
3674 fn to_debug_ranges(&self, snapshot: &BufferSnapshot) -> Vec<Range<usize>> {
3675 self.as_slice().to_debug_ranges(snapshot)
3676 }
3677 }
3678
3679 impl<T: ToOffset> ToDebugRanges for Vec<Range<T>> {
3680 fn to_debug_ranges(&self, snapshot: &BufferSnapshot) -> Vec<Range<usize>> {
3681 self.as_slice().to_debug_ranges(snapshot)
3682 }
3683 }
3684
3685 impl<T: ToOffset> ToDebugRanges for [T] {
3686 fn to_debug_ranges(&self, snapshot: &BufferSnapshot) -> Vec<Range<usize>> {
3687 self.iter()
3688 .map(|item| {
3689 let offset = item.to_offset(snapshot);
3690 offset..offset
3691 })
3692 .collect()
3693 }
3694 }
3695
3696 impl<T: ToOffset> ToDebugRanges for [Range<T>] {
3697 fn to_debug_ranges(&self, snapshot: &BufferSnapshot) -> Vec<Range<usize>> {
3698 self.iter()
3699 .map(|range| range.start.to_offset(snapshot)..range.end.to_offset(snapshot))
3700 .collect()
3701 }
3702 }
3703}