1mod anchor;
2pub mod locator;
3#[cfg(any(test, feature = "test-support"))]
4pub mod network;
5pub mod operation_queue;
6mod patch;
7mod selection;
8pub mod subscription;
9#[cfg(test)]
10mod tests;
11mod undo_map;
12
13pub use anchor::*;
14use anyhow::{Context as _, Result};
15use clock::Lamport;
16pub use clock::ReplicaId;
17use collections::{HashMap, HashSet};
18use locator::Locator;
19use operation_queue::OperationQueue;
20pub use patch::Patch;
21use postage::{oneshot, prelude::*};
22
23use regex::Regex;
24pub use rope::*;
25pub use selection::*;
26use smallvec::SmallVec;
27use std::{
28 borrow::Cow,
29 cmp::{self, Ordering, Reverse},
30 fmt::Display,
31 future::Future,
32 iter::Iterator,
33 num::NonZeroU64,
34 ops::{self, Deref, Range, Sub},
35 str,
36 sync::{Arc, LazyLock},
37 time::{Duration, Instant},
38};
39pub use subscription::*;
40pub use sum_tree::Bias;
41use sum_tree::{Dimensions, FilterCursor, SumTree, TreeMap, TreeSet};
42use undo_map::UndoMap;
43use util::debug_panic;
44
45#[cfg(any(test, feature = "test-support"))]
46use util::RandomCharIter;
47
/// Matches Windows (`\r\n`) and legacy Mac (`\r`) line separators so that
/// buffer text can be normalized to `\n`-only line endings.
static LINE_SEPARATORS_REGEX: LazyLock<Regex> =
    LazyLock::new(|| Regex::new(r"\r\n|\r").expect("Failed to create LINE_SEPARATORS_REGEX"));
50
/// The maximum length of a single insertion operation.
/// Fragments larger than this will be split into multiple smaller
/// fragments. This allows us to use relative `u32` offsets instead of `usize`,
/// reducing memory usage.
// A tiny limit is used under `cfg(test)` so the splitting logic is actually
// exercised by the test suite.
const MAX_INSERTION_LEN: usize = if cfg!(test) { 16 } else { u32::MAX as usize };

/// Transactions are identified by the Lamport timestamp allocated when they
/// are started.
pub type TransactionId = clock::Lamport;
58
/// A mutable, CRDT-backed text buffer that can apply both local and remote
/// edit operations.
pub struct Buffer {
    // The current state of the buffer's contents and metadata.
    snapshot: BufferSnapshot,
    // Undo/redo stacks plus a log of all operations seen so far.
    history: History,
    // Remote operations that cannot be applied yet because operations they
    // depend on have not arrived.
    deferred_ops: OperationQueue<Operation>,
    // Replicas that currently have at least one deferred operation.
    deferred_replicas: HashSet<ReplicaId>,
    pub lamport_clock: clock::Lamport,
    // Subscribers that are notified with a patch of edits after changes.
    subscriptions: Topic<usize>,
    // Senders to notify once a given edit id has been applied locally.
    edit_id_resolvers: HashMap<clock::Lamport, Vec<oneshot::Sender<()>>>,
    // Senders to notify once the buffer has observed a given version.
    wait_for_version_txs: Vec<(clock::Global, oneshot::Sender<()>)>,
}
69
/// A stable identifier for a buffer, shared across replicas.
/// Wraps `NonZeroU64` so that `Option<BufferId>` takes no extra space.
#[repr(transparent)]
#[derive(Clone, Copy, Debug, Hash, PartialEq, PartialOrd, Ord, Eq)]
pub struct BufferId(NonZeroU64);
73
74impl Display for BufferId {
75 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
76 write!(f, "{}", self.0)
77 }
78}
79
impl From<NonZeroU64> for BufferId {
    /// Wraps an id that is already known to be non-zero.
    fn from(id: NonZeroU64) -> Self {
        BufferId(id)
    }
}
85
86impl BufferId {
87 /// Returns Err if `id` is outside of BufferId domain.
88 pub fn new(id: u64) -> anyhow::Result<Self> {
89 let id = NonZeroU64::new(id).context("Buffer id cannot be 0.")?;
90 Ok(Self(id))
91 }
92
93 /// Increments this buffer id, returning the old value.
94 /// So that's a post-increment operator in disguise.
95 pub fn next(&mut self) -> Self {
96 let old = *self;
97 self.0 = self.0.saturating_add(1);
98 old
99 }
100
101 pub fn to_proto(self) -> u64 {
102 self.into()
103 }
104}
105
106impl From<BufferId> for u64 {
107 fn from(id: BufferId) -> Self {
108 id.0.get()
109 }
110}
111
/// An immutable view of the buffer's contents and CRDT state at a point in
/// time. Cloning is cheap enough to hand out snapshots freely.
#[derive(Clone)]
pub struct BufferSnapshot {
    // The visible (non-deleted) text of the buffer.
    visible_text: Rope,
    // The concatenated text of all deleted fragments.
    deleted_text: Rope,
    // All fragments — visible and deleted — in document order.
    fragments: SumTree<Fragment>,
    // Index mapping (insertion timestamp, split offset) to fragment locators.
    insertions: SumTree<InsertionFragment>,
    // Slices of insertions recorded as edits touch fragments.
    insertion_slices: TreeSet<InsertionSlice>,
    // Tracks how many times each edit has been undone.
    undo_map: UndoMap,
    pub version: clock::Global,
    remote_id: BufferId,
    replica_id: ReplicaId,
    line_ending: LineEnding,
}
125
/// An undo/redo stack entry: a transaction plus the timing metadata used to
/// decide whether adjacent transactions should be grouped into one undo step.
#[derive(Clone, Debug)]
pub struct HistoryEntry {
    transaction: Transaction,
    // When the first edit of this transaction happened.
    first_edit_at: Instant,
    // When the most recent edit of this transaction happened.
    last_edit_at: Instant,
    // When true, this entry is never merged into earlier entries during
    // grouping (see `History::group`).
    suppress_grouping: bool,
}
133
/// A group of edits that is undone and redone as a single unit.
#[derive(Clone, Debug)]
pub struct Transaction {
    /// Unique id of this transaction (a Lamport timestamp).
    pub id: TransactionId,
    /// Ids of the edit operations that belong to this transaction.
    pub edit_ids: Vec<clock::Lamport>,
    /// The buffer version at the moment the transaction started.
    pub start: clock::Global,
}
140
141impl Transaction {
142 pub fn merge_in(&mut self, other: Transaction) {
143 self.edit_ids.extend(other.edit_ids);
144 }
145}
146
impl HistoryEntry {
    /// The id of this entry's transaction.
    pub fn transaction_id(&self) -> TransactionId {
        self.transaction.id
    }
}
152
/// Undo/redo state plus a log of every operation this buffer has seen.
struct History {
    // The buffer's original text, before any operations were applied.
    base_text: Rope,
    // Every operation ever applied, keyed by its Lamport timestamp.
    operations: TreeMap<clock::Lamport, Operation>,
    undo_stack: Vec<HistoryEntry>,
    redo_stack: Vec<HistoryEntry>,
    // Nesting depth of in-progress transactions; edits are recorded into the
    // newest undo-stack entry while this is non-zero.
    transaction_depth: usize,
    // Transactions whose edits occur within this interval of one another are
    // candidates for grouping into a single undo step.
    group_interval: Duration,
}
161
/// A contiguous range within a single insertion, tagged with the edit
/// operation that produced it.
#[derive(Clone, Debug, Eq, PartialEq)]
struct InsertionSlice {
    // The two Lamport timestamps below are stored field-by-field (value and
    // replica id inlined) so the replica ids can share the same alignment,
    // saving 4 bytes of space per slice.
    // edit_id: clock::Lamport,
    edit_id_value: clock::Seq,
    edit_id_replica_id: ReplicaId,
    // insertion_id: clock::Lamport,
    insertion_id_value: clock::Seq,
    insertion_id_replica_id: ReplicaId,
    range: Range<u32>,
}
173
174impl Ord for InsertionSlice {
175 fn cmp(&self, other: &Self) -> Ordering {
176 Lamport {
177 value: self.edit_id_value,
178 replica_id: self.edit_id_replica_id,
179 }
180 .cmp(&Lamport {
181 value: other.edit_id_value,
182 replica_id: other.edit_id_replica_id,
183 })
184 .then_with(|| {
185 Lamport {
186 value: self.insertion_id_value,
187 replica_id: self.insertion_id_replica_id,
188 }
189 .cmp(&Lamport {
190 value: other.insertion_id_value,
191 replica_id: other.insertion_id_replica_id,
192 })
193 })
194 .then_with(|| self.range.start.cmp(&other.range.start))
195 .then_with(|| self.range.end.cmp(&other.range.end))
196 }
197}
198
impl PartialOrd for InsertionSlice {
    /// Delegates to the total order defined by `Ord`.
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}
204
205impl InsertionSlice {
206 fn from_fragment(edit_id: clock::Lamport, fragment: &Fragment) -> Self {
207 Self {
208 edit_id_value: edit_id.value,
209 edit_id_replica_id: edit_id.replica_id,
210 insertion_id_value: fragment.timestamp.value,
211 insertion_id_replica_id: fragment.timestamp.replica_id,
212 range: fragment.insertion_offset..fragment.insertion_offset + fragment.len,
213 }
214 }
215}
216
impl History {
    /// Creates an empty history with the given base text.
    pub fn new(base_text: Rope) -> Self {
        Self {
            base_text,
            operations: Default::default(),
            undo_stack: Vec::new(),
            redo_stack: Vec::new(),
            transaction_depth: 0,
            // Don't group transactions in tests unless we opt in, because it's a footgun.
            group_interval: if cfg!(any(test, feature = "test-support")) {
                Duration::ZERO
            } else {
                Duration::from_millis(300)
            },
        }
    }

    /// Records `op` in the operation log, keyed by its timestamp.
    fn push(&mut self, op: Operation) {
        self.operations.insert(op.timestamp(), op);
    }

    /// Increments the transaction nesting depth. Only the outermost call
    /// pushes a new entry onto the undo stack and returns its id; nested
    /// calls return `None`.
    fn start_transaction(
        &mut self,
        start: clock::Global,
        now: Instant,
        clock: &mut clock::Lamport,
    ) -> Option<TransactionId> {
        self.transaction_depth += 1;
        if self.transaction_depth == 1 {
            let id = clock.tick();
            self.undo_stack.push(HistoryEntry {
                transaction: Transaction {
                    id,
                    start,
                    edit_ids: Default::default(),
                },
                first_edit_at: now,
                last_edit_at: now,
                suppress_grouping: false,
            });
            Some(id)
        } else {
            None
        }
    }

    /// Decrements the transaction nesting depth. When the outermost
    /// transaction ends: an entry that received no edits is discarded;
    /// otherwise the redo stack is cleared and the finished entry returned.
    fn end_transaction(&mut self, now: Instant) -> Option<&HistoryEntry> {
        assert_ne!(self.transaction_depth, 0);
        self.transaction_depth -= 1;
        if self.transaction_depth == 0 {
            if self
                .undo_stack
                .last()
                .unwrap()
                .transaction
                .edit_ids
                .is_empty()
            {
                self.undo_stack.pop();
                None
            } else {
                self.redo_stack.clear();
                let entry = self.undo_stack.last_mut().unwrap();
                entry.last_edit_at = now;
                Some(entry)
            }
        } else {
            None
        }
    }

    /// Merges the trailing run of undo entries whose edits happened within
    /// `group_interval` of each other (and that don't suppress grouping) into
    /// a single entry, returning the id of the surviving transaction.
    fn group(&mut self) -> Option<TransactionId> {
        let mut count = 0;
        let mut entries = self.undo_stack.iter();
        if let Some(mut entry) = entries.next_back() {
            // Walk backwards while each previous entry is close enough in
            // time to be grouped with the one after it.
            while let Some(prev_entry) = entries.next_back() {
                if !prev_entry.suppress_grouping
                    && entry.first_edit_at - prev_entry.last_edit_at < self.group_interval
                {
                    entry = prev_entry;
                    count += 1;
                } else {
                    break;
                }
            }
        }
        self.group_trailing(count)
    }

    /// Merges all undo entries newer than `transaction_id` into it, stopping
    /// early at any entry that suppresses grouping.
    fn group_until(&mut self, transaction_id: TransactionId) {
        let mut count = 0;
        for entry in self.undo_stack.iter().rev() {
            if entry.transaction_id() == transaction_id {
                self.group_trailing(count);
                break;
            } else if entry.suppress_grouping {
                break;
            } else {
                count += 1;
            }
        }
    }

    /// Merges the last `n` undo entries into the entry immediately preceding
    /// them, returning the id of the now-last transaction (if any).
    fn group_trailing(&mut self, n: usize) -> Option<TransactionId> {
        let new_len = self.undo_stack.len() - n;
        let (entries_to_keep, entries_to_merge) = self.undo_stack.split_at_mut(new_len);
        if let Some(last_entry) = entries_to_keep.last_mut() {
            // Move every merged entry's edits into the surviving entry.
            for entry in &*entries_to_merge {
                for edit_id in &entry.transaction.edit_ids {
                    last_entry.transaction.edit_ids.push(*edit_id);
                }
            }

            // The surviving entry now spans up to the newest merged edit.
            if let Some(entry) = entries_to_merge.last_mut() {
                last_entry.last_edit_at = entry.last_edit_at;
            }
        }

        self.undo_stack.truncate(new_len);
        self.undo_stack.last().map(|e| e.transaction.id)
    }

    /// Marks the newest undo entry so later transactions are never grouped
    /// into it, returning its transaction.
    fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
        self.undo_stack.last_mut().map(|entry| {
            entry.transaction.edit_ids.shrink_to_fit();
            entry.suppress_grouping = true;
            &entry.transaction
        })
    }

    /// Pushes an already-built transaction onto the undo stack. Must be
    /// called outside of any in-progress transaction.
    fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
        assert_eq!(self.transaction_depth, 0);
        self.undo_stack.push(HistoryEntry {
            transaction,
            first_edit_at: now,
            last_edit_at: now,
            suppress_grouping: false,
        });
    }

    /// Differs from `push_transaction` in that it does not clear the redo
    /// stack. Intended to be used to create a parent transaction to merge
    /// potential child transactions into.
    ///
    /// The caller is responsible for removing it from the undo history using
    /// `forget_transaction` if no edits are merged into it. Otherwise, if edits
    /// are merged into this transaction, the caller is responsible for ensuring
    /// the redo stack is cleared. The easiest way to ensure the redo stack is
    /// cleared is to create transactions with the usual `start_transaction` and
    /// `end_transaction` methods and merging the resulting transactions into
    /// the transaction created by this method
    fn push_empty_transaction(
        &mut self,
        start: clock::Global,
        now: Instant,
        clock: &mut clock::Lamport,
    ) -> TransactionId {
        assert_eq!(self.transaction_depth, 0);
        let id = clock.tick();
        let transaction = Transaction {
            id,
            start,
            edit_ids: Vec::new(),
        };
        self.undo_stack.push(HistoryEntry {
            transaction,
            first_edit_at: now,
            last_edit_at: now,
            suppress_grouping: false,
        });
        id
    }

    /// Records an edit id into the current (innermost) transaction. Only edit
    /// operations are recorded; other operations are ignored.
    fn push_undo(&mut self, op_id: clock::Lamport) {
        assert_ne!(self.transaction_depth, 0);
        if let Some(Operation::Edit(_)) = self.operations.get(&op_id) {
            let last_transaction = self.undo_stack.last_mut().unwrap();
            last_transaction.transaction.edit_ids.push(op_id);
        }
    }

    /// Moves the newest undo entry to the redo stack and returns it.
    fn pop_undo(&mut self) -> Option<&HistoryEntry> {
        assert_eq!(self.transaction_depth, 0);
        if let Some(entry) = self.undo_stack.pop() {
            self.redo_stack.push(entry);
            self.redo_stack.last()
        } else {
            None
        }
    }

    /// Moves the given transaction (searched from the newest entry backwards)
    /// from the undo stack to the redo stack and returns it.
    fn remove_from_undo(&mut self, transaction_id: TransactionId) -> Option<&HistoryEntry> {
        assert_eq!(self.transaction_depth, 0);

        let entry_ix = self
            .undo_stack
            .iter()
            .rposition(|entry| entry.transaction.id == transaction_id)?;
        let entry = self.undo_stack.remove(entry_ix);
        self.redo_stack.push(entry);
        self.redo_stack.last()
    }

    /// Moves every undo entry from `transaction_id` (inclusive) to the top of
    /// the undo stack onto the redo stack, newest first, and returns the
    /// moved entries.
    fn remove_from_undo_until(&mut self, transaction_id: TransactionId) -> &[HistoryEntry] {
        assert_eq!(self.transaction_depth, 0);

        let redo_stack_start_len = self.redo_stack.len();
        if let Some(entry_ix) = self
            .undo_stack
            .iter()
            .rposition(|entry| entry.transaction.id == transaction_id)
        {
            self.redo_stack
                .extend(self.undo_stack.drain(entry_ix..).rev());
        }
        &self.redo_stack[redo_stack_start_len..]
    }

    /// Removes a transaction from whichever stack contains it (undo stack
    /// searched first) and returns it.
    fn forget(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
        assert_eq!(self.transaction_depth, 0);
        if let Some(entry_ix) = self
            .undo_stack
            .iter()
            .rposition(|entry| entry.transaction.id == transaction_id)
        {
            Some(self.undo_stack.remove(entry_ix).transaction)
        } else if let Some(entry_ix) = self
            .redo_stack
            .iter()
            .rposition(|entry| entry.transaction.id == transaction_id)
        {
            Some(self.redo_stack.remove(entry_ix).transaction)
        } else {
            None
        }
    }

    /// Finds a transaction by id on either stack.
    fn transaction(&self, transaction_id: TransactionId) -> Option<&Transaction> {
        let entry = self
            .undo_stack
            .iter()
            .rfind(|entry| entry.transaction.id == transaction_id)
            .or_else(|| {
                self.redo_stack
                    .iter()
                    .rfind(|entry| entry.transaction.id == transaction_id)
            })?;
        Some(&entry.transaction)
    }

    /// Mutable counterpart of `transaction`.
    fn transaction_mut(&mut self, transaction_id: TransactionId) -> Option<&mut Transaction> {
        let entry = self
            .undo_stack
            .iter_mut()
            .rfind(|entry| entry.transaction.id == transaction_id)
            .or_else(|| {
                self.redo_stack
                    .iter_mut()
                    .rfind(|entry| entry.transaction.id == transaction_id)
            })?;
        Some(&mut entry.transaction)
    }

    /// Removes `transaction` from the history and appends its edits to
    /// `destination`. If either transaction cannot be found, nothing happens
    /// (and a found source transaction is still forgotten).
    fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
        if let Some(transaction) = self.forget(transaction)
            && let Some(destination) = self.transaction_mut(destination)
        {
            destination.edit_ids.extend(transaction.edit_ids);
        }
    }

    /// Moves the newest redo entry back to the undo stack and returns it.
    fn pop_redo(&mut self) -> Option<&HistoryEntry> {
        assert_eq!(self.transaction_depth, 0);
        if let Some(entry) = self.redo_stack.pop() {
            self.undo_stack.push(entry);
            self.undo_stack.last()
        } else {
            None
        }
    }

    /// Moves every redo entry from `transaction_id` (inclusive) to the top of
    /// the redo stack back onto the undo stack, newest first, and returns the
    /// moved entries.
    fn remove_from_redo(&mut self, transaction_id: TransactionId) -> &[HistoryEntry] {
        assert_eq!(self.transaction_depth, 0);

        let undo_stack_start_len = self.undo_stack.len();
        if let Some(entry_ix) = self
            .redo_stack
            .iter()
            .rposition(|entry| entry.transaction.id == transaction_id)
        {
            self.undo_stack
                .extend(self.redo_stack.drain(entry_ix..).rev());
        }
        &self.undo_stack[undo_stack_start_len..]
    }
}
513
/// State for iterating over the edits that occurred since a given buffer
/// version, expressed in dimension `D`. The filter `F` selects which
/// fragments are visited.
struct Edits<'a, D: TextDimension, F: FnMut(&FragmentSummary) -> bool> {
    // Cursor into the buffer's visible text.
    visible_cursor: rope::Cursor<'a>,
    // Cursor into the buffer's deleted text.
    deleted_cursor: rope::Cursor<'a>,
    // Cursor over fragments, filtered by `F`; `None` when there is nothing
    // to visit.
    fragments_cursor: Option<FilterCursor<'a, 'static, F, Fragment, FragmentTextSummary>>,
    undos: &'a UndoMap,
    // The version edits are computed relative to.
    since: &'a clock::Global,
    old_end: D,
    new_end: D,
    // The (locator, offset) range of fragments to consider.
    range: Range<(&'a Locator, u32)>,
    buffer_id: BufferId,
}
525
/// The old and new extents of text affected by a single edit, each expressed
/// in some dimension `D` (e.g. byte offsets or points).
#[derive(Clone, Debug, Default, Eq, PartialEq)]
pub struct Edit<D> {
    pub old: Range<D>,
    pub new: Range<D>,
}

impl<D> Edit<D>
where
    D: PartialEq,
{
    /// True when both the old and new ranges are degenerate (zero-length).
    pub fn is_empty(&self) -> bool {
        (&self.old.start, &self.new.start) == (&self.old.end, &self.new.end)
    }
}

impl<D, DDelta> Edit<D>
where
    D: Sub<D, Output = DDelta> + Copy,
{
    /// The length of the replaced (old) range.
    pub fn old_len(&self) -> DDelta {
        self.old.end - self.old.start
    }

    /// The length of the replacement (new) range.
    pub fn new_len(&self) -> DDelta {
        self.new.end - self.new.start
    }
}

impl<D1, D2> Edit<(D1, D2)> {
    /// Splits an edit measured in a pair of dimensions into one edit per
    /// dimension.
    pub fn flatten(self) -> (Edit<D1>, Edit<D2>) {
        let Edit { old, new } = self;
        (
            Edit {
                old: old.start.0..old.end.0,
                new: new.start.0..new.end.0,
            },
            Edit {
                old: old.start.1..old.end.1,
                new: new.start.1..new.end.1,
            },
        )
    }
}
567
/// A contiguous piece of text belonging to a single insertion, positioned in
/// document order by its `Locator`.
#[derive(Eq, PartialEq, Clone, Debug)]
struct Fragment {
    // Position of the fragment within the fragment tree's total order.
    id: Locator,
    // Lamport timestamp of the insertion this fragment came from.
    timestamp: clock::Lamport,
    // Offset of this fragment within its original insertion.
    insertion_offset: u32,
    // Length of this fragment.
    len: u32,
    // Whether the fragment is currently part of the visible text.
    visible: bool,
    // Timestamps of the delete operations that removed this fragment.
    deletions: SmallVec<[clock::Lamport; 2]>,
    // The maximum version of undo operations observed for this fragment.
    max_undos: clock::Global,
}
578
/// Aggregated data for a subtree of `Fragment`s in the fragments `SumTree`.
#[derive(Eq, PartialEq, Clone, Debug)]
struct FragmentSummary {
    // Total visible/deleted lengths within the subtree.
    text: FragmentTextSummary,
    // The maximum fragment `Locator` in the subtree.
    max_id: Locator,
    max_version: clock::Global,
    min_insertion_version: clock::Global,
    max_insertion_version: clock::Global,
}
587
/// Total lengths of visible and deleted text across a run of fragments.
#[derive(Copy, Default, Clone, Debug, PartialEq, Eq)]
struct FragmentTextSummary {
    visible: usize,
    deleted: usize,
}
593
impl<'a> sum_tree::Dimension<'a, FragmentSummary> for FragmentTextSummary {
    fn zero(_: &Option<clock::Global>) -> Self {
        Default::default()
    }

    /// Accumulates both the visible and deleted lengths from `summary`.
    fn add_summary(&mut self, summary: &'a FragmentSummary, _: &Option<clock::Global>) {
        self.visible += summary.text.visible;
        self.deleted += summary.text.deleted;
    }
}
604
/// Maps a position within an insertion back to the fragment that now holds
/// that part of the insertion.
#[derive(Eq, PartialEq, Clone, Debug)]
struct InsertionFragment {
    // Lamport timestamp of the original insertion.
    timestamp: clock::Lamport,
    // Offset within the insertion at which this fragment begins.
    split_offset: u32,
    // Locator of the fragment holding this portion of the insertion.
    fragment_id: Locator,
}
611
/// Sort key for `InsertionFragment`s: insertion id first, then the offset of
/// the split within that insertion.
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)]
struct InsertionFragmentKey {
    timestamp: clock::Lamport,
    split_offset: u32,
}
617
/// An operation that can be applied locally and replicated to other replicas
/// of the buffer.
#[derive(Clone, Debug, Eq, PartialEq)]
pub enum Operation {
    /// Insertions and/or deletions of text.
    Edit(EditOperation),
    /// An undo of previously applied edits.
    Undo(UndoOperation),
}
623
/// A replicated edit: replacement text applied to a set of ranges, expressed
/// in the coordinate space of `version`.
#[derive(Clone, Debug, Eq, PartialEq)]
pub struct EditOperation {
    /// Unique id of this edit.
    pub timestamp: clock::Lamport,
    /// The buffer version the edit's ranges were computed against.
    pub version: clock::Global,
    /// The edited ranges, as full offsets (including deleted text).
    pub ranges: Vec<Range<FullOffset>>,
    /// Replacement text for each range, in the same order as `ranges`.
    pub new_text: Vec<Arc<str>>,
}
631
/// A replicated undo: updates the undo count for each referenced edit.
#[derive(Clone, Debug, Eq, PartialEq)]
pub struct UndoOperation {
    /// Unique id of this undo operation.
    pub timestamp: clock::Lamport,
    /// The buffer version this undo was computed against.
    pub version: clock::Global,
    /// Undo count per edit id.
    pub counts: HashMap<clock::Lamport, u32>,
}
638
/// Stores information about the indentation of a line (tabs and spaces).
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub struct LineIndent {
    /// Number of tab characters in the leading whitespace.
    pub tabs: u32,
    /// Number of space characters in the leading whitespace.
    pub spaces: u32,
    /// Whether the line consists solely of whitespace.
    pub line_blank: bool,
}
646
647impl LineIndent {
648 pub fn from_chunks(chunks: &mut Chunks) -> Self {
649 let mut tabs = 0;
650 let mut spaces = 0;
651 let mut line_blank = true;
652
653 'outer: while let Some(chunk) = chunks.peek() {
654 for ch in chunk.chars() {
655 if ch == '\t' {
656 tabs += 1;
657 } else if ch == ' ' {
658 spaces += 1;
659 } else {
660 if ch != '\n' {
661 line_blank = false;
662 }
663 break 'outer;
664 }
665 }
666
667 chunks.next();
668 }
669
670 Self {
671 tabs,
672 spaces,
673 line_blank,
674 }
675 }
676
677 /// Constructs a new `LineIndent` which only contains spaces.
678 pub fn spaces(spaces: u32) -> Self {
679 Self {
680 tabs: 0,
681 spaces,
682 line_blank: true,
683 }
684 }
685
686 /// Constructs a new `LineIndent` which only contains tabs.
687 pub fn tabs(tabs: u32) -> Self {
688 Self {
689 tabs,
690 spaces: 0,
691 line_blank: true,
692 }
693 }
694
695 /// Indicates whether the line is empty.
696 pub fn is_line_empty(&self) -> bool {
697 self.tabs == 0 && self.spaces == 0 && self.line_blank
698 }
699
700 /// Indicates whether the line is blank (contains only whitespace).
701 pub fn is_line_blank(&self) -> bool {
702 self.line_blank
703 }
704
705 /// Returns the number of indentation characters (tabs or spaces).
706 pub fn raw_len(&self) -> u32 {
707 self.tabs + self.spaces
708 }
709
710 /// Returns the number of indentation characters (tabs or spaces), taking tab size into account.
711 pub fn len(&self, tab_size: u32) -> u32 {
712 self.tabs * tab_size + self.spaces
713 }
714}
715
716impl From<&str> for LineIndent {
717 fn from(value: &str) -> Self {
718 Self::from_iter(value.chars())
719 }
720}
721
722impl FromIterator<char> for LineIndent {
723 fn from_iter<T: IntoIterator<Item = char>>(chars: T) -> Self {
724 let mut tabs = 0;
725 let mut spaces = 0;
726 let mut line_blank = true;
727 for c in chars {
728 if c == '\t' {
729 tabs += 1;
730 } else if c == ' ' {
731 spaces += 1;
732 } else {
733 if c != '\n' {
734 line_blank = false;
735 }
736 break;
737 }
738 }
739 Self {
740 tabs,
741 spaces,
742 line_blank,
743 }
744 }
745}
746
747impl Buffer {
748 pub fn new(replica_id: ReplicaId, remote_id: BufferId, base_text: impl Into<String>) -> Buffer {
749 let mut base_text = base_text.into();
750 let line_ending = LineEnding::detect(&base_text);
751 LineEnding::normalize(&mut base_text);
752 Self::new_normalized(replica_id, remote_id, line_ending, Rope::from(&*base_text))
753 }
754
    /// Creates a buffer from text whose line endings have already been
    /// normalized to `\n`.
    pub fn new_normalized(
        replica_id: ReplicaId,
        remote_id: BufferId,
        line_ending: LineEnding,
        normalized: Rope,
    ) -> Buffer {
        let history = History::new(normalized);
        let mut fragments = SumTree::new(&None);
        let mut insertions = SumTree::default();

        let mut lamport_clock = clock::Lamport::new(replica_id);
        let mut version = clock::Global::new();

        let visible_text = history.base_text.clone();
        if !visible_text.is_empty() {
            // Model the base text as a single insertion by the local replica,
            // observed by both the clock and the version vector.
            let insertion_timestamp = clock::Lamport::new(ReplicaId::LOCAL);
            lamport_clock.observe(insertion_timestamp);
            version.observe(insertion_timestamp);

            let mut insertion_offset: u32 = 0;
            let mut text_offset: usize = 0;
            let mut prev_locator = Locator::min();

            // Split the base text into fragments of at most MAX_INSERTION_LEN,
            // keeping every split on a char boundary so offsets stay valid.
            while text_offset < visible_text.len() {
                let target_end = visible_text.len().min(text_offset + MAX_INSERTION_LEN);
                let chunk_end = if target_end == visible_text.len() {
                    target_end
                } else {
                    visible_text.floor_char_boundary(target_end)
                };
                let chunk_len = chunk_end - text_offset;

                let fragment_id = Locator::between(&prev_locator, &Locator::max());
                let fragment = Fragment {
                    id: fragment_id.clone(),
                    timestamp: insertion_timestamp,
                    insertion_offset,
                    len: chunk_len as u32,
                    visible: true,
                    deletions: Default::default(),
                    max_undos: Default::default(),
                };
                insertions.push(InsertionFragment::new(&fragment), ());
                fragments.push(fragment, &None);

                prev_locator = fragment_id;
                insertion_offset += chunk_len as u32;
                text_offset = chunk_end;
            }
        }

        Buffer {
            snapshot: BufferSnapshot {
                replica_id,
                remote_id,
                visible_text,
                deleted_text: Rope::new(),
                line_ending,
                fragments,
                insertions,
                version,
                undo_map: Default::default(),
                insertion_slices: Default::default(),
            },
            history,
            deferred_ops: OperationQueue::new(),
            deferred_replicas: HashSet::default(),
            lamport_clock,
            subscriptions: Default::default(),
            edit_id_resolvers: Default::default(),
            wait_for_version_txs: Default::default(),
        }
    }
828
829 pub fn version(&self) -> clock::Global {
830 self.version.clone()
831 }
832
    /// A reference to the buffer's current snapshot.
    pub fn snapshot(&self) -> &BufferSnapshot {
        &self.snapshot
    }

    /// Consumes the buffer, yielding its final snapshot.
    pub fn into_snapshot(self) -> BufferSnapshot {
        self.snapshot
    }
840
    /// Creates an independent buffer that starts from this buffer's current
    /// snapshot. The branch gets a fresh history (based on the current base
    /// text) and its own Lamport clock with the dedicated branch replica id.
    pub fn branch(&self) -> Self {
        Self {
            snapshot: self.snapshot.clone(),
            history: History::new(self.base_text().clone()),
            deferred_ops: OperationQueue::new(),
            deferred_replicas: HashSet::default(),
            lamport_clock: clock::Lamport::new(ReplicaId::LOCAL_BRANCH),
            subscriptions: Default::default(),
            edit_id_resolvers: Default::default(),
            wait_for_version_txs: Default::default(),
        }
    }
853
    /// The replica id of this buffer's Lamport clock.
    pub fn replica_id(&self) -> ReplicaId {
        self.lamport_clock.replica_id
    }

    /// The buffer's stable, replica-independent id.
    pub fn remote_id(&self) -> BufferId {
        self.remote_id
    }

    /// The number of operations currently waiting on missing dependencies.
    pub fn deferred_ops_len(&self) -> usize {
        self.deferred_ops.len()
    }

    /// The interval within which adjacent transactions are grouped for undo.
    pub fn transaction_group_interval(&self) -> Duration {
        self.history.group_interval
    }
869
    /// Applies a batch of local edits — each a `(range, new_text)` pair whose
    /// range is resolved against the current (pre-edit) snapshot — and
    /// returns the resulting operation for replication. The edit is recorded
    /// in the history as (part of) a transaction.
    pub fn edit<R, I, S, T>(&mut self, edits: R) -> Operation
    where
        R: IntoIterator<IntoIter = I>,
        I: ExactSizeIterator<Item = (Range<S>, T)>,
        S: ToOffset,
        T: Into<Arc<str>>,
    {
        let edits = edits
            .into_iter()
            .map(|(range, new_text)| (range, new_text.into()));

        self.start_transaction();
        let timestamp = self.lamport_clock.tick();
        let operation = Operation::Edit(self.apply_local_edit(edits, timestamp));

        // Log the operation, record it for undo, and mark it as observed.
        self.history.push(operation.clone());
        self.history.push_undo(operation.timestamp());
        self.snapshot.version.observe(operation.timestamp());
        self.end_transaction();
        operation
    }
891
    /// Core implementation of a local edit: rewrites the fragment tree,
    /// splitting fragments at edit boundaries, inserting new fragments for
    /// inserted text and marking deleted ranges invisible, while rebuilding
    /// the visible/deleted ropes in lockstep. Returns the `EditOperation`
    /// describing the edit for replication.
    ///
    /// NOTE(review): this panics if `edits` is empty (see the
    /// `edits.peek().unwrap()` below) — callers must supply at least one edit.
    fn apply_local_edit<S: ToOffset, T: Into<Arc<str>>>(
        &mut self,
        edits: impl ExactSizeIterator<Item = (Range<S>, T)>,
        timestamp: clock::Lamport,
    ) -> EditOperation {
        let mut edits_patch = Patch::default();
        let mut edit_op = EditOperation {
            timestamp,
            version: self.version(),
            ranges: Vec::with_capacity(edits.len()),
            new_text: Vec::with_capacity(edits.len()),
        };
        let mut new_insertions = Vec::new();
        let mut insertion_offset: u32 = 0;
        let mut insertion_slices = Vec::new();

        // Resolve all edit ranges against the pre-edit snapshot up front.
        let mut edits = edits
            .map(|(range, new_text)| (range.to_offset(&*self), new_text))
            .peekable();

        let mut new_ropes =
            RopeBuilder::new(self.visible_text.cursor(0), self.deleted_text.cursor(0));
        let mut old_fragments = self.fragments.cursor::<FragmentTextSummary>(&None);
        // Reuse all fragments that precede the first edit unchanged.
        let mut new_fragments = old_fragments.slice(&edits.peek().unwrap().0.start, Bias::Right);
        new_ropes.append(new_fragments.summary().text);

        let mut fragment_start = old_fragments.start().visible;
        for (range, new_text) in edits {
            let new_text = LineEnding::normalize_arc(new_text.into());
            let fragment_end = old_fragments.end().visible;

            // If the current fragment ends before this range, then jump ahead to the first fragment
            // that extends past the start of this range, reusing any intervening fragments.
            if fragment_end < range.start {
                // If the current fragment has been partially consumed, then consume the rest of it
                // and advance to the next fragment before slicing.
                if fragment_start > old_fragments.start().visible {
                    if fragment_end > fragment_start {
                        let mut suffix = old_fragments.item().unwrap().clone();
                        suffix.len = (fragment_end - fragment_start) as u32;
                        suffix.insertion_offset +=
                            (fragment_start - old_fragments.start().visible) as u32;
                        new_insertions.push(InsertionFragment::insert_new(&suffix));
                        new_ropes.push_fragment(&suffix, suffix.visible);
                        new_fragments.push(suffix, &None);
                    }
                    old_fragments.next();
                }

                let slice = old_fragments.slice(&range.start, Bias::Right);
                new_ropes.append(slice.summary().text);
                new_fragments.append(slice, &None);
                fragment_start = old_fragments.start().visible;
            }

            // Record the range start in "full" coordinates (including
            // deleted text) for the replicated operation.
            let full_range_start = FullOffset(range.start + old_fragments.start().deleted);

            // Preserve any portion of the current fragment that precedes this range.
            if fragment_start < range.start {
                let mut prefix = old_fragments.item().unwrap().clone();
                prefix.len = (range.start - fragment_start) as u32;
                prefix.insertion_offset += (fragment_start - old_fragments.start().visible) as u32;
                prefix.id = Locator::between(&new_fragments.summary().max_id, &prefix.id);
                new_insertions.push(InsertionFragment::insert_new(&prefix));
                new_ropes.push_fragment(&prefix, prefix.visible);
                new_fragments.push(prefix, &None);
                fragment_start = range.start;
            }

            // Insert the new text before any existing fragments within the range.
            if !new_text.is_empty() {
                let new_start = new_fragments.summary().text.visible;

                let next_fragment_id = old_fragments
                    .item()
                    .map_or(Locator::max_ref(), |old_fragment| &old_fragment.id);
                Self::push_fragments_for_insertion(
                    new_text.as_ref(),
                    timestamp,
                    &mut insertion_offset,
                    &mut new_fragments,
                    &mut new_insertions,
                    &mut insertion_slices,
                    &mut new_ropes,
                    next_fragment_id,
                    timestamp,
                );
                edits_patch.push(Edit {
                    old: fragment_start..fragment_start,
                    new: new_start..new_start + new_text.len(),
                });
            }

            // Advance through every fragment that intersects this range, marking the intersecting
            // portions as deleted.
            while fragment_start < range.end {
                let fragment = old_fragments.item().unwrap();
                let fragment_end = old_fragments.end().visible;
                let mut intersection = fragment.clone();
                let intersection_end = cmp::min(range.end, fragment_end);
                if fragment.visible {
                    intersection.len = (intersection_end - fragment_start) as u32;
                    intersection.insertion_offset +=
                        (fragment_start - old_fragments.start().visible) as u32;
                    intersection.id =
                        Locator::between(&new_fragments.summary().max_id, &intersection.id);
                    intersection.deletions.push(timestamp);
                    intersection.visible = false;
                }
                if intersection.len > 0 {
                    // Only fragments we just deleted contribute to the patch
                    // and the recorded insertion slices.
                    if fragment.visible && !intersection.visible {
                        let new_start = new_fragments.summary().text.visible;
                        edits_patch.push(Edit {
                            old: fragment_start..intersection_end,
                            new: new_start..new_start,
                        });
                        insertion_slices
                            .push(InsertionSlice::from_fragment(timestamp, &intersection));
                    }
                    new_insertions.push(InsertionFragment::insert_new(&intersection));
                    new_ropes.push_fragment(&intersection, fragment.visible);
                    new_fragments.push(intersection, &None);
                    fragment_start = intersection_end;
                }
                if fragment_end <= range.end {
                    old_fragments.next();
                }
            }

            let full_range_end = FullOffset(range.end + old_fragments.start().deleted);
            edit_op.ranges.push(full_range_start..full_range_end);
            edit_op.new_text.push(new_text);
        }

        // If the current fragment has been partially consumed, then consume the rest of it
        // and advance to the next fragment before slicing.
        if fragment_start > old_fragments.start().visible {
            let fragment_end = old_fragments.end().visible;
            if fragment_end > fragment_start {
                let mut suffix = old_fragments.item().unwrap().clone();
                suffix.len = (fragment_end - fragment_start) as u32;
                suffix.insertion_offset += (fragment_start - old_fragments.start().visible) as u32;
                new_insertions.push(InsertionFragment::insert_new(&suffix));
                new_ropes.push_fragment(&suffix, suffix.visible);
                new_fragments.push(suffix, &None);
            }
            old_fragments.next();
        }

        // Reuse everything after the last edit unchanged.
        let suffix = old_fragments.suffix();
        new_ropes.append(suffix.summary().text);
        new_fragments.append(suffix, &None);
        let (visible_text, deleted_text) = new_ropes.finish();
        drop(old_fragments);

        // Commit the rebuilt state and notify subscribers of the edits.
        self.snapshot.fragments = new_fragments;
        self.snapshot.insertions.edit(new_insertions, ());
        self.snapshot.visible_text = visible_text;
        self.snapshot.deleted_text = deleted_text;
        self.subscriptions.publish_mut(&edits_patch);
        self.snapshot.insertion_slices.extend(insertion_slices);
        edit_op
    }
1055
    /// Overrides the line-ending style reported by the snapshot. Does not
    /// modify the buffer's text.
    pub fn set_line_ending(&mut self, line_ending: LineEnding) {
        self.snapshot.line_ending = line_ending;
    }
1059
    /// Applies a batch of (typically remote) operations. Every operation is
    /// logged; operations whose dependencies haven't been observed yet are
    /// deferred and retried via `flush_deferred_ops`.
    pub fn apply_ops<I: IntoIterator<Item = Operation>>(&mut self, ops: I) {
        let mut deferred_ops = Vec::new();
        for op in ops {
            self.history.push(op.clone());
            if self.can_apply_op(&op) {
                self.apply_op(op);
            } else {
                self.deferred_replicas.insert(op.replica_id());
                deferred_ops.push(op);
            }
        }
        self.deferred_ops.insert(deferred_ops);
        self.flush_deferred_ops();
    }
1074
    /// Applies a single, causally-ready operation, updating the version
    /// vector, the Lamport clock, and any tasks waiting on a version.
    fn apply_op(&mut self, op: Operation) {
        match op {
            Operation::Edit(edit) => {
                // Skip edits whose timestamp has already been observed.
                if !self.version.observed(edit.timestamp) {
                    self.apply_remote_edit(
                        &edit.version,
                        &edit.ranges,
                        &edit.new_text,
                        edit.timestamp,
                    );
                    self.snapshot.version.observe(edit.timestamp);
                    self.lamport_clock.observe(edit.timestamp);
                    self.resolve_edit(edit.timestamp);
                }
            }
            Operation::Undo(undo) => {
                // Skip undos whose timestamp has already been observed.
                if !self.version.observed(undo.timestamp) {
                    self.apply_undo(&undo);
                    self.snapshot.version.observe(undo.timestamp);
                    self.lamport_clock.observe(undo.timestamp);
                }
            }
        }
        // Wake any waiters whose awaited version has now been reached; drop
        // satisfied (or disconnected) channels from the list.
        self.wait_for_version_txs.retain_mut(|(version, tx)| {
            if self.snapshot.version().observed_all(version) {
                tx.try_send(()).ok();
                false
            } else {
                true
            }
        });
    }
1107
    /// Integrates an edit produced on another replica into the fragment tree.
    ///
    /// `ranges` are full offsets (visible + deleted text) interpreted as of
    /// `version` — the version the editing replica had observed when it made
    /// the edit. Concurrent insertions at the same position are ordered by
    /// lamport `timestamp`, so all replicas converge on the same fragment
    /// order.
    fn apply_remote_edit(
        &mut self,
        version: &clock::Global,
        ranges: &[Range<FullOffset>],
        new_text: &[Arc<str>],
        timestamp: clock::Lamport,
    ) {
        if ranges.is_empty() {
            return;
        }

        let edits = ranges.iter().zip(new_text.iter());
        let mut edits_patch = Patch::default();
        let mut insertion_slices = Vec::new();
        // Seek the fragment tree in the coordinate space of `version`.
        let cx = Some(version.clone());
        let mut new_insertions = Vec::new();
        let mut insertion_offset: u32 = 0;
        let mut new_ropes =
            RopeBuilder::new(self.visible_text.cursor(0), self.deleted_text.cursor(0));
        let mut old_fragments = self
            .fragments
            .cursor::<Dimensions<VersionedFullOffset, usize>>(&cx);
        // Reuse, untouched, everything strictly before the first edited range.
        let mut new_fragments =
            old_fragments.slice(&VersionedFullOffset::Offset(ranges[0].start), Bias::Left);
        new_ropes.append(new_fragments.summary().text);

        let mut fragment_start = old_fragments.start().0.full_offset();
        for (range, new_text) in edits {
            let fragment_end = old_fragments.end().0.full_offset();

            // If the current fragment ends before this range, then jump ahead to the first fragment
            // that extends past the start of this range, reusing any intervening fragments.
            if fragment_end < range.start {
                // If the current fragment has been partially consumed, then consume the rest of it
                // and advance to the next fragment before slicing.
                if fragment_start > old_fragments.start().0.full_offset() {
                    if fragment_end > fragment_start {
                        let mut suffix = old_fragments.item().unwrap().clone();
                        suffix.len = (fragment_end.0 - fragment_start.0) as u32;
                        suffix.insertion_offset +=
                            (fragment_start - old_fragments.start().0.full_offset()) as u32;
                        new_insertions.push(InsertionFragment::insert_new(&suffix));
                        new_ropes.push_fragment(&suffix, suffix.visible);
                        new_fragments.push(suffix, &None);
                    }
                    old_fragments.next();
                }

                let slice =
                    old_fragments.slice(&VersionedFullOffset::Offset(range.start), Bias::Left);
                new_ropes.append(slice.summary().text);
                new_fragments.append(slice, &None);
                fragment_start = old_fragments.start().0.full_offset();
            }

            // If we are at the end of a non-concurrent fragment, advance to the next one.
            let fragment_end = old_fragments.end().0.full_offset();
            if fragment_end == range.start && fragment_end > fragment_start {
                let mut fragment = old_fragments.item().unwrap().clone();
                fragment.len = (fragment_end.0 - fragment_start.0) as u32;
                fragment.insertion_offset +=
                    (fragment_start - old_fragments.start().0.full_offset()) as u32;
                new_insertions.push(InsertionFragment::insert_new(&fragment));
                new_ropes.push_fragment(&fragment, fragment.visible);
                new_fragments.push(fragment, &None);
                old_fragments.next();
                fragment_start = old_fragments.start().0.full_offset();
            }

            // Skip over insertions that are concurrent to this edit, but have a lower lamport
            // timestamp.
            while let Some(fragment) = old_fragments.item() {
                if fragment_start == range.start && fragment.timestamp > timestamp {
                    new_ropes.push_fragment(fragment, fragment.visible);
                    new_fragments.push(fragment.clone(), &None);
                    old_fragments.next();
                    debug_assert_eq!(fragment_start, range.start);
                } else {
                    break;
                }
            }
            debug_assert!(fragment_start <= range.start);

            // Preserve any portion of the current fragment that precedes this range.
            if fragment_start < range.start {
                let mut prefix = old_fragments.item().unwrap().clone();
                prefix.len = (range.start.0 - fragment_start.0) as u32;
                prefix.insertion_offset +=
                    (fragment_start - old_fragments.start().0.full_offset()) as u32;
                prefix.id = Locator::between(&new_fragments.summary().max_id, &prefix.id);
                new_insertions.push(InsertionFragment::insert_new(&prefix));
                fragment_start = range.start;
                new_ropes.push_fragment(&prefix, prefix.visible);
                new_fragments.push(prefix, &None);
            }

            // Insert the new text before any existing fragments within the range.
            if !new_text.is_empty() {
                let mut old_start = old_fragments.start().1;
                if old_fragments.item().is_some_and(|f| f.visible) {
                    old_start += fragment_start.0 - old_fragments.start().0.full_offset().0;
                }
                let new_start = new_fragments.summary().text.visible;
                let next_fragment_id = old_fragments
                    .item()
                    .map_or(Locator::max_ref(), |old_fragment| &old_fragment.id);
                Self::push_fragments_for_insertion(
                    new_text,
                    timestamp,
                    &mut insertion_offset,
                    &mut new_fragments,
                    &mut new_insertions,
                    &mut insertion_slices,
                    &mut new_ropes,
                    next_fragment_id,
                    timestamp,
                );
                edits_patch.push(Edit {
                    old: old_start..old_start,
                    new: new_start..new_start + new_text.len(),
                });
            }

            // Advance through every fragment that intersects this range, marking the intersecting
            // portions as deleted.
            while fragment_start < range.end {
                let fragment = old_fragments.item().unwrap();
                let fragment_end = old_fragments.end().0.full_offset();
                let mut intersection = fragment.clone();
                let intersection_end = cmp::min(range.end, fragment_end);
                // Only fragments the editing replica had observed can be
                // deleted by this edit; concurrent insertions are untouched.
                if version.observed(fragment.timestamp) {
                    intersection.len = (intersection_end.0 - fragment_start.0) as u32;
                    intersection.insertion_offset +=
                        (fragment_start - old_fragments.start().0.full_offset()) as u32;
                    intersection.id =
                        Locator::between(&new_fragments.summary().max_id, &intersection.id);
                    if fragment.was_visible(version, &self.undo_map) {
                        intersection.deletions.push(timestamp);
                        intersection.visible = false;
                        insertion_slices
                            .push(InsertionSlice::from_fragment(timestamp, &intersection));
                    }
                }
                if intersection.len > 0 {
                    // Visible text that just became hidden is reported as a
                    // deletion in the patch.
                    if fragment.visible && !intersection.visible {
                        let old_start = old_fragments.start().1
                            + (fragment_start.0 - old_fragments.start().0.full_offset().0);
                        let new_start = new_fragments.summary().text.visible;
                        edits_patch.push(Edit {
                            old: old_start..old_start + intersection.len as usize,
                            new: new_start..new_start,
                        });
                    }
                    new_insertions.push(InsertionFragment::insert_new(&intersection));
                    new_ropes.push_fragment(&intersection, fragment.visible);
                    new_fragments.push(intersection, &None);
                    fragment_start = intersection_end;
                }
                if fragment_end <= range.end {
                    old_fragments.next();
                }
            }
        }

        // If the current fragment has been partially consumed, then consume the rest of it
        // and advance to the next fragment before slicing.
        if fragment_start > old_fragments.start().0.full_offset() {
            let fragment_end = old_fragments.end().0.full_offset();
            if fragment_end > fragment_start {
                let mut suffix = old_fragments.item().unwrap().clone();
                suffix.len = (fragment_end.0 - fragment_start.0) as u32;
                suffix.insertion_offset +=
                    (fragment_start - old_fragments.start().0.full_offset()) as u32;
                new_insertions.push(InsertionFragment::insert_new(&suffix));
                new_ropes.push_fragment(&suffix, suffix.visible);
                new_fragments.push(suffix, &None);
            }
            old_fragments.next();
        }

        let suffix = old_fragments.suffix();
        new_ropes.append(suffix.summary().text);
        new_fragments.append(suffix, &None);
        let (visible_text, deleted_text) = new_ropes.finish();
        drop(old_fragments);

        self.snapshot.fragments = new_fragments;
        self.snapshot.visible_text = visible_text;
        self.snapshot.deleted_text = deleted_text;
        self.snapshot.insertions.edit(new_insertions, ());
        self.snapshot.insertion_slices.extend(insertion_slices);
        self.subscriptions.publish_mut(&edits_patch)
    }
1301
    /// Pushes fragments covering `new_text` onto `new_fragments`, splitting
    /// the text into chunks of at most `MAX_INSERTION_LEN` bytes so fragment
    /// lengths and offsets fit in `u32`.
    ///
    /// `insertion_offset` is the running byte offset within the insertion
    /// identified by `timestamp`; it is advanced past the pushed text so that
    /// consecutive calls produce contiguous offsets. `edit_timestamp` tags the
    /// recorded insertion slices with the edit operation that produced them.
    /// `next_fragment_id` is the locator of the fragment that will follow the
    /// new ones, used to generate correctly ordered fragment ids.
    fn push_fragments_for_insertion(
        new_text: &str,
        timestamp: clock::Lamport,
        insertion_offset: &mut u32,
        new_fragments: &mut SumTree<Fragment>,
        new_insertions: &mut Vec<sum_tree::Edit<InsertionFragment>>,
        insertion_slices: &mut Vec<InsertionSlice>,
        new_ropes: &mut RopeBuilder,
        next_fragment_id: &Locator,
        edit_timestamp: clock::Lamport,
    ) {
        let mut text_offset = 0;
        while text_offset < new_text.len() {
            let target_end = new_text.len().min(text_offset + MAX_INSERTION_LEN);
            // Never split in the middle of a multi-byte character.
            let chunk_end = if target_end == new_text.len() {
                target_end
            } else {
                new_text.floor_char_boundary(target_end)
            };
            if chunk_end == text_offset {
                // Defensive: no char boundary fell inside the chunk; bail
                // rather than loop forever.
                break;
            }
            let chunk_len = chunk_end - text_offset;

            let fragment = Fragment {
                id: Locator::between(&new_fragments.summary().max_id, next_fragment_id),
                timestamp,
                insertion_offset: *insertion_offset,
                len: chunk_len as u32,
                deletions: Default::default(),
                max_undos: Default::default(),
                visible: true,
            };
            insertion_slices.push(InsertionSlice::from_fragment(edit_timestamp, &fragment));
            new_insertions.push(InsertionFragment::insert_new(&fragment));
            new_fragments.push(fragment, &None);

            *insertion_offset += chunk_len as u32;
            text_offset = chunk_end;
        }
        // The rope takes the whole text in one append.
        new_ropes.push_str(new_text);
    }
1344
    /// Returns the sorted locators of all fragments produced by the given
    /// edits, found by way of the insertion-slices index.
    fn fragment_ids_for_edits<'a>(
        &'a self,
        edit_ids: impl Iterator<Item = &'a clock::Lamport>,
    ) -> Vec<&'a Locator> {
        // Get all of the insertion slices changed by the given edits.
        let mut insertion_slices = Vec::new();
        for edit_id in edit_ids {
            // Probe key: the smallest possible slice for this edit id, so
            // iteration starts at the edit's first slice.
            let insertion_slice = InsertionSlice {
                edit_id_value: edit_id.value,
                edit_id_replica_id: edit_id.replica_id,
                insertion_id_value: Lamport::MIN.value,
                insertion_id_replica_id: Lamport::MIN.replica_id,
                range: 0..0,
            };
            let slices = self
                .snapshot
                .insertion_slices
                .iter_from(&insertion_slice)
                .take_while(|slice| {
                    Lamport {
                        value: slice.edit_id_value,
                        replica_id: slice.edit_id_replica_id,
                    } == *edit_id
                });
            insertion_slices.extend(slices)
        }
        // Sort by insertion id, then start ascending, then end descending so
        // that for a shared start the longest slice comes first.
        insertion_slices.sort_unstable_by_key(|s| {
            (
                Lamport {
                    value: s.insertion_id_value,
                    replica_id: s.insertion_id_replica_id,
                },
                s.range.start,
                Reverse(s.range.end),
            )
        });

        // Get all of the fragments corresponding to these insertion slices.
        let mut fragment_ids = Vec::new();
        let mut insertions_cursor = self.insertions.cursor::<InsertionFragmentKey>(());
        for insertion_slice in &insertion_slices {
            let insertion_id = Lamport {
                value: insertion_slice.insertion_id_value,
                replica_id: insertion_slice.insertion_id_replica_id,
            };
            // Only seek when the cursor isn't already positioned within this
            // slice (the cursor moves forward only).
            if insertion_id != insertions_cursor.start().timestamp
                || insertion_slice.range.start > insertions_cursor.start().split_offset
            {
                insertions_cursor.seek_forward(
                    &InsertionFragmentKey {
                        timestamp: insertion_id,
                        split_offset: insertion_slice.range.start,
                    },
                    Bias::Left,
                );
            }
            // Collect every fragment of this insertion whose split offset
            // falls inside the slice's range.
            while let Some(item) = insertions_cursor.item() {
                if item.timestamp != insertion_id || item.split_offset >= insertion_slice.range.end
                {
                    break;
                }
                fragment_ids.push(&item.fragment_id);
                insertions_cursor.next();
            }
        }
        fragment_ids.sort_unstable();
        fragment_ids
    }
1413
    /// Applies an undo operation by recomputing the visibility of every
    /// fragment touched by the undone edits, rebuilding the fragment tree and
    /// the visible/deleted ropes, and publishing the resulting edits.
    fn apply_undo(&mut self, undo: &UndoOperation) {
        self.snapshot.undo_map.insert(undo);

        let mut edits = Patch::default();
        let mut old_fragments = self
            .fragments
            .cursor::<Dimensions<Option<&Locator>, usize>>(&None);
        let mut new_fragments = SumTree::new(&None);
        let mut new_ropes =
            RopeBuilder::new(self.visible_text.cursor(0), self.deleted_text.cursor(0));

        for fragment_id in self.fragment_ids_for_edits(undo.counts.keys()) {
            // Copy everything preceding the affected fragment unchanged.
            let preceding_fragments = old_fragments.slice(&Some(fragment_id), Bias::Left);
            new_ropes.append(preceding_fragments.summary().text);
            new_fragments.append(preceding_fragments, &None);

            if let Some(fragment) = old_fragments.item() {
                let mut fragment = fragment.clone();
                let fragment_was_visible = fragment.visible;

                // Recompute visibility against the updated undo map.
                fragment.visible = fragment.is_visible(&self.undo_map);
                fragment.max_undos.observe(undo.timestamp);

                let old_start = old_fragments.start().1;
                let new_start = new_fragments.summary().text.visible;
                if fragment_was_visible && !fragment.visible {
                    // Text disappeared: report a deletion.
                    edits.push(Edit {
                        old: old_start..old_start + fragment.len as usize,
                        new: new_start..new_start,
                    });
                } else if !fragment_was_visible && fragment.visible {
                    // Text reappeared: report an insertion.
                    edits.push(Edit {
                        old: old_start..old_start,
                        new: new_start..new_start + fragment.len as usize,
                    });
                }
                new_ropes.push_fragment(&fragment, fragment_was_visible);
                new_fragments.push(fragment, &None);

                old_fragments.next();
            }
        }

        // Copy everything after the last affected fragment unchanged.
        let suffix = old_fragments.suffix();
        new_ropes.append(suffix.summary().text);
        new_fragments.append(suffix, &None);

        drop(old_fragments);
        let (visible_text, deleted_text) = new_ropes.finish();
        self.snapshot.fragments = new_fragments;
        self.snapshot.visible_text = visible_text;
        self.snapshot.deleted_text = deleted_text;
        self.subscriptions.publish_mut(&edits);
    }
1468
1469 fn flush_deferred_ops(&mut self) {
1470 self.deferred_replicas.clear();
1471 let mut deferred_ops = Vec::new();
1472 for op in self.deferred_ops.drain().iter().cloned() {
1473 if self.can_apply_op(&op) {
1474 self.apply_op(op);
1475 } else {
1476 self.deferred_replicas.insert(op.replica_id());
1477 deferred_ops.push(op);
1478 }
1479 }
1480 self.deferred_ops.insert(deferred_ops);
1481 }
1482
1483 fn can_apply_op(&self, op: &Operation) -> bool {
1484 if self.deferred_replicas.contains(&op.replica_id()) {
1485 false
1486 } else {
1487 self.version.observed_all(match op {
1488 Operation::Edit(edit) => &edit.version,
1489 Operation::Undo(undo) => &undo.version,
1490 })
1491 }
1492 }
1493
    /// Returns true if any operations are still waiting for missing dependencies.
    pub fn has_deferred_ops(&self) -> bool {
        !self.deferred_ops.is_empty()
    }
1497
    /// Returns the most recent entry on the undo stack without removing it.
    pub fn peek_undo_stack(&self) -> Option<&HistoryEntry> {
        self.history.undo_stack.last()
    }
1501
    /// Returns the most recent entry on the redo stack without removing it.
    pub fn peek_redo_stack(&self) -> Option<&HistoryEntry> {
        self.history.redo_stack.last()
    }
1505
    /// Starts a transaction timestamped with the current instant.
    pub fn start_transaction(&mut self) -> Option<TransactionId> {
        self.start_transaction_at(Instant::now())
    }
1509
    /// Starts a transaction at the given instant, recording the current
    /// version as its starting point.
    pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
        self.history
            .start_transaction(self.version.clone(), now, &mut self.lamport_clock)
    }
1514
    /// Ends the current transaction, timestamped with the current instant.
    pub fn end_transaction(&mut self) -> Option<(TransactionId, clock::Global)> {
        self.end_transaction_at(Instant::now())
    }
1518
1519 pub fn end_transaction_at(&mut self, now: Instant) -> Option<(TransactionId, clock::Global)> {
1520 if let Some(entry) = self.history.end_transaction(now) {
1521 let since = entry.transaction.start.clone();
1522 let id = self.history.group().unwrap();
1523 Some((id, since))
1524 } else {
1525 None
1526 }
1527 }
1528
    /// Finalizes the most recent transaction so later edits won't be grouped into it.
    pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
        self.history.finalize_last_transaction()
    }
1532
    /// Groups history entries back to (and including) the given transaction.
    pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
        self.history.group_until(transaction_id);
    }
1536
    /// Returns the buffer's original (pre-edit) text.
    pub fn base_text(&self) -> &Rope {
        &self.history.base_text
    }
1540
    /// Returns all operations recorded in this buffer's history, keyed by timestamp.
    pub fn operations(&self) -> &TreeMap<clock::Lamport, Operation> {
        &self.history.operations
    }
1544
1545 pub fn undo(&mut self) -> Option<(TransactionId, Operation)> {
1546 if let Some(entry) = self.history.pop_undo() {
1547 let transaction = entry.transaction.clone();
1548 let transaction_id = transaction.id;
1549 let op = self.undo_or_redo(transaction);
1550 Some((transaction_id, op))
1551 } else {
1552 None
1553 }
1554 }
1555
1556 pub fn undo_transaction(&mut self, transaction_id: TransactionId) -> Option<Operation> {
1557 let transaction = self
1558 .history
1559 .remove_from_undo(transaction_id)?
1560 .transaction
1561 .clone();
1562 Some(self.undo_or_redo(transaction))
1563 }
1564
1565 pub fn undo_to_transaction(&mut self, transaction_id: TransactionId) -> Vec<Operation> {
1566 let transactions = self
1567 .history
1568 .remove_from_undo_until(transaction_id)
1569 .iter()
1570 .map(|entry| entry.transaction.clone())
1571 .collect::<Vec<_>>();
1572
1573 transactions
1574 .into_iter()
1575 .map(|transaction| self.undo_or_redo(transaction))
1576 .collect()
1577 }
1578
    /// Removes the given transaction from history entirely, returning it if found.
    pub fn forget_transaction(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
        self.history.forget(transaction_id)
    }
1582
    /// Looks up a transaction in history by id.
    pub fn get_transaction(&self, transaction_id: TransactionId) -> Option<&Transaction> {
        self.history.transaction(transaction_id)
    }
1586
    /// Merges one transaction's edits into another transaction in history.
    pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
        self.history.merge_transactions(transaction, destination);
    }
1590
1591 pub fn redo(&mut self) -> Option<(TransactionId, Operation)> {
1592 if let Some(entry) = self.history.pop_redo() {
1593 let transaction = entry.transaction.clone();
1594 let transaction_id = transaction.id;
1595 let op = self.undo_or_redo(transaction);
1596 Some((transaction_id, op))
1597 } else {
1598 None
1599 }
1600 }
1601
1602 pub fn redo_to_transaction(&mut self, transaction_id: TransactionId) -> Vec<Operation> {
1603 let transactions = self
1604 .history
1605 .remove_from_redo(transaction_id)
1606 .iter()
1607 .map(|entry| entry.transaction.clone())
1608 .collect::<Vec<_>>();
1609
1610 transactions
1611 .into_iter()
1612 .map(|transaction| self.undo_or_redo(transaction))
1613 .collect()
1614 }
1615
1616 fn undo_or_redo(&mut self, transaction: Transaction) -> Operation {
1617 let mut counts = HashMap::default();
1618 for edit_id in transaction.edit_ids {
1619 counts.insert(edit_id, self.undo_map.undo_count(edit_id).saturating_add(1));
1620 }
1621
1622 let operation = self.undo_operations(counts);
1623 self.history.push(operation.clone());
1624 operation
1625 }
1626
    /// Creates an undo operation that sets the undo count of each edit in
    /// `counts`, applies it locally, and returns it for broadcasting.
    pub fn undo_operations(&mut self, counts: HashMap<clock::Lamport, u32>) -> Operation {
        let timestamp = self.lamport_clock.tick();
        // Capture the version *before* observing the new timestamp, so the
        // operation's version describes only its causal dependencies.
        let version = self.version();
        self.snapshot.version.observe(timestamp);
        let undo = UndoOperation {
            timestamp,
            version,
            counts,
        };
        self.apply_undo(&undo);
        Operation::Undo(undo)
    }
1639
    /// Pushes an externally created transaction onto the undo history.
    pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
        self.history.push_transaction(transaction, now);
    }
1643
1644 /// Differs from `push_transaction` in that it does not clear the redo stack.
1645 /// The caller responsible for
1646 /// Differs from `push_transaction` in that it does not clear the redo
1647 /// stack. Intended to be used to create a parent transaction to merge
1648 /// potential child transactions into.
1649 ///
1650 /// The caller is responsible for removing it from the undo history using
1651 /// `forget_transaction` if no edits are merged into it. Otherwise, if edits
1652 /// are merged into this transaction, the caller is responsible for ensuring
1653 /// the redo stack is cleared. The easiest way to ensure the redo stack is
1654 /// cleared is to create transactions with the usual `start_transaction` and
1655 /// `end_transaction` methods and merging the resulting transactions into
1656 /// the transaction created by this method
1657 pub fn push_empty_transaction(&mut self, now: Instant) -> TransactionId {
1658 self.history
1659 .push_empty_transaction(self.version.clone(), now, &mut self.lamport_clock)
1660 }
1661
    /// Returns the current ranges covered by the given transaction's edits,
    /// or an empty iterator if the transaction is not found in history.
    pub fn edited_ranges_for_transaction_id<D>(
        &self,
        transaction_id: TransactionId,
    ) -> impl '_ + Iterator<Item = Range<D>>
    where
        D: TextDimension,
    {
        self.history
            .transaction(transaction_id)
            .into_iter()
            .flat_map(|transaction| self.edited_ranges_for_transaction(transaction))
    }
1674
    /// Returns the current positions (in dimension `D`) of the fragments
    /// produced by the given edits, with adjacent ranges merged. Fragments
    /// that are no longer visible contribute empty ranges at their current
    /// position.
    pub fn edited_ranges_for_edit_ids<'a, D>(
        &'a self,
        edit_ids: impl IntoIterator<Item = &'a clock::Lamport>,
    ) -> impl 'a + Iterator<Item = Range<D>>
    where
        D: TextDimension,
    {
        // get fragment ranges
        let mut cursor = self
            .fragments
            .cursor::<Dimensions<Option<&Locator>, usize>>(&None);
        let offset_ranges = self
            .fragment_ids_for_edits(edit_ids.into_iter())
            .into_iter()
            .filter_map(move |fragment_id| {
                // Fragment ids are sorted, so a forward-only seek suffices.
                cursor.seek_forward(&Some(fragment_id), Bias::Left);
                let fragment = cursor.item()?;
                let start_offset = cursor.start().1;
                // Invisible fragments occupy no visible space, yielding an
                // empty range at the deletion site.
                let end_offset = start_offset
                    + if fragment.visible {
                        fragment.len as usize
                    } else {
                        0
                    };
                Some(start_offset..end_offset)
            });

        // combine adjacent ranges
        let mut prev_range: Option<Range<usize>> = None;
        let disjoint_ranges = offset_ranges
            .map(Some)
            .chain([None])
            .filter_map(move |range| {
                // The trailing `None` sentinel flushes the final pending range.
                if let Some((range, prev_range)) = range.as_ref().zip(prev_range.as_mut())
                    && prev_range.end == range.start
                {
                    prev_range.end = range.end;
                    return None;
                }
                let result = prev_range.clone();
                prev_range = range;
                result
            });

        // convert to the desired text dimension.
        let mut position = D::zero(());
        let mut rope_cursor = self.visible_text.cursor(0);
        disjoint_ranges.map(move |range| {
            // Accumulate summaries incrementally; the rope cursor advances
            // monotonically through the visible text.
            position.add_assign(&rope_cursor.summary(range.start));
            let start = position;
            position.add_assign(&rope_cursor.summary(range.end));
            let end = position;
            start..end
        })
    }
1730
    /// Returns the current ranges covered by the given transaction's edits.
    pub fn edited_ranges_for_transaction<'a, D>(
        &'a self,
        transaction: &'a Transaction,
    ) -> impl 'a + Iterator<Item = Range<D>>
    where
        D: TextDimension,
    {
        self.edited_ranges_for_edit_ids(&transaction.edit_ids)
    }
1740
    /// Creates a subscription that receives a patch for every published buffer change.
    pub fn subscribe(&mut self) -> Subscription<usize> {
        self.subscriptions.subscribe()
    }
1744
1745 pub fn wait_for_edits<It: IntoIterator<Item = clock::Lamport>>(
1746 &mut self,
1747 edit_ids: It,
1748 ) -> impl 'static + Future<Output = Result<()>> + use<It> {
1749 let mut futures = Vec::new();
1750 for edit_id in edit_ids {
1751 if !self.version.observed(edit_id) {
1752 let (tx, rx) = oneshot::channel();
1753 self.edit_id_resolvers.entry(edit_id).or_default().push(tx);
1754 futures.push(rx);
1755 }
1756 }
1757
1758 async move {
1759 for mut future in futures {
1760 if future.recv().await.is_none() {
1761 anyhow::bail!("gave up waiting for edits");
1762 }
1763 }
1764 Ok(())
1765 }
1766 }
1767
1768 pub fn wait_for_anchors<It: IntoIterator<Item = Anchor>>(
1769 &mut self,
1770 anchors: It,
1771 ) -> impl 'static + Future<Output = Result<()>> + use<It> {
1772 let mut futures = Vec::new();
1773 for anchor in anchors {
1774 if !self.version.observed(anchor.timestamp()) && !anchor.is_max() && !anchor.is_min() {
1775 let (tx, rx) = oneshot::channel();
1776 self.edit_id_resolvers
1777 .entry(anchor.timestamp())
1778 .or_default()
1779 .push(tx);
1780 futures.push(rx);
1781 }
1782 }
1783
1784 async move {
1785 for mut future in futures {
1786 if future.recv().await.is_none() {
1787 anyhow::bail!("gave up waiting for anchors");
1788 }
1789 }
1790 Ok(())
1791 }
1792 }
1793
1794 pub fn wait_for_version(
1795 &mut self,
1796 version: clock::Global,
1797 ) -> impl Future<Output = Result<()>> + use<> {
1798 let mut rx = None;
1799 if !self.snapshot.version.observed_all(&version) {
1800 let channel = oneshot::channel();
1801 self.wait_for_version_txs.push((version, channel.0));
1802 rx = Some(channel.1);
1803 }
1804 async move {
1805 if let Some(mut rx) = rx
1806 && rx.recv().await.is_none()
1807 {
1808 anyhow::bail!("gave up waiting for version");
1809 }
1810 Ok(())
1811 }
1812 }
1813
    /// Cancels all pending `wait_for_edits`/`wait_for_anchors`/`wait_for_version`
    /// futures. Dropping the senders causes those futures to resolve with an error.
    pub fn give_up_waiting(&mut self) {
        self.edit_id_resolvers.clear();
        self.wait_for_version_txs.clear();
    }
1818
1819 fn resolve_edit(&mut self, edit_id: clock::Lamport) {
1820 for mut tx in self
1821 .edit_id_resolvers
1822 .remove(&edit_id)
1823 .into_iter()
1824 .flatten()
1825 {
1826 tx.try_send(()).ok();
1827 }
1828 }
1829
    /// Sets the time window within which consecutive transactions are grouped together.
    pub fn set_group_interval(&mut self, group_interval: Duration) {
        self.history.group_interval = group_interval;
    }
1833}
1834
#[cfg(any(test, feature = "test-support"))]
impl Buffer {
    /// Applies the edits described by `marked_string`
    /// (see `edits_for_marked_text`).
    #[track_caller]
    pub fn edit_via_marked_text(&mut self, marked_string: &str) {
        let edits = self.edits_for_marked_text(marked_string);
        self.edit(edits);
    }

    /// Computes the edits that transform the buffer's current text into
    /// `marked_string`, where markers in the string delimit the inserted
    /// ranges. Panics ("invalid edit") if `marked_string` cannot be reached
    /// from the current text.
    #[track_caller]
    pub fn edits_for_marked_text(&self, marked_string: &str) -> Vec<(Range<usize>, String)> {
        let old_text = self.text();
        let (new_text, mut ranges) = util::test::marked_text_ranges(marked_string, false);
        // With no markers, treat the entire string as a single replacement.
        if ranges.is_empty() {
            ranges.push(0..new_text.len());
        }

        // The text before the first insertion must be unchanged.
        assert_eq!(
            old_text[..ranges[0].start],
            new_text[..ranges[0].start],
            "invalid edit"
        );

        // `delta` tracks the running offset difference (new minus old).
        let mut delta = 0;
        let mut edits = Vec::new();
        let mut ranges = ranges.into_iter().peekable();

        while let Some(inserted_range) = ranges.next() {
            let new_start = inserted_range.start;
            let old_start = (new_start as isize - delta) as usize;

            // The unchanged text between this insertion and the next (or the
            // end of the string) anchors how much old text was deleted here.
            let following_text = if let Some(next_range) = ranges.peek() {
                &new_text[inserted_range.end..next_range.start]
            } else {
                &new_text[inserted_range.end..]
            };

            let inserted_len = inserted_range.len();
            let deleted_len = old_text[old_start..]
                .find(following_text)
                .expect("invalid edit");

            let old_range = old_start..old_start + deleted_len;
            edits.push((old_range, new_text[inserted_range].to_string()));
            delta += inserted_len as isize - deleted_len as isize;
        }

        // The computed edits must account for the whole length difference.
        assert_eq!(
            old_text.len() as isize + delta,
            new_text.len() as isize,
            "invalid edit"
        );

        edits
    }

    /// Asserts the internal consistency of the fragment and insertion trees
    /// and the visible/deleted ropes. Test-only sanity check.
    pub fn check_invariants(&self) {
        // Ensure every fragment is ordered by locator in the fragment tree and corresponds
        // to an insertion fragment in the insertions tree.
        let mut prev_fragment_id = Locator::min();
        for fragment in self.snapshot.fragments.items(&None) {
            assert!(fragment.id > prev_fragment_id);
            prev_fragment_id = fragment.id.clone();

            let insertion_fragment = self
                .snapshot
                .insertions
                .get(
                    &InsertionFragmentKey {
                        timestamp: fragment.timestamp,
                        split_offset: fragment.insertion_offset,
                    },
                    (),
                )
                .unwrap();
            assert_eq!(
                insertion_fragment.fragment_id, fragment.id,
                "fragment: {:?}\ninsertion: {:?}",
                fragment, insertion_fragment
            );
        }

        // Every insertion fragment must point back at a fragment with a
        // matching id and split offset.
        let mut cursor = self.snapshot.fragments.cursor::<Option<&Locator>>(&None);
        for insertion_fragment in self.snapshot.insertions.cursor::<()>(()) {
            cursor.seek(&Some(&insertion_fragment.fragment_id), Bias::Left);
            let fragment = cursor.item().unwrap();
            assert_eq!(insertion_fragment.fragment_id, fragment.id);
            assert_eq!(insertion_fragment.split_offset, fragment.insertion_offset);
        }

        // The fragment summaries must agree with the rope lengths.
        let fragment_summary = self.snapshot.fragments.summary();
        assert_eq!(
            fragment_summary.text.visible,
            self.snapshot.visible_text.len()
        );
        assert_eq!(
            fragment_summary.text.deleted,
            self.snapshot.deleted_text.len()
        );

        // The buffer never stores "\r\n" internally.
        assert!(!self.text().contains("\r\n"));
    }

    /// Returns a random byte range at or after `start_offset`, clipped to
    /// valid positions.
    pub fn random_byte_range(&self, start_offset: usize, rng: &mut impl rand::Rng) -> Range<usize> {
        let end = self.clip_offset(rng.random_range(start_offset..=self.len()), Bias::Right);
        let start = self.clip_offset(rng.random_range(start_offset..=end), Bias::Right);
        start..end
    }

    /// Generates up to `edit_count` random, non-overlapping edits over the
    /// current buffer contents, ordered left to right.
    pub fn get_random_edits<T>(
        &self,
        rng: &mut T,
        edit_count: usize,
    ) -> Vec<(Range<usize>, Arc<str>)>
    where
        T: rand::Rng,
    {
        let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
        let mut last_end = None;
        for _ in 0..edit_count {
            if last_end.is_some_and(|last_end| last_end >= self.len()) {
                break;
            }
            // Leave at least one byte between consecutive edits.
            let new_start = last_end.map_or(0, |last_end| last_end + 1);
            let range = self.random_byte_range(new_start, rng);
            last_end = Some(range.end);

            let new_text_len = rng.random_range(0..10);
            let new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();

            edits.push((range, new_text.into()));
        }
        edits
    }

    /// Applies `edit_count` random edits and returns them (with the text that
    /// was actually inserted) along with the resulting operation.
    pub fn randomly_edit<T>(
        &mut self,
        rng: &mut T,
        edit_count: usize,
    ) -> (Vec<(Range<usize>, Arc<str>)>, Operation)
    where
        T: rand::Rng,
    {
        let mut edits = self.get_random_edits(rng, edit_count);
        log::info!("mutating buffer {:?} with {:?}", self.replica_id, edits);

        let op = self.edit(edits.iter().cloned());
        if let Operation::Edit(edit) = &op {
            assert_eq!(edits.len(), edit.new_text.len());
            // Report the text recorded in the operation, which may differ
            // from the requested text — TODO confirm (e.g. normalization).
            for (edit, new_text) in edits.iter_mut().zip(&edit.new_text) {
                edit.1 = new_text.clone();
            }
        } else {
            unreachable!()
        }

        (edits, op)
    }

    /// Undoes between one and five randomly chosen transactions from the undo
    /// stack, returning the produced operations.
    pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng) -> Vec<Operation> {
        use rand::prelude::*;

        let mut ops = Vec::new();
        for _ in 0..rng.random_range(1..=5) {
            if let Some(entry) = self.history.undo_stack.choose(rng) {
                let transaction = entry.transaction.clone();
                log::info!(
                    "undoing buffer {:?} transaction {:?}",
                    self.replica_id,
                    transaction
                );
                ops.push(self.undo_or_redo(transaction));
            }
        }
        ops
    }
}
2011
impl Deref for Buffer {
    type Target = BufferSnapshot;

    /// Exposes all `BufferSnapshot` methods directly on `Buffer`.
    fn deref(&self) -> &Self::Target {
        &self.snapshot
    }
}
2019
2020impl BufferSnapshot {
    /// Returns the visible text as a rope.
    pub fn as_rope(&self) -> &Rope {
        &self.visible_text
    }
2024
    /// Reconstructs the buffer's text as it was at `version`.
    ///
    /// Visits only fragments whose maximal version isn't fully contained in
    /// `version`; runs of unchanged text between them are copied wholesale
    /// from the current visible rope.
    pub fn rope_for_version(&self, version: &clock::Global) -> Rope {
        let mut rope = Rope::new();

        // Filter cursor that skips subtrees fully observed at `version`.
        let mut cursor = self
            .fragments
            .filter::<_, FragmentTextSummary>(&None, move |summary| {
                !version.observed_all(&summary.max_version)
            });
        cursor.next();

        let mut visible_cursor = self.visible_text.cursor(0);
        let mut deleted_cursor = self.deleted_text.cursor(0);

        while let Some(fragment) = cursor.item() {
            // Copy any unchanged visible text preceding this fragment.
            if cursor.start().visible > visible_cursor.offset() {
                let text = visible_cursor.slice(cursor.start().visible);
                rope.append(text);
            }

            if fragment.was_visible(version, &self.undo_map) {
                if fragment.visible {
                    let text = visible_cursor.slice(cursor.end().visible);
                    rope.append(text);
                } else {
                    // Visible at `version` but deleted since: recover the
                    // text from the deleted rope.
                    deleted_cursor.seek_forward(cursor.start().deleted);
                    let text = deleted_cursor.slice(cursor.end().deleted);
                    rope.append(text);
                }
            } else if fragment.visible {
                // Inserted after `version`: skip its visible text.
                visible_cursor.seek_forward(cursor.end().visible);
            }

            cursor.next();
        }

        // Copy any unchanged visible text after the last changed fragment.
        if cursor.start().visible > visible_cursor.offset() {
            let text = visible_cursor.slice(cursor.start().visible);
            rope.append(text);
        }

        rope
    }
2067
    /// Returns this buffer's id, shared across all replicas.
    pub fn remote_id(&self) -> BufferId {
        self.remote_id
    }
2071
    /// Returns the id of the replica that owns this snapshot.
    pub fn replica_id(&self) -> ReplicaId {
        self.replica_id
    }
2075
    /// Returns the number of lines in the buffer (the max row index plus one).
    pub fn row_count(&self) -> u32 {
        self.max_point().row + 1
    }
2079
    /// Returns the length of the visible text in bytes.
    pub fn len(&self) -> usize {
        self.visible_text.len()
    }
2083
    /// Returns true if the visible text is empty.
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }
2087
    /// Iterates over all characters of the visible text.
    pub fn chars(&self) -> impl Iterator<Item = char> + '_ {
        self.chars_at(0)
    }
2091
    /// Iterates over the characters within the given range.
    pub fn chars_for_range<T: ToOffset>(&self, range: Range<T>) -> impl Iterator<Item = char> + '_ {
        self.text_for_range(range).flat_map(str::chars)
    }
2095
    /// Iterates over the characters within the given range in reverse order.
    pub fn reversed_chars_for_range<T: ToOffset>(
        &self,
        range: Range<T>,
    ) -> impl Iterator<Item = char> + '_ {
        self.reversed_chunks_in_range(range)
            .flat_map(|chunk| chunk.chars().rev())
    }
2103
2104 pub fn contains_str_at<T>(&self, position: T, needle: &str) -> bool
2105 where
2106 T: ToOffset,
2107 {
2108 let position = position.to_offset(self);
2109 position == self.clip_offset(position, Bias::Left)
2110 && self
2111 .bytes_in_range(position..self.len())
2112 .flatten()
2113 .copied()
2114 .take(needle.len())
2115 .eq(needle.bytes())
2116 }
2117
    /// Returns the range of the longest prefix of `needle` that appears,
    /// case-insensitively, immediately before `position`. The returned range
    /// ends at `position`.
    pub fn common_prefix_at<T>(&self, position: T, needle: &str) -> Range<T>
    where
        T: ToOffset + TextDimension,
    {
        let offset = position.to_offset(self);
        // Candidate prefix lengths are the char boundaries of `needle`
        // (plus its full length), limited to the text available before `offset`.
        let common_prefix_len = needle
            .char_indices()
            .map(|(index, _)| index)
            .chain([needle.len()])
            .take_while(|&len| len <= offset)
            .filter(|&len| {
                // Case-insensitive comparison of the `len` bytes preceding
                // `offset` against the first `len` bytes of the needle.
                let left = self
                    .chars_for_range(offset - len..offset)
                    .flat_map(char::to_lowercase);
                let right = needle[..len].chars().flat_map(char::to_lowercase);
                left.eq(right)
            })
            .last()
            .unwrap_or(0);
        let start_offset = offset - common_prefix_len;
        let start = self.text_summary_for_range(0..start_offset);
        start..position
    }
2141
    /// The entire visible text as an owned `String`.
    pub fn text(&self) -> String {
        self.visible_text.to_string()
    }
2145
    /// The line ending style recorded for this buffer.
    pub fn line_ending(&self) -> LineEnding {
        self.line_ending
    }
2149
    /// The concatenated deleted text as an owned `String`.
    pub fn deleted_text(&self) -> String {
        self.deleted_text.to_string()
    }
2153
    /// A summary of the entire visible text.
    pub fn text_summary(&self) -> TextSummary {
        self.visible_text.summary()
    }
2157
    /// The position just past the last character of the visible text.
    pub fn max_point(&self) -> Point {
        self.visible_text.max_point()
    }
2161
    /// The position just past the last character, in UTF-16 coordinates.
    pub fn max_point_utf16(&self) -> PointUtf16 {
        self.visible_text.max_point_utf16()
    }
2165
    /// Converts a `Point` to its byte offset in the visible text.
    pub fn point_to_offset(&self, point: Point) -> usize {
        self.visible_text.point_to_offset(point)
    }
2169
    /// Converts a `Point` to the corresponding UTF-16 offset.
    pub fn point_to_offset_utf16(&self, point: Point) -> OffsetUtf16 {
        self.visible_text.point_to_offset_utf16(point)
    }
2173
    /// Converts a UTF-16 point to the corresponding UTF-16 offset.
    pub fn point_utf16_to_offset_utf16(&self, point: PointUtf16) -> OffsetUtf16 {
        self.visible_text.point_utf16_to_offset_utf16(point)
    }
2177
    /// Converts a UTF-16 point to its byte offset in the visible text.
    pub fn point_utf16_to_offset(&self, point: PointUtf16) -> usize {
        self.visible_text.point_utf16_to_offset(point)
    }
2181
    /// Converts a possibly-invalid UTF-16 point to a byte offset.
    pub fn unclipped_point_utf16_to_offset(&self, point: Unclipped<PointUtf16>) -> usize {
        self.visible_text.unclipped_point_utf16_to_offset(point)
    }
2185
    /// Converts a possibly-invalid UTF-16 point to a byte-oriented `Point`.
    pub fn unclipped_point_utf16_to_point(&self, point: Unclipped<PointUtf16>) -> Point {
        self.visible_text.unclipped_point_utf16_to_point(point)
    }
2189
    /// Converts a UTF-16 offset to a byte offset.
    pub fn offset_utf16_to_offset(&self, offset: OffsetUtf16) -> usize {
        self.visible_text.offset_utf16_to_offset(offset)
    }
2193
    /// Converts a byte offset to a UTF-16 offset.
    pub fn offset_to_offset_utf16(&self, offset: usize) -> OffsetUtf16 {
        self.visible_text.offset_to_offset_utf16(offset)
    }
2197
    /// Converts a byte offset to a `Point`.
    pub fn offset_to_point(&self, offset: usize) -> Point {
        self.visible_text.offset_to_point(offset)
    }
2201
    /// Converts a byte offset to a UTF-16 point.
    pub fn offset_to_point_utf16(&self, offset: usize) -> PointUtf16 {
        self.visible_text.offset_to_point_utf16(offset)
    }
2205
    /// Converts a `Point` to a UTF-16 point.
    pub fn point_to_point_utf16(&self, point: Point) -> PointUtf16 {
        self.visible_text.point_to_point_utf16(point)
    }
2209
    /// Converts a UTF-16 point to a byte-oriented `Point`.
    pub fn point_utf16_to_point(&self, point: PointUtf16) -> Point {
        self.visible_text.point_utf16_to_point(point)
    }
2213
    /// The vector clock describing the operations incorporated into this snapshot.
    pub fn version(&self) -> &clock::Global {
        &self.version
    }
2217
    /// Iterates over the characters starting at `position`.
    pub fn chars_at<T: ToOffset>(&self, position: T) -> impl Iterator<Item = char> + '_ {
        let offset = position.to_offset(self);
        self.visible_text.chars_at(offset)
    }
2222
    /// Iterates backwards over the characters preceding `position`.
    pub fn reversed_chars_at<T: ToOffset>(&self, position: T) -> impl Iterator<Item = char> + '_ {
        let offset = position.to_offset(self);
        self.visible_text.reversed_chars_at(offset)
    }
2227
    /// Iterates over the text chunks within `range`, from back to front.
    pub fn reversed_chunks_in_range<T: ToOffset>(&self, range: Range<T>) -> rope::Chunks<'_> {
        let range = range.start.to_offset(self)..range.end.to_offset(self);
        self.visible_text.reversed_chunks_in_range(range)
    }
2232
    /// Iterates over the bytes within `range`.
    pub fn bytes_in_range<T: ToOffset>(&self, range: Range<T>) -> rope::Bytes<'_> {
        let start = range.start.to_offset(self);
        let end = range.end.to_offset(self);
        self.visible_text.bytes_in_range(start..end)
    }
2238
    /// Iterates over the bytes within `range`, from back to front.
    pub fn reversed_bytes_in_range<T: ToOffset>(&self, range: Range<T>) -> rope::Bytes<'_> {
        let start = range.start.to_offset(self);
        let end = range.end.to_offset(self);
        self.visible_text.reversed_bytes_in_range(start..end)
    }
2244
    /// Iterates over the text chunks within `range`.
    pub fn text_for_range<T: ToOffset>(&self, range: Range<T>) -> Chunks<'_> {
        let start = range.start.to_offset(self);
        let end = range.end.to_offset(self);
        self.visible_text.chunks_in_range(start..end)
    }
2250
    /// The length of the given row in bytes, excluding its line terminator.
    pub fn line_len(&self, row: u32) -> u32 {
        let row_start_offset = Point::new(row, 0).to_offset(self);
        let row_end_offset = if row >= self.max_point().row {
            // The last row extends to the end of the buffer.
            self.len()
        } else {
            // Otherwise back up over the terminator preceding the next row.
            Point::new(row + 1, 0).to_previous_offset(self)
        };
        (row_end_offset - row_start_offset) as u32
    }
2260
    /// A function to convert character offsets from e.g. user's `go.mod:22:33` input into byte-offset Point columns.
    pub fn point_from_external_input(&self, row: u32, characters: u32) -> Point {
        const MAX_BYTES_IN_UTF_8: u32 = 4;

        // Clamp the row so out-of-range input lands on the last line.
        let row = row.min(self.max_point().row);
        let start = Point::new(row, 0);
        // Compute a clipped upper bound for the scan: each character occupies
        // at most 4 bytes in UTF-8, plus one byte so clipping can land past
        // the final character of interest.
        let end = self.clip_point(
            Point::new(
                row,
                characters
                    .saturating_mul(MAX_BYTES_IN_UTF_8)
                    .saturating_add(1),
            ),
            Bias::Right,
        );
        let range = start..end;
        let mut point = range.start;
        let mut remaining_columns = characters;

        // Walk characters, advancing the byte column until `characters`
        // characters have been consumed or the line's text runs out.
        for chunk in self.text_for_range(range) {
            for character in chunk.chars() {
                if remaining_columns == 0 {
                    return point;
                }
                remaining_columns -= 1;
                point.column += character.len_utf8() as u32;
            }
        }
        point
    }
2291
    /// Returns the indentation of each row in `row_range` (end row included),
    /// paired with its row index.
    pub fn line_indents_in_row_range(
        &self,
        row_range: Range<u32>,
    ) -> impl Iterator<Item = (u32, LineIndent)> + '_ {
        let start = Point::new(row_range.start, 0).to_offset(self);
        let end = Point::new(row_range.end, self.line_len(row_range.end)).to_offset(self);

        let mut chunks = self.as_rope().chunks_in_range(start..end);
        let mut row = row_range.start;
        let mut done = false;
        std::iter::from_fn(move || {
            if done {
                None
            } else {
                // Measure this row's indent, then advance the chunk cursor to
                // the next line; the iterator ends when no next line exists.
                let indent = (row, LineIndent::from_chunks(&mut chunks));
                done = !chunks.next_line();
                row += 1;
                Some(indent)
            }
        })
    }
2313
    /// Returns the line indents in the given row range, exclusive of end row, in reversed order.
    pub fn reversed_line_indents_in_row_range(
        &self,
        row_range: Range<u32>,
    ) -> impl Iterator<Item = (u32, LineIndent)> + '_ {
        let start = Point::new(row_range.start, 0).to_offset(self);

        // The scan covers up to the end of the last included row; an empty
        // row range degenerates to an empty offset range.
        let end_point;
        let end;
        if row_range.end > row_range.start {
            end_point = Point::new(row_range.end - 1, self.line_len(row_range.end - 1));
            end = end_point.to_offset(self);
        } else {
            end_point = Point::new(row_range.start, 0);
            end = start;
        };

        let mut chunks = self.as_rope().chunks_in_range(start..end);
        // Move the cursor to the start of the last line if it's not empty.
        chunks.seek(end);
        if end_point.column > 0 {
            chunks.prev_line();
        }

        let mut row = end_point.row;
        let mut done = false;
        std::iter::from_fn(move || {
            if done {
                None
            } else {
                let initial_offset = chunks.offset();
                let indent = (row, LineIndent::from_chunks(&mut chunks));
                // Measuring the indent may have advanced within the line;
                // return to its start before stepping to the previous line.
                if chunks.offset() > initial_offset {
                    chunks.prev_line();
                }
                done = !chunks.prev_line();
                if !done {
                    row -= 1;
                }

                Some(indent)
            }
        })
    }
2358
    /// The indentation of the given row.
    pub fn line_indent_for_row(&self, row: u32) -> LineIndent {
        LineIndent::from_iter(self.chars_at(Point::new(row, 0)))
    }
2362
2363 pub fn is_line_blank(&self, row: u32) -> bool {
2364 self.text_for_range(Point::new(row, 0)..Point::new(row, self.line_len(row)))
2365 .all(|chunk| chunk.matches(|c: char| !c.is_whitespace()).next().is_none())
2366 }
2367
    /// Summarizes the text within `range` in the requested dimension `D`.
    pub fn text_summary_for_range<D, O: ToOffset>(&self, range: Range<O>) -> D
    where
        D: TextDimension,
    {
        self.visible_text
            .cursor(range.start.to_offset(self))
            .summary(range.end.to_offset(self))
    }
2376
    /// Resolves each anchor to its position expressed as a `D`.
    /// Delegates to the payload-carrying variant with a unit payload.
    pub fn summaries_for_anchors<'a, D, A>(&'a self, anchors: A) -> impl 'a + Iterator<Item = D>
    where
        D: 'a + TextDimension,
        A: 'a + IntoIterator<Item = &'a Anchor>,
    {
        let anchors = anchors.into_iter();
        self.summaries_for_anchors_with_payload::<D, _, ()>(anchors.map(|a| (a, ())))
            .map(|d| d.0)
    }
2386
    /// Resolves each `(anchor, payload)` pair to `(position, payload)`.
    ///
    /// Anchors are expected in non-decreasing order: the fragment and text
    /// cursors only seek forward, accumulating the position as they go.
    /// Panics if an anchor cannot be resolved against this snapshot.
    pub fn summaries_for_anchors_with_payload<'a, D, A, T>(
        &'a self,
        anchors: A,
    ) -> impl 'a + Iterator<Item = (D, T)>
    where
        D: 'a + TextDimension,
        A: 'a + IntoIterator<Item = (&'a Anchor, T)>,
    {
        let anchors = anchors.into_iter();
        let mut fragment_cursor = self
            .fragments
            .cursor::<Dimensions<Option<&Locator>, usize>>(&None);
        let mut text_cursor = self.visible_text.cursor(0);
        let mut position = D::zero(());

        anchors.map(move |(anchor, payload)| {
            // The sentinel anchors short-circuit without touching the cursors.
            if anchor.is_min() {
                return (D::zero(()), payload);
            } else if anchor.is_max() {
                return (D::from_text_summary(&self.visible_text.summary()), payload);
            }

            let Some(insertion) = self.try_find_fragment(anchor) else {
                panic!(
                    "invalid insertion for buffer {}@{:?} with anchor {:?}",
                    self.remote_id(),
                    self.version,
                    anchor
                );
            };
            // TODO verbose debug because we are seeing is_max return false unexpectedly,
            // remove this once that is understood and fixed
            assert_eq!(
                insertion.timestamp,
                anchor.timestamp(),
                "invalid insertion for buffer {}@{:?}. anchor: {:?}, {:?}, {:?}, {:?}, {:?}. timestamp: {:?}, offset: {:?}, bias: {:?}",
                self.remote_id(),
                self.version,
                anchor.timestamp_replica_id,
                anchor.timestamp_value,
                anchor.offset,
                anchor.bias,
                anchor.buffer_id,
                anchor.timestamp() == clock::Lamport::MAX,
                anchor.offset == u32::MAX,
                anchor.bias == Bias::Right,
            );

            // Locate the fragment and add the anchor's offset within it
            // (only visible fragments contribute to visible positions).
            fragment_cursor.seek_forward(&Some(&insertion.fragment_id), Bias::Left);
            let fragment = fragment_cursor.item().unwrap();
            let mut fragment_offset = fragment_cursor.start().1;
            if fragment.visible {
                fragment_offset += (anchor.offset - insertion.split_offset) as usize;
            }

            position.add_assign(&text_cursor.summary(fragment_offset));
            (position, payload)
        })
    }
2446
    /// Resolves a single anchor to its position expressed as a `D`.
    pub fn summary_for_anchor<D>(&self, anchor: &Anchor) -> D
    where
        D: TextDimension,
    {
        self.text_summary_for_range(0..self.offset_for_anchor(anchor))
    }
2453
    /// Resolves an anchor to a byte offset in the visible text.
    /// Panics (via `panic_bad_anchor`) when the anchor cannot be resolved.
    pub fn offset_for_anchor(&self, anchor: &Anchor) -> usize {
        if anchor.is_min() {
            0
        } else if anchor.is_max() {
            self.visible_text.len()
        } else {
            debug_assert_eq!(anchor.buffer_id, Some(self.remote_id));
            debug_assert!(
                self.version.observed(anchor.timestamp()),
                "Anchor timestamp {:?} not observed by buffer {:?}",
                anchor.timestamp(),
                self.version
            );
            // Find the insertion fragment the anchor refers to; a timestamp
            // mismatch means the lookup landed on the wrong insertion.
            let item = self.try_find_fragment(anchor);
            let Some(insertion) =
                item.filter(|insertion| insertion.timestamp == anchor.timestamp())
            else {
                self.panic_bad_anchor(anchor);
            };

            // Locate that fragment in the fragment tree to get its visible
            // start offset.
            let (start, _, item) = self
                .fragments
                .find::<Dimensions<Option<&Locator>, usize>, _>(
                    &None,
                    &Some(&insertion.fragment_id),
                    Bias::Left,
                );
            let fragment = item.unwrap();
            let mut fragment_offset = start.1;
            // Only visible fragments contribute the anchor's intra-fragment
            // offset; a deleted fragment collapses to its start.
            if fragment.visible {
                fragment_offset += (anchor.offset - insertion.split_offset) as usize;
            }
            fragment_offset
        }
    }
2489
2490 #[cold]
2491 fn panic_bad_anchor(&self, anchor: &Anchor) -> ! {
2492 if anchor.buffer_id.is_some_and(|id| id != self.remote_id) {
2493 panic!(
2494 "invalid anchor - buffer id does not match: anchor {anchor:?}; buffer id: {}, version: {:?}",
2495 self.remote_id, self.version
2496 );
2497 } else if !self.version.observed(anchor.timestamp()) {
2498 panic!(
2499 "invalid anchor - snapshot has not observed lamport: {:?}; version: {:?}",
2500 anchor, self.version
2501 );
2502 } else {
2503 panic!(
2504 "invalid anchor {:?}. buffer id: {}, version: {:?}",
2505 anchor, self.remote_id, self.version
2506 );
2507 }
2508 }
2509
    /// The locator of the fragment containing `anchor`, panicking when the
    /// anchor cannot be resolved.
    fn fragment_id_for_anchor(&self, anchor: &Anchor) -> &Locator {
        self.try_fragment_id_for_anchor(anchor)
            .unwrap_or_else(|| self.panic_bad_anchor(anchor))
    }
2514
    /// The locator of the fragment containing `anchor`, or `None` when it
    /// cannot be found.
    fn try_fragment_id_for_anchor(&self, anchor: &Anchor) -> Option<&Locator> {
        if anchor.is_min() {
            Some(Locator::min_ref())
        } else if anchor.is_max() {
            Some(Locator::max_ref())
        } else {
            let item = self.try_find_fragment(anchor);
            // The timestamp consistency check is only enforced in debug
            // builds; release builds accept whatever fragment was found.
            item.filter(|insertion| {
                !cfg!(debug_assertions) || insertion.timestamp == anchor.timestamp()
            })
            .map(|insertion| &insertion.fragment_id)
        }
    }
2528
    /// Locates the `InsertionFragment` containing `anchor`, using the
    /// anchor's bias to disambiguate positions on fragment boundaries.
    fn try_find_fragment(&self, anchor: &Anchor) -> Option<&InsertionFragment> {
        let anchor_key = InsertionFragmentKey {
            timestamp: anchor.timestamp(),
            split_offset: anchor.offset,
        };
        match self.insertions.find_with_prev::<InsertionFragmentKey, _>(
            (),
            &anchor_key,
            anchor.bias,
        ) {
            (_, _, Some((prev, insertion))) => {
                let comparison = sum_tree::KeyedItem::key(insertion).cmp(&anchor_key);
                // Fall back to the previous fragment when the found one starts
                // after the anchor, or when a left-biased anchor lands exactly
                // on a split boundary (it belongs to the preceding fragment).
                if comparison == Ordering::Greater
                    || (anchor.bias == Bias::Left
                        && comparison == Ordering::Equal
                        && anchor.offset > 0)
                {
                    prev
                } else {
                    Some(insertion)
                }
            }
            // Past the end of the tree: the last insertion fragment, if any.
            _ => self.insertions.last(),
        }
    }
2554
    /// Returns an anchor range for the given input position range that is anchored to the text in the range.
    /// (The start anchors rightward, the end anchors leftward, so the range hugs its contents.)
    pub fn anchor_range_around<T: ToOffset>(&self, position: Range<T>) -> Range<Anchor> {
        self.anchor_after(position.start)..self.anchor_before(position.end)
    }
2559
    /// Returns an anchor range for the given input position range that is anchored to the text before and after.
    /// (The start anchors leftward, the end anchors rightward, so the range absorbs adjacent insertions.)
    pub fn anchor_range_between<T: ToOffset>(&self, position: Range<T>) -> Range<Anchor> {
        self.anchor_before(position.start)..self.anchor_after(position.end)
    }
2564
    /// Returns an anchor for the given input position that is anchored to the text before the position.
    pub fn anchor_before<T: ToOffset>(&self, position: T) -> Anchor {
        self.anchor_at(position, Bias::Left)
    }
2569
    /// Returns an anchor for the given input position that is anchored to the text after the position.
    pub fn anchor_after<T: ToOffset>(&self, position: T) -> Anchor {
        self.anchor_at(position, Bias::Right)
    }
2574
    /// Returns an anchor for the given position with the given bias.
    pub fn anchor_at<T: ToOffset>(&self, position: T, bias: Bias) -> Anchor {
        self.anchor_at_offset(position.to_offset(self), bias)
    }
2578
    /// Builds an anchor for the given byte offset and bias, snapping the
    /// offset to a char boundary when necessary.
    fn anchor_at_offset(&self, mut offset: usize, bias: Bias) -> Anchor {
        if bias == Bias::Left && offset == 0 {
            Anchor::min_for_buffer(self.remote_id)
        } else if bias == Bias::Right
            // In release builds, right-biased offsets past the end are clamped
            // to the max anchor; debug builds only accept an exact end offset
            // so out-of-bounds offsets reach the debug_panic below.
            && ((!cfg!(debug_assertions) && offset >= self.len()) || offset == self.len())
        {
            Anchor::max_for_buffer(self.remote_id)
        } else {
            // Snap offsets that fall inside a multi-byte character to the
            // nearest boundary in the direction of the bias.
            if !self
                .visible_text
                .assert_char_boundary::<{ cfg!(debug_assertions) }>(offset)
            {
                offset = match bias {
                    Bias::Left => self.visible_text.floor_char_boundary(offset),
                    Bias::Right => self.visible_text.ceil_char_boundary(offset),
                };
            }
            let (start, _, item) = self.fragments.find::<usize, _>(&None, &offset, bias);
            let Some(fragment) = item else {
                // We got a bad offset, likely out of bounds
                debug_panic!(
                    "Failed to find fragment at offset {} (len: {})",
                    offset,
                    self.len()
                );
                return Anchor::max_for_buffer(self.remote_id);
            };
            let overshoot = offset - start;
            Anchor::new(
                fragment.timestamp,
                fragment.insertion_offset + overshoot as u32,
                bias,
                Some(self.remote_id),
            )
        }
    }
2615
2616 pub fn can_resolve(&self, anchor: &Anchor) -> bool {
2617 anchor.is_min()
2618 || anchor.is_max()
2619 || (Some(self.remote_id) == anchor.buffer_id
2620 && self.version.observed(anchor.timestamp()))
2621 }
2622
    /// Clips a byte offset to a valid position, per the given bias.
    pub fn clip_offset(&self, offset: usize, bias: Bias) -> usize {
        self.visible_text.clip_offset(offset, bias)
    }
2626
    /// Clips a `Point` to a valid position, per the given bias.
    pub fn clip_point(&self, point: Point, bias: Bias) -> Point {
        self.visible_text.clip_point(point, bias)
    }
2630
    /// Clips a UTF-16 offset to a valid position, per the given bias.
    pub fn clip_offset_utf16(&self, offset: OffsetUtf16, bias: Bias) -> OffsetUtf16 {
        self.visible_text.clip_offset_utf16(offset, bias)
    }
2634
    /// Clips a possibly-invalid UTF-16 point to a valid position, per the given bias.
    pub fn clip_point_utf16(&self, point: Unclipped<PointUtf16>, bias: Bias) -> PointUtf16 {
        self.visible_text.clip_point_utf16(point, bias)
    }
2638
    /// Iterates over the edits made since the given version, over the whole buffer.
    pub fn edits_since<'a, D>(
        &'a self,
        since: &'a clock::Global,
    ) -> impl 'a + Iterator<Item = Edit<D>>
    where
        D: TextDimension + Ord,
    {
        self.edits_since_in_range(since, Anchor::MIN..Anchor::MAX)
    }
2648
    /// Like `edits_since`, but also yields the anchor range each edit covers.
    pub fn anchored_edits_since<'a, D>(
        &'a self,
        since: &'a clock::Global,
    ) -> impl 'a + Iterator<Item = (Edit<D>, Range<Anchor>)>
    where
        D: TextDimension + Ord,
    {
        self.anchored_edits_since_in_range(since, Anchor::MIN..Anchor::MAX)
    }
2658
    /// Iterates over the edits made since the given version within `range`.
    pub fn edits_since_in_range<'a, D>(
        &'a self,
        since: &'a clock::Global,
        range: Range<Anchor>,
    ) -> impl 'a + Iterator<Item = Edit<D>>
    where
        D: TextDimension + Ord,
    {
        self.anchored_edits_since_in_range(since, range)
            .map(|item| item.0)
    }
2670
    /// Iterates over the edits made since `since` within `range`, pairing
    /// each edit with the anchor range it covers.
    pub fn anchored_edits_since_in_range<'a, D>(
        &'a self,
        since: &'a clock::Global,
        range: Range<Anchor>,
    ) -> impl 'a + Iterator<Item = (Edit<D>, Range<Anchor>)>
    where
        D: TextDimension + Ord,
    {
        // Fast path: no edits when the snapshot is already at `since`.
        if *since == self.version {
            return None.into_iter().flatten();
        }
        // Only descend into subtrees containing operations `since` has not
        // fully observed.
        let mut cursor = self.fragments.filter(&None, move |summary| {
            !since.observed_all(&summary.max_version)
        });
        cursor.next();
        let fragments_cursor = Some(cursor);
        // Translate the anchor range's start into offsets within the visible
        // and deleted ropes, including the anchor's offset inside its fragment.
        let start_fragment_id = self.fragment_id_for_anchor(&range.start);
        let (start, _, item) = self
            .fragments
            .find::<Dimensions<Option<&Locator>, FragmentTextSummary>, _>(
                &None,
                &Some(start_fragment_id),
                Bias::Left,
            );
        let mut visible_start = start.1.visible;
        let mut deleted_start = start.1.deleted;
        if let Some(fragment) = item {
            let overshoot = (range.start.offset - fragment.insertion_offset) as usize;
            if fragment.visible {
                visible_start += overshoot;
            } else {
                deleted_start += overshoot;
            }
        }
        let end_fragment_id = self.fragment_id_for_anchor(&range.end);

        Some(Edits {
            visible_cursor: self.visible_text.cursor(visible_start),
            deleted_cursor: self.deleted_text.cursor(deleted_start),
            fragments_cursor,
            undos: &self.undo_map,
            since,
            old_end: D::zero(()),
            new_end: D::zero(()),
            range: (start_fragment_id, range.start.offset)..(end_fragment_id, range.end.offset),
            buffer_id: self.remote_id,
        })
        .into_iter()
        .flatten()
    }
2721
    /// Reports whether any fragment inside `range` changed visibility since
    /// the given version, without materializing the edits.
    pub fn has_edits_since_in_range(&self, since: &clock::Global, range: Range<Anchor>) -> bool {
        if *since != self.version {
            let start_fragment_id = self.fragment_id_for_anchor(&range.start);
            let end_fragment_id = self.fragment_id_for_anchor(&range.end);
            // Only visit fragments touched by operations unseen at `since`.
            let mut cursor = self.fragments.filter::<_, usize>(&None, move |summary| {
                !since.observed_all(&summary.max_version)
            });
            cursor.next();
            while let Some(fragment) = cursor.item() {
                if fragment.id > *end_fragment_id {
                    break;
                }
                if fragment.id > *start_fragment_id {
                    // A visibility flip (inserted or deleted since `since`)
                    // is what constitutes an edit.
                    let was_visible = fragment.was_visible(since, &self.undo_map);
                    let is_visible = fragment.visible;
                    if was_visible != is_visible {
                        return true;
                    }
                }
                cursor.next();
            }
        }
        false
    }
2746
    /// Reports whether any fragment changed visibility since the given
    /// version, without materializing the edits.
    pub fn has_edits_since(&self, since: &clock::Global) -> bool {
        if *since != self.version {
            // Only visit fragments touched by operations unseen at `since`.
            let mut cursor = self.fragments.filter::<_, usize>(&None, move |summary| {
                !since.observed_all(&summary.max_version)
            });
            cursor.next();
            while let Some(fragment) = cursor.item() {
                let was_visible = fragment.was_visible(since, &self.undo_map);
                let is_visible = fragment.visible;
                if was_visible != is_visible {
                    return true;
                }
                cursor.next();
            }
        }
        false
    }
2764
    /// Translates an offset range in the current text into the corresponding
    /// offset range at a prior `version`.
    pub fn range_to_version(&self, range: Range<usize>, version: &clock::Global) -> Range<usize> {
        let mut offsets = self.offsets_to_version([range.start, range.end], version);
        offsets.next().unwrap()..offsets.next().unwrap()
    }
2769
    /// Converts the given sequence of offsets into their corresponding offsets
    /// at a prior version of this buffer.
    ///
    /// Offsets must be provided in non-decreasing order: edits since `version`
    /// are consumed front-to-back as the input offsets advance.
    pub fn offsets_to_version<'a>(
        &'a self,
        offsets: impl 'a + IntoIterator<Item = usize>,
        version: &'a clock::Global,
    ) -> impl 'a + Iterator<Item = usize> {
        let mut edits = self.edits_since(version).peekable();
        let mut last_old_end = 0;
        let mut last_new_end = 0;
        offsets.into_iter().map(move |new_offset| {
            while let Some(edit) = edits.peek() {
                // Edits strictly after this offset cannot affect it.
                if edit.new.start > new_offset {
                    break;
                }

                // Edits entirely before this offset just shift the mapping.
                if edit.new.end <= new_offset {
                    last_new_end = edit.new.end;
                    last_old_end = edit.old.end;
                    edits.next();
                    continue;
                }

                // The offset falls inside this edit: map proportionally,
                // clamped to the edit's old extent.
                let overshoot = new_offset - edit.new.start;
                return (edit.old.start + overshoot).min(edit.old.end);
            }

            // Between edits: apply the accumulated shift.
            last_old_end + new_offset.saturating_sub(last_new_end)
        })
    }
2800
    /// Visually annotates a position or range with the `Debug` representation of a value. The
    /// callsite of this function is used as a key - previous annotations will be removed.
    #[cfg(debug_assertions)]
    #[track_caller]
    pub fn debug<R, V>(&self, ranges: &R, value: V)
    where
        R: debug::ToDebugRanges,
        V: std::fmt::Debug,
    {
        // `#[track_caller]` makes `Location::caller()` the callsite, giving
        // each callsite its own annotation key.
        self.debug_with_key(std::panic::Location::caller(), ranges, value);
    }
2812
    /// Visually annotates a position or range with the `Debug` representation of a value. Previous
    /// debug annotations with the same key will be removed. The key is also used to determine the
    /// annotation's color.
    #[cfg(debug_assertions)]
    pub fn debug_with_key<K, R, V>(&self, key: &K, ranges: &R, value: V)
    where
        K: std::hash::Hash + 'static,
        R: debug::ToDebugRanges,
        V: std::fmt::Debug,
    {
        // Anchor each range to the text it covers so annotations track edits.
        let ranges = ranges
            .to_debug_ranges(self)
            .into_iter()
            .map(|range| self.anchor_after(range.start)..self.anchor_before(range.end))
            .collect();
        debug::GlobalDebugRanges::with_locked(|debug_ranges| {
            debug_ranges.insert(key, ranges, format!("{value:?}").into());
        });
    }
2832}
2833
2834struct RopeBuilder<'a> {
2835 old_visible_cursor: rope::Cursor<'a>,
2836 old_deleted_cursor: rope::Cursor<'a>,
2837 new_visible: Rope,
2838 new_deleted: Rope,
2839}
2840
2841impl<'a> RopeBuilder<'a> {
2842 fn new(old_visible_cursor: rope::Cursor<'a>, old_deleted_cursor: rope::Cursor<'a>) -> Self {
2843 Self {
2844 old_visible_cursor,
2845 old_deleted_cursor,
2846 new_visible: Rope::new(),
2847 new_deleted: Rope::new(),
2848 }
2849 }
2850
2851 fn append(&mut self, len: FragmentTextSummary) {
2852 self.push(len.visible, true, true);
2853 self.push(len.deleted, false, false);
2854 }
2855
2856 fn push_fragment(&mut self, fragment: &Fragment, was_visible: bool) {
2857 debug_assert!(fragment.len > 0);
2858 self.push(fragment.len as usize, was_visible, fragment.visible)
2859 }
2860
2861 fn push(&mut self, len: usize, was_visible: bool, is_visible: bool) {
2862 let text = if was_visible {
2863 self.old_visible_cursor
2864 .slice(self.old_visible_cursor.offset() + len)
2865 } else {
2866 self.old_deleted_cursor
2867 .slice(self.old_deleted_cursor.offset() + len)
2868 };
2869 if is_visible {
2870 self.new_visible.append(text);
2871 } else {
2872 self.new_deleted.append(text);
2873 }
2874 }
2875
2876 fn push_str(&mut self, text: &str) {
2877 self.new_visible.push(text);
2878 }
2879
2880 fn finish(mut self) -> (Rope, Rope) {
2881 self.new_visible.append(self.old_visible_cursor.suffix());
2882 self.new_deleted.append(self.old_deleted_cursor.suffix());
2883 (self.new_visible, self.new_deleted)
2884 }
2885}
2886
impl<D: TextDimension + Ord, F: FnMut(&FragmentSummary) -> bool> Iterator for Edits<'_, D, F> {
    type Item = (Edit<D>, Range<Anchor>);

    /// Yields the next run of contiguous changes between `self.since` and the
    /// current state, merging adjacent changed fragments into a single edit,
    /// paired with the anchor range the run covers.
    fn next(&mut self) -> Option<Self::Item> {
        let mut pending_edit: Option<Self::Item> = None;
        let cursor = self.fragments_cursor.as_mut()?;

        while let Some(fragment) = cursor.item() {
            // Skip fragments before the requested range; stop past its end.
            if fragment.id < *self.range.start.0 {
                cursor.next();
                continue;
            } else if fragment.id > *self.range.end.0 {
                break;
            }

            // Advance the running old/new positions over the unchanged text
            // between the previous fragment and this one.
            if cursor.start().visible > self.visible_cursor.offset() {
                let summary = self.visible_cursor.summary(cursor.start().visible);
                self.old_end.add_assign(&summary);
                self.new_end.add_assign(&summary);
            }

            // A gap after the pending edit means the contiguous run ended;
            // emit what has been accumulated so far.
            if pending_edit
                .as_ref()
                .is_some_and(|(change, _)| change.new.end < self.new_end)
            {
                break;
            }

            let start_anchor = Anchor::new(
                fragment.timestamp,
                fragment.insertion_offset,
                Bias::Right,
                Some(self.buffer_id),
            );
            let end_anchor = Anchor::new(
                fragment.timestamp,
                fragment.insertion_offset + fragment.len,
                Bias::Left,
                Some(self.buffer_id),
            );

            if !fragment.was_visible(self.since, self.undos) && fragment.visible {
                // Inserted since `since`: contributes new (visible) text.
                let mut visible_end = cursor.end().visible;
                if fragment.id == *self.range.end.0 {
                    // Clamp the final fragment to the requested end offset.
                    visible_end = cmp::min(
                        visible_end,
                        cursor.start().visible
                            + (self.range.end.1 - fragment.insertion_offset) as usize,
                    );
                }

                let fragment_summary = self.visible_cursor.summary(visible_end);
                let mut new_end = self.new_end;
                new_end.add_assign(&fragment_summary);
                // Extend the pending edit if one exists, else start a new one.
                if let Some((edit, range)) = pending_edit.as_mut() {
                    edit.new.end = new_end;
                    range.end = end_anchor;
                } else {
                    pending_edit = Some((
                        Edit {
                            old: self.old_end..self.old_end,
                            new: self.new_end..new_end,
                        },
                        start_anchor..end_anchor,
                    ));
                }

                self.new_end = new_end;
            } else if fragment.was_visible(self.since, self.undos) && !fragment.visible {
                // Deleted since `since`: contributes old (now deleted) text.
                let mut deleted_end = cursor.end().deleted;
                if fragment.id == *self.range.end.0 {
                    // Clamp the final fragment to the requested end offset.
                    deleted_end = cmp::min(
                        deleted_end,
                        cursor.start().deleted
                            + (self.range.end.1 - fragment.insertion_offset) as usize,
                    );
                }

                if cursor.start().deleted > self.deleted_cursor.offset() {
                    self.deleted_cursor.seek_forward(cursor.start().deleted);
                }
                let fragment_summary = self.deleted_cursor.summary(deleted_end);
                let mut old_end = self.old_end;
                old_end.add_assign(&fragment_summary);
                // Extend the pending edit if one exists, else start a new one.
                if let Some((edit, range)) = pending_edit.as_mut() {
                    edit.old.end = old_end;
                    range.end = end_anchor;
                } else {
                    pending_edit = Some((
                        Edit {
                            old: self.old_end..old_end,
                            new: self.new_end..self.new_end,
                        },
                        start_anchor..end_anchor,
                    ));
                }

                self.old_end = old_end;
            }

            cursor.next();
        }

        pending_edit
    }
}
2993
2994impl Fragment {
2995 fn is_visible(&self, undos: &UndoMap) -> bool {
2996 !undos.is_undone(self.timestamp) && self.deletions.iter().all(|d| undos.is_undone(*d))
2997 }
2998
2999 fn was_visible(&self, version: &clock::Global, undos: &UndoMap) -> bool {
3000 (version.observed(self.timestamp) && !undos.was_undone(self.timestamp, version))
3001 && self
3002 .deletions
3003 .iter()
3004 .all(|d| !version.observed(*d) || undos.was_undone(*d, version))
3005 }
3006}
3007
3008impl sum_tree::Item for Fragment {
3009 type Summary = FragmentSummary;
3010
3011 fn summary(&self, _cx: &Option<clock::Global>) -> Self::Summary {
3012 let mut max_version = clock::Global::new();
3013 max_version.observe(self.timestamp);
3014 for deletion in &self.deletions {
3015 max_version.observe(*deletion);
3016 }
3017 max_version.join(&self.max_undos);
3018
3019 let mut min_insertion_version = clock::Global::new();
3020 min_insertion_version.observe(self.timestamp);
3021 let max_insertion_version = min_insertion_version.clone();
3022 if self.visible {
3023 FragmentSummary {
3024 max_id: self.id.clone(),
3025 text: FragmentTextSummary {
3026 visible: self.len as usize,
3027 deleted: 0,
3028 },
3029 max_version,
3030 min_insertion_version,
3031 max_insertion_version,
3032 }
3033 } else {
3034 FragmentSummary {
3035 max_id: self.id.clone(),
3036 text: FragmentTextSummary {
3037 visible: 0,
3038 deleted: self.len as usize,
3039 },
3040 max_version,
3041 min_insertion_version,
3042 max_insertion_version,
3043 }
3044 }
3045 }
3046}
3047
3048impl sum_tree::Summary for FragmentSummary {
3049 type Context<'a> = &'a Option<clock::Global>;
3050
3051 fn zero(_cx: Self::Context<'_>) -> Self {
3052 Default::default()
3053 }
3054
3055 fn add_summary(&mut self, other: &Self, _: Self::Context<'_>) {
3056 self.max_id.assign(&other.max_id);
3057 self.text.visible += &other.text.visible;
3058 self.text.deleted += &other.text.deleted;
3059 self.max_version.join(&other.max_version);
3060 self.min_insertion_version
3061 .meet(&other.min_insertion_version);
3062 self.max_insertion_version
3063 .join(&other.max_insertion_version);
3064 }
3065}
3066
impl Default for FragmentSummary {
    /// The identity summary: minimum id, zero lengths, empty version vectors.
    fn default() -> Self {
        FragmentSummary {
            max_id: Locator::min(),
            text: FragmentTextSummary::default(),
            max_version: clock::Global::new(),
            min_insertion_version: clock::Global::new(),
            max_insertion_version: clock::Global::new(),
        }
    }
}
3078
impl sum_tree::Item for InsertionFragment {
    type Summary = InsertionFragmentKey;

    /// An insertion fragment summarizes to its own key: the insertion
    /// timestamp plus the split offset within that insertion.
    fn summary(&self, _cx: ()) -> Self::Summary {
        InsertionFragmentKey {
            timestamp: self.timestamp,
            split_offset: self.split_offset,
        }
    }
}
3089
impl sum_tree::KeyedItem for InsertionFragment {
    type Key = InsertionFragmentKey;

    /// The key is identical to the summary.
    fn key(&self) -> Self::Key {
        sum_tree::Item::summary(self, ())
    }
}
3097
impl InsertionFragment {
    /// Builds the insertion-side index entry for a fragment.
    fn new(fragment: &Fragment) -> Self {
        Self {
            timestamp: fragment.timestamp,
            split_offset: fragment.insertion_offset,
            fragment_id: fragment.id.clone(),
        }
    }

    /// A tree edit that inserts the index entry for `fragment`.
    fn insert_new(fragment: &Fragment) -> sum_tree::Edit<Self> {
        sum_tree::Edit::Insert(Self::new(fragment))
    }
}
3111
impl sum_tree::ContextLessSummary for InsertionFragmentKey {
    fn zero() -> Self {
        InsertionFragmentKey {
            timestamp: Lamport::MIN,
            split_offset: 0,
        }
    }

    /// Summaries combine by keeping the rightmost key.
    fn add_summary(&mut self, summary: &Self) {
        *self = *summary;
    }
}
3124
/// An offset into the buffer's full text — counting visible and deleted
/// characters alike (see the `Dimension` impl, which sums both).
#[derive(Copy, Clone, Debug, Default, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct FullOffset(pub usize);

impl ops::AddAssign<usize> for FullOffset {
    fn add_assign(&mut self, rhs: usize) {
        self.0 += rhs;
    }
}

impl ops::Add<usize> for FullOffset {
    type Output = Self;

    fn add(self, rhs: usize) -> Self::Output {
        Self(self.0 + rhs)
    }
}

impl ops::Sub for FullOffset {
    type Output = usize;

    /// The distance between two full offsets, as a plain `usize`.
    fn sub(self, rhs: Self) -> Self::Output {
        self.0 - rhs.0
    }
}
3150
impl sum_tree::Dimension<'_, FragmentSummary> for usize {
    fn zero(_: &Option<clock::Global>) -> Self {
        Default::default()
    }

    /// A bare `usize` dimension measures visible text only.
    fn add_summary(&mut self, summary: &FragmentSummary, _: &Option<clock::Global>) {
        *self += summary.text.visible;
    }
}
3160
impl sum_tree::Dimension<'_, FragmentSummary> for FullOffset {
    fn zero(_: &Option<clock::Global>) -> Self {
        Default::default()
    }

    /// `FullOffset` measures visible and deleted text together.
    fn add_summary(&mut self, summary: &FragmentSummary, _: &Option<clock::Global>) {
        self.0 += summary.text.visible + summary.text.deleted;
    }
}
3170
// Tracks the rightmost fragment id traversed so far (`None` before any summary).
impl<'a> sum_tree::Dimension<'a, FragmentSummary> for Option<&'a Locator> {
    fn zero(_: &Option<clock::Global>) -> Self {
        Default::default()
    }

    fn add_summary(&mut self, summary: &'a FragmentSummary, _: &Option<clock::Global>) {
        *self = Some(&summary.max_id);
    }
}
3180
// Seeking by a plain `usize` targets an offset within the *visible* text only.
impl sum_tree::SeekTarget<'_, FragmentSummary, FragmentTextSummary> for usize {
    fn cmp(
        &self,
        cursor_location: &FragmentTextSummary,
        _: &Option<clock::Global>,
    ) -> cmp::Ordering {
        // Fully qualified to disambiguate from `SeekTarget::cmp` itself.
        Ord::cmp(self, &cursor_location.visible)
    }
}
3190
/// A `FullOffset` interpreted relative to a particular buffer version.
/// Becomes `Invalid` when the traversed fragments are only partially observed
/// by that version (see the `Dimension` impl below).
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
enum VersionedFullOffset {
    Offset(FullOffset),
    Invalid,
}
3196
3197impl VersionedFullOffset {
3198 fn full_offset(&self) -> FullOffset {
3199 if let Self::Offset(position) = self {
3200 *position
3201 } else {
3202 panic!("invalid version")
3203 }
3204 }
3205}
3206
3207impl Default for VersionedFullOffset {
3208 fn default() -> Self {
3209 Self::Offset(Default::default())
3210 }
3211}
3212
impl<'a> sum_tree::Dimension<'a, FragmentSummary> for VersionedFullOffset {
    fn zero(_cx: &Option<clock::Global>) -> Self {
        Default::default()
    }

    fn add_summary(&mut self, summary: &'a FragmentSummary, cx: &Option<clock::Global>) {
        // Once invalid, the dimension stays invalid for the rest of the traversal.
        if let Self::Offset(offset) = self {
            // The context must carry the version being measured against.
            let version = cx.as_ref().unwrap();
            if version.observed_all(&summary.max_insertion_version) {
                // Every insertion in this subtree is visible at `version`, so
                // its entire span (visible + deleted) counts toward the offset.
                *offset += summary.text.visible + summary.text.deleted;
            } else if version.observed_any(&summary.min_insertion_version) {
                // Partially observed subtree: the offset is ambiguous here.
                *self = Self::Invalid;
            }
            // Otherwise nothing in the subtree is observed; it contributes zero.
        }
    }
}
3229
impl sum_tree::SeekTarget<'_, FragmentSummary, Self> for VersionedFullOffset {
    fn cmp(&self, cursor_position: &Self, _: &Option<clock::Global>) -> cmp::Ordering {
        match (self, cursor_position) {
            (Self::Offset(a), Self::Offset(b)) => Ord::cmp(a, b),
            // An invalid cursor position lies beyond any concrete target.
            (Self::Offset(_), Self::Invalid) => cmp::Ordering::Less,
            // The seek target itself is always a concrete offset.
            (Self::Invalid, _) => unreachable!(),
        }
    }
}
3239
3240impl Operation {
3241 fn replica_id(&self) -> ReplicaId {
3242 operation_queue::Operation::lamport_timestamp(self).replica_id
3243 }
3244
3245 pub fn timestamp(&self) -> clock::Lamport {
3246 match self {
3247 Operation::Edit(edit) => edit.timestamp,
3248 Operation::Undo(undo) => undo.timestamp,
3249 }
3250 }
3251
3252 pub fn as_edit(&self) -> Option<&EditOperation> {
3253 match self {
3254 Operation::Edit(edit) => Some(edit),
3255 _ => None,
3256 }
3257 }
3258
3259 pub fn is_edit(&self) -> bool {
3260 matches!(self, Operation::Edit { .. })
3261 }
3262}
3263
3264impl operation_queue::Operation for Operation {
3265 fn lamport_timestamp(&self) -> clock::Lamport {
3266 match self {
3267 Operation::Edit(edit) => edit.timestamp,
3268 Operation::Undo(undo) => undo.timestamp,
3269 }
3270 }
3271}
3272
/// Conversion of a buffer coordinate into a byte offset in the visible text.
pub trait ToOffset {
    /// Resolves `self` to a byte offset within `snapshot`'s visible text.
    fn to_offset(&self, snapshot: &BufferSnapshot) -> usize;
    /// Turns this point into the next offset in the buffer that comes after this, respecting utf8 boundaries.
    fn to_next_offset(&self, snapshot: &BufferSnapshot) -> usize {
        snapshot
            .visible_text
            .ceil_char_boundary(self.to_offset(snapshot) + 1)
    }
    /// Turns this point into the previous offset in the buffer that comes before this, respecting utf8 boundaries.
    fn to_previous_offset(&self, snapshot: &BufferSnapshot) -> usize {
        snapshot
            .visible_text
            .floor_char_boundary(self.to_offset(snapshot).saturating_sub(1))
    }
}
3288
3289impl ToOffset for Point {
3290 #[inline]
3291 fn to_offset(&self, snapshot: &BufferSnapshot) -> usize {
3292 snapshot.point_to_offset(*self)
3293 }
3294}
3295
impl ToOffset for usize {
    #[track_caller]
    fn to_offset(&self, snapshot: &BufferSnapshot) -> usize {
        // Check the offset lies on a char boundary; the const-generic flag is
        // presumably used to panic in debug builds (defined in the rope crate —
        // TODO confirm). When the check reports failure, clamp to the nearest
        // boundary at or before the offset instead of returning an invalid one.
        if !snapshot
            .as_rope()
            .assert_char_boundary::<{ cfg!(debug_assertions) }>(*self)
        {
            snapshot.as_rope().floor_char_boundary(*self)
        } else {
            *self
        }
    }
}
3309
3310impl ToOffset for Anchor {
3311 #[inline]
3312 fn to_offset(&self, snapshot: &BufferSnapshot) -> usize {
3313 snapshot.summary_for_anchor(self)
3314 }
3315}
3316
3317impl<T: ToOffset> ToOffset for &T {
3318 #[inline]
3319 fn to_offset(&self, content: &BufferSnapshot) -> usize {
3320 (*self).to_offset(content)
3321 }
3322}
3323
3324impl ToOffset for PointUtf16 {
3325 #[inline]
3326 fn to_offset(&self, snapshot: &BufferSnapshot) -> usize {
3327 snapshot.point_utf16_to_offset(*self)
3328 }
3329}
3330
3331impl ToOffset for Unclipped<PointUtf16> {
3332 #[inline]
3333 fn to_offset(&self, snapshot: &BufferSnapshot) -> usize {
3334 snapshot.unclipped_point_utf16_to_offset(*self)
3335 }
3336}
3337
/// Conversion of a buffer coordinate into a row/column `Point`.
pub trait ToPoint {
    /// Resolves `self` to a `Point` within `snapshot`.
    fn to_point(&self, snapshot: &BufferSnapshot) -> Point;
}
3341
3342impl ToPoint for Anchor {
3343 #[inline]
3344 fn to_point(&self, snapshot: &BufferSnapshot) -> Point {
3345 snapshot.summary_for_anchor(self)
3346 }
3347}
3348
3349impl ToPoint for usize {
3350 #[inline]
3351 fn to_point(&self, snapshot: &BufferSnapshot) -> Point {
3352 snapshot.offset_to_point(*self)
3353 }
3354}
3355
impl ToPoint for Point {
    /// Identity conversion: a `Point` is already a point.
    #[inline]
    fn to_point(&self, _: &BufferSnapshot) -> Point {
        *self
    }
}
3362
3363impl ToPoint for Unclipped<PointUtf16> {
3364 #[inline]
3365 fn to_point(&self, snapshot: &BufferSnapshot) -> Point {
3366 snapshot.unclipped_point_utf16_to_point(*self)
3367 }
3368}
3369
/// Conversion of a buffer coordinate into a UTF-16 row/column `PointUtf16`.
pub trait ToPointUtf16 {
    /// Resolves `self` to a `PointUtf16` within `snapshot`.
    fn to_point_utf16(&self, snapshot: &BufferSnapshot) -> PointUtf16;
}
3373
3374impl ToPointUtf16 for Anchor {
3375 #[inline]
3376 fn to_point_utf16(&self, snapshot: &BufferSnapshot) -> PointUtf16 {
3377 snapshot.summary_for_anchor(self)
3378 }
3379}
3380
3381impl ToPointUtf16 for usize {
3382 #[inline]
3383 fn to_point_utf16(&self, snapshot: &BufferSnapshot) -> PointUtf16 {
3384 snapshot.offset_to_point_utf16(*self)
3385 }
3386}
3387
impl ToPointUtf16 for PointUtf16 {
    /// Identity conversion: a `PointUtf16` is already a UTF-16 point.
    #[inline]
    fn to_point_utf16(&self, _: &BufferSnapshot) -> PointUtf16 {
        *self
    }
}
3394
3395impl ToPointUtf16 for Point {
3396 #[inline]
3397 fn to_point_utf16(&self, snapshot: &BufferSnapshot) -> PointUtf16 {
3398 snapshot.point_to_point_utf16(*self)
3399 }
3400}
3401
/// Conversion of a buffer coordinate into a UTF-16 code-unit offset.
pub trait ToOffsetUtf16 {
    /// Resolves `self` to an `OffsetUtf16` within `snapshot`.
    fn to_offset_utf16(&self, snapshot: &BufferSnapshot) -> OffsetUtf16;
}
3405
3406impl ToOffsetUtf16 for Anchor {
3407 #[inline]
3408 fn to_offset_utf16(&self, snapshot: &BufferSnapshot) -> OffsetUtf16 {
3409 snapshot.summary_for_anchor(self)
3410 }
3411}
3412
3413impl ToOffsetUtf16 for usize {
3414 #[inline]
3415 fn to_offset_utf16(&self, snapshot: &BufferSnapshot) -> OffsetUtf16 {
3416 snapshot.offset_to_offset_utf16(*self)
3417 }
3418}
3419
impl ToOffsetUtf16 for OffsetUtf16 {
    /// Identity conversion: already a UTF-16 offset.
    #[inline]
    fn to_offset_utf16(&self, _snapshot: &BufferSnapshot) -> OffsetUtf16 {
        *self
    }
}
3426
/// Construction of a coordinate type by resolving an `Anchor` in a snapshot.
pub trait FromAnchor {
    /// Resolves `anchor` within `snapshot` and returns the coordinate.
    fn from_anchor(anchor: &Anchor, snapshot: &BufferSnapshot) -> Self;
}
3430
impl FromAnchor for Anchor {
    /// Identity conversion: anchors are copied as-is.
    #[inline]
    fn from_anchor(anchor: &Anchor, _snapshot: &BufferSnapshot) -> Self {
        *anchor
    }
}
3437
3438impl FromAnchor for Point {
3439 #[inline]
3440 fn from_anchor(anchor: &Anchor, snapshot: &BufferSnapshot) -> Self {
3441 snapshot.summary_for_anchor(anchor)
3442 }
3443}
3444
3445impl FromAnchor for PointUtf16 {
3446 #[inline]
3447 fn from_anchor(anchor: &Anchor, snapshot: &BufferSnapshot) -> Self {
3448 snapshot.summary_for_anchor(anchor)
3449 }
3450}
3451
3452impl FromAnchor for usize {
3453 #[inline]
3454 fn from_anchor(anchor: &Anchor, snapshot: &BufferSnapshot) -> Self {
3455 snapshot.summary_for_anchor(anchor)
3456 }
3457}
3458
/// The line-ending style used by a buffer's text.
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum LineEnding {
    /// `\n`
    Unix,
    /// `\r\n`
    Windows,
}
3464
3465impl Default for LineEnding {
3466 fn default() -> Self {
3467 #[cfg(unix)]
3468 return Self::Unix;
3469
3470 #[cfg(not(unix))]
3471 return Self::Windows;
3472 }
3473}
3474
3475impl LineEnding {
3476 pub fn as_str(&self) -> &'static str {
3477 match self {
3478 LineEnding::Unix => "\n",
3479 LineEnding::Windows => "\r\n",
3480 }
3481 }
3482
3483 pub fn label(&self) -> &'static str {
3484 match self {
3485 LineEnding::Unix => "LF",
3486 LineEnding::Windows => "CRLF",
3487 }
3488 }
3489
3490 pub fn detect(text: &str) -> Self {
3491 let mut max_ix = cmp::min(text.len(), 1000);
3492 while !text.is_char_boundary(max_ix) {
3493 max_ix -= 1;
3494 }
3495
3496 if let Some(ix) = text[..max_ix].find(['\n']) {
3497 if ix > 0 && text.as_bytes()[ix - 1] == b'\r' {
3498 Self::Windows
3499 } else {
3500 Self::Unix
3501 }
3502 } else {
3503 Self::default()
3504 }
3505 }
3506
3507 pub fn normalize(text: &mut String) {
3508 if let Cow::Owned(replaced) = LINE_SEPARATORS_REGEX.replace_all(text, "\n") {
3509 *text = replaced;
3510 }
3511 }
3512
3513 pub fn normalize_arc(text: Arc<str>) -> Arc<str> {
3514 if let Cow::Owned(replaced) = LINE_SEPARATORS_REGEX.replace_all(&text, "\n") {
3515 replaced.into()
3516 } else {
3517 text
3518 }
3519 }
3520
3521 pub fn normalize_cow(text: Cow<str>) -> Cow<str> {
3522 if let Cow::Owned(replaced) = LINE_SEPARATORS_REGEX.replace_all(&text, "\n") {
3523 replaced.into()
3524 } else {
3525 text
3526 }
3527 }
3528}
3529
/// Yields the text of `rope` as `&str` pieces, interleaving `line_ending`'s
/// separator string between lines in place of the rope's `\n` characters.
/// Callers concatenate the pieces to obtain the converted text.
pub fn chunks_with_line_ending(rope: &Rope, line_ending: LineEnding) -> impl Iterator<Item = &str> {
    rope.chunks().flat_map(move |chunk| {
        // Whether a separator is owed before the next line. Starts false so the
        // chunk's first piece (which may continue a line begun in the previous
        // chunk) is emitted without a leading separator.
        let mut newline = false;
        // `lines()` swallows a trailing '\n'; if the chunk ends with one, emit
        // a trailing separator to compensate.
        let end_with_newline = chunk.ends_with('\n').then_some(line_ending.as_str());
        chunk
            .lines()
            .flat_map(move |line| {
                let ending = if newline {
                    Some(line_ending.as_str())
                } else {
                    None
                };
                newline = true;
                ending.into_iter().chain([line])
            })
            .chain(end_with_newline)
    })
}
3548
#[cfg(debug_assertions)]
pub mod debug {
    //! Debug-build-only registry that associates arbitrary hashable keys with
    //! labeled sets of anchor ranges, stored in process-wide state.

    use super::*;
    use parking_lot::Mutex;
    use std::any::TypeId;
    use std::hash::{Hash, Hasher};

    // Lazily initialized on first `with_locked` call.
    static GLOBAL_DEBUG_RANGES: Mutex<Option<GlobalDebugRanges>> = Mutex::new(None);

    /// Registry of labeled anchor ranges; holds at most one entry per key.
    pub struct GlobalDebugRanges {
        /// Entries in insertion order; re-inserting a key moves its entry to the end.
        pub ranges: Vec<DebugRange>,
        // Maps each key to the stable index assigned on its first insertion.
        key_to_occurrence_index: HashMap<Key, usize>,
        // Next index to hand out to a never-before-seen key.
        next_occurrence_index: usize,
    }

    /// A labeled set of anchor ranges registered under a single key.
    pub struct DebugRange {
        key: Key,
        pub ranges: Vec<Range<Anchor>>,
        pub value: Arc<str>,
        /// Stable index assigned when this entry's key was first inserted.
        pub occurrence_index: usize,
    }

    /// Identifies a key by its concrete type plus the hash of its value.
    /// NOTE(review): two distinct keys of the same type that happen to hash
    /// equal will collide, since only the hash (not the value) is stored.
    #[derive(Debug, Clone, PartialEq, Eq, Hash)]
    struct Key {
        type_id: TypeId,
        hash: u64,
    }

    impl GlobalDebugRanges {
        /// Runs `f` with the registry locked, initializing it on first use.
        pub fn with_locked<R>(f: impl FnOnce(&mut Self) -> R) -> R {
            let mut state = GLOBAL_DEBUG_RANGES.lock();
            if state.is_none() {
                *state = Some(GlobalDebugRanges {
                    ranges: Vec::new(),
                    key_to_occurrence_index: HashMap::default(),
                    next_occurrence_index: 0,
                });
            }
            if let Some(global_debug_ranges) = state.as_mut() {
                f(global_debug_ranges)
            } else {
                // `state` was populated above, so this branch is impossible.
                unreachable!()
            }
        }

        /// Registers `ranges` with label `value` under `key`, replacing any
        /// previous entry for the same key (and moving it to the end of
        /// `self.ranges`). The entry keeps the occurrence index assigned the
        /// first time this key was ever inserted.
        pub fn insert<K: Hash + 'static>(
            &mut self,
            key: &K,
            ranges: Vec<Range<Anchor>>,
            value: Arc<str>,
        ) {
            let occurrence_index = *self
                .key_to_occurrence_index
                .entry(Key::new(key))
                .or_insert_with(|| {
                    let occurrence_index = self.next_occurrence_index;
                    self.next_occurrence_index += 1;
                    occurrence_index
                });
            // NOTE(review): this hashes the key a second time; the `Key` built
            // for the entry lookup above could be reused.
            let key = Key::new(key);
            // Remove the previous entry for this key, if any (at most one).
            let existing = self
                .ranges
                .iter()
                .enumerate()
                .rfind(|(_, existing)| existing.key == key);
            if let Some((existing_ix, _)) = existing {
                self.ranges.remove(existing_ix);
            }
            self.ranges.push(DebugRange {
                ranges,
                key,
                value,
                occurrence_index,
            });
        }

        /// Removes the entry registered under `key`, if any.
        pub fn remove<K: Hash + 'static>(&mut self, key: &K) {
            self.remove_impl(&Key::new(key));
        }

        // Removes the last (and, given `insert`'s replacement logic, only)
        // entry matching `key`.
        fn remove_impl(&mut self, key: &Key) {
            let existing = self
                .ranges
                .iter()
                .enumerate()
                .rfind(|(_, existing)| &existing.key == key);
            if let Some((existing_ix, _)) = existing {
                self.ranges.remove(existing_ix);
            }
        }

        /// Removes every entry whose key has concrete type `K`.
        pub fn remove_all_with_key_type<K: 'static>(&mut self) {
            self.ranges
                .retain(|item| item.key.type_id != TypeId::of::<K>());
        }
    }

    impl Key {
        // Combines the key's type id with an FxHash of its value.
        fn new<K: Hash + 'static>(key: &K) -> Self {
            let type_id = TypeId::of::<K>();
            let mut hasher = collections::FxHasher::default();
            key.hash(&mut hasher);
            Key {
                type_id,
                hash: hasher.finish(),
            }
        }
    }

    /// Conversion of positions/ranges (or collections thereof) into the byte
    /// ranges consumed by the debug-range registry.
    pub trait ToDebugRanges {
        fn to_debug_ranges(&self, snapshot: &BufferSnapshot) -> Vec<Range<usize>>;
    }

    impl<T: ToOffset> ToDebugRanges for T {
        // A single position becomes one empty (caret) range at its offset.
        fn to_debug_ranges(&self, snapshot: &BufferSnapshot) -> Vec<Range<usize>> {
            [self.to_offset(snapshot)].to_debug_ranges(snapshot)
        }
    }

    impl<T: ToOffset + Clone> ToDebugRanges for Range<T> {
        fn to_debug_ranges(&self, snapshot: &BufferSnapshot) -> Vec<Range<usize>> {
            [self.clone()].to_debug_ranges(snapshot)
        }
    }

    impl<T: ToOffset> ToDebugRanges for Vec<T> {
        fn to_debug_ranges(&self, snapshot: &BufferSnapshot) -> Vec<Range<usize>> {
            self.as_slice().to_debug_ranges(snapshot)
        }
    }

    impl<T: ToOffset> ToDebugRanges for Vec<Range<T>> {
        fn to_debug_ranges(&self, snapshot: &BufferSnapshot) -> Vec<Range<usize>> {
            self.as_slice().to_debug_ranges(snapshot)
        }
    }

    impl<T: ToOffset> ToDebugRanges for [T] {
        // Each position maps to an empty (caret) range at its offset.
        fn to_debug_ranges(&self, snapshot: &BufferSnapshot) -> Vec<Range<usize>> {
            self.iter()
                .map(|item| {
                    let offset = item.to_offset(snapshot);
                    offset..offset
                })
                .collect()
        }
    }

    impl<T: ToOffset> ToDebugRanges for [Range<T>] {
        fn to_debug_ranges(&self, snapshot: &BufferSnapshot) -> Vec<Range<usize>> {
            self.iter()
                .map(|range| range.start.to_offset(snapshot)..range.end.to_offset(snapshot))
                .collect()
        }
    }
}