1mod anchor;
2pub mod locator;
3#[cfg(any(test, feature = "test-support"))]
4pub mod network;
5pub mod operation_queue;
6mod patch;
7mod selection;
8pub mod subscription;
9#[cfg(test)]
10mod tests;
11mod undo_map;
12
13pub use anchor::*;
14use anyhow::{Context as _, Result};
15use clock::Lamport;
16pub use clock::ReplicaId;
17use collections::{HashMap, HashSet};
18use locator::Locator;
19use operation_queue::OperationQueue;
20pub use patch::Patch;
21use postage::{oneshot, prelude::*};
22
23use regex::Regex;
24pub use rope::*;
25pub use selection::*;
26use smallvec::SmallVec;
27use std::{
28 borrow::Cow,
29 cmp::{self, Ordering, Reverse},
30 fmt::Display,
31 future::Future,
32 iter::Iterator,
33 num::NonZeroU64,
34 ops::{self, Deref, Range, Sub},
35 str,
36 sync::{Arc, LazyLock},
37 time::{Duration, Instant},
38};
39pub use subscription::*;
40pub use sum_tree::Bias;
41use sum_tree::{Dimensions, FilterCursor, SumTree, TreeMap, TreeSet};
42use undo_map::UndoMap;
43use util::debug_panic;
44
45#[cfg(any(test, feature = "test-support"))]
46use util::RandomCharIter;
47
/// Matches CRLF ("\r\n") and lone CR ("\r") line separators so they can be
/// normalized to "\n". Compiled lazily on first use.
static LINE_SEPARATORS_REGEX: LazyLock<Regex> =
    LazyLock::new(|| Regex::new(r"\r\n|\r").expect("Failed to create LINE_SEPARATORS_REGEX"));
50
/// The maximum length of a single insertion operation.
/// Fragments larger than this will be split into multiple smaller
/// fragments. This allows us to use relative `u32` offsets instead of `usize`,
/// reducing memory usage.
// Under `cfg(test)` a tiny limit is used so the splitting logic is exercised often.
const MAX_INSERTION_LEN: usize = if cfg!(test) { 16 } else { u32::MAX as usize };
56
/// Identifies a transaction (a unit of undo/redo) by the lamport timestamp
/// allocated when the transaction was started.
pub type TransactionId = clock::Lamport;
58
/// A collaborative text buffer: the current snapshot plus the CRDT machinery
/// (history, deferred operations, clocks) needed to apply local and remote edits.
pub struct Buffer {
    // Current state of the text; snapshots are cheaply cloneable.
    snapshot: BufferSnapshot,
    // Operation log plus undo/redo stacks.
    history: History,
    // Remote operations whose causal dependencies have not yet arrived.
    deferred_ops: OperationQueue<Operation>,
    // Replicas that currently have at least one deferred operation.
    deferred_replicas: HashSet<ReplicaId>,
    pub lamport_clock: clock::Lamport,
    // Subscribers notified with a `Patch<usize>` after each text change.
    subscriptions: Topic<usize>,
    // Channels resolved once a given edit id has been applied locally.
    edit_id_resolvers: HashMap<clock::Lamport, Vec<oneshot::Sender<()>>>,
    // Channels resolved once the buffer has observed a given version.
    wait_for_version_txs: Vec<(clock::Global, oneshot::Sender<()>)>,
}
69
/// A stable, non-zero identifier for a buffer, shared across replicas.
/// `NonZeroU64` gives `Option<BufferId>` a niche, so it is the same size as `u64`.
#[repr(transparent)]
#[derive(Clone, Copy, Debug, Hash, PartialEq, PartialOrd, Ord, Eq)]
pub struct BufferId(NonZeroU64);
73
74impl Display for BufferId {
75 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
76 write!(f, "{}", self.0)
77 }
78}
79
80impl From<NonZeroU64> for BufferId {
81 fn from(id: NonZeroU64) -> Self {
82 BufferId(id)
83 }
84}
85
86impl BufferId {
87 /// Returns Err if `id` is outside of BufferId domain.
88 pub fn new(id: u64) -> anyhow::Result<Self> {
89 let id = NonZeroU64::new(id).context("Buffer id cannot be 0.")?;
90 Ok(Self(id))
91 }
92
93 /// Increments this buffer id, returning the old value.
94 /// So that's a post-increment operator in disguise.
95 pub fn next(&mut self) -> Self {
96 let old = *self;
97 self.0 = self.0.saturating_add(1);
98 old
99 }
100
101 pub fn to_proto(self) -> u64 {
102 self.into()
103 }
104}
105
106impl From<BufferId> for u64 {
107 fn from(id: BufferId) -> Self {
108 id.0.get()
109 }
110}
111
/// An immutable view of the buffer's contents at a particular version.
/// Cloning is cheap: the ropes and trees share structure.
#[derive(Clone)]
pub struct BufferSnapshot {
    // The currently visible (non-deleted) text.
    visible_text: Rope,
    // Text belonging to deleted fragments, retained for history operations.
    deleted_text: Rope,
    // All fragments (visible and deleted) in document order.
    fragments: SumTree<Fragment>,
    // Index from (insertion timestamp, split offset) to fragment locator.
    insertions: SumTree<InsertionFragment>,
    // Which slices of which insertions each edit touched.
    insertion_slices: TreeSet<InsertionSlice>,
    // Per-edit undo state; see `UndoOperation::counts`.
    undo_map: UndoMap,
    pub version: clock::Global,
    remote_id: BufferId,
    replica_id: ReplicaId,
    line_ending: LineEnding,
}
125
/// A transaction on the undo or redo stack, with timing metadata used to
/// decide whether adjacent transactions should group into one undo step.
#[derive(Clone, Debug)]
pub struct HistoryEntry {
    transaction: Transaction,
    // When the first edit of this transaction occurred.
    first_edit_at: Instant,
    // When the most recent edit of this transaction occurred.
    last_edit_at: Instant,
    // When true, earlier entries never merge into this one during grouping.
    suppress_grouping: bool,
}
133
/// A group of edits that undo and redo together as a single unit.
#[derive(Clone, Debug)]
pub struct Transaction {
    /// Unique id, allocated from the lamport clock when the transaction began.
    pub id: TransactionId,
    /// Ids of the edit operations contained in this transaction.
    pub edit_ids: Vec<clock::Lamport>,
    /// The buffer version at the moment the transaction started.
    pub start: clock::Global,
}
140
141impl Transaction {
142 pub fn merge_in(&mut self, other: Transaction) {
143 self.edit_ids.extend(other.edit_ids);
144 }
145}
146
147impl HistoryEntry {
148 pub fn transaction_id(&self) -> TransactionId {
149 self.transaction.id
150 }
151}
152
/// The buffer's operation log plus undo/redo state.
struct History {
    // The text the buffer was created with, before any operations.
    base_text: Rope,
    // Every operation seen so far (local and remote), keyed by timestamp.
    operations: TreeMap<clock::Lamport, Operation>,
    undo_stack: Vec<HistoryEntry>,
    redo_stack: Vec<HistoryEntry>,
    // Depth of nested `start_transaction` calls; 0 when none is open.
    transaction_depth: usize,
    // Transactions whose edits fall within this interval of each other may be
    // grouped into a single undo step.
    group_interval: Duration,
}
161
/// A contiguous range within a single insertion that was affected by one edit.
#[derive(Clone, Debug, Eq, PartialEq)]
struct InsertionSlice {
    // The two `clock::Lamport` ids are stored field-by-field ("inlined") so
    // the replica ids can share the same alignment, saving 4 bytes per slice.
    // Logically this pair is `edit_id: clock::Lamport`:
    edit_id_value: clock::Seq,
    edit_id_replica_id: ReplicaId,
    // ...and this pair is `insertion_id: clock::Lamport`:
    insertion_id_value: clock::Seq,
    insertion_id_replica_id: ReplicaId,
    // Offset range within the insertion identified by `insertion_id`.
    range: Range<u32>,
}
173
174impl Ord for InsertionSlice {
175 fn cmp(&self, other: &Self) -> Ordering {
176 Lamport {
177 value: self.edit_id_value,
178 replica_id: self.edit_id_replica_id,
179 }
180 .cmp(&Lamport {
181 value: other.edit_id_value,
182 replica_id: other.edit_id_replica_id,
183 })
184 .then_with(|| {
185 Lamport {
186 value: self.insertion_id_value,
187 replica_id: self.insertion_id_replica_id,
188 }
189 .cmp(&Lamport {
190 value: other.insertion_id_value,
191 replica_id: other.insertion_id_replica_id,
192 })
193 })
194 .then_with(|| self.range.start.cmp(&other.range.start))
195 .then_with(|| self.range.end.cmp(&other.range.end))
196 }
197}
198
199impl PartialOrd for InsertionSlice {
200 fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
201 Some(self.cmp(other))
202 }
203}
204
205impl InsertionSlice {
206 fn from_fragment(edit_id: clock::Lamport, fragment: &Fragment) -> Self {
207 Self {
208 edit_id_value: edit_id.value,
209 edit_id_replica_id: edit_id.replica_id,
210 insertion_id_value: fragment.timestamp.value,
211 insertion_id_replica_id: fragment.timestamp.replica_id,
212 range: fragment.insertion_offset..fragment.insertion_offset + fragment.len,
213 }
214 }
215}
216
217impl History {
218 pub fn new(base_text: Rope) -> Self {
219 Self {
220 base_text,
221 operations: Default::default(),
222 undo_stack: Vec::new(),
223 redo_stack: Vec::new(),
224 transaction_depth: 0,
225 // Don't group transactions in tests unless we opt in, because it's a footgun.
226 group_interval: if cfg!(any(test, feature = "test-support")) {
227 Duration::ZERO
228 } else {
229 Duration::from_millis(300)
230 },
231 }
232 }
233
234 fn push(&mut self, op: Operation) {
235 self.operations.insert(op.timestamp(), op);
236 }
237
238 fn start_transaction(
239 &mut self,
240 start: clock::Global,
241 now: Instant,
242 clock: &mut clock::Lamport,
243 ) -> Option<TransactionId> {
244 self.transaction_depth += 1;
245 if self.transaction_depth == 1 {
246 let id = clock.tick();
247 self.undo_stack.push(HistoryEntry {
248 transaction: Transaction {
249 id,
250 start,
251 edit_ids: Default::default(),
252 },
253 first_edit_at: now,
254 last_edit_at: now,
255 suppress_grouping: false,
256 });
257 Some(id)
258 } else {
259 None
260 }
261 }
262
263 fn end_transaction(&mut self, now: Instant) -> Option<&HistoryEntry> {
264 assert_ne!(self.transaction_depth, 0);
265 self.transaction_depth -= 1;
266 if self.transaction_depth == 0 {
267 if self
268 .undo_stack
269 .last()
270 .unwrap()
271 .transaction
272 .edit_ids
273 .is_empty()
274 {
275 self.undo_stack.pop();
276 None
277 } else {
278 self.redo_stack.clear();
279 let entry = self.undo_stack.last_mut().unwrap();
280 entry.last_edit_at = now;
281 Some(entry)
282 }
283 } else {
284 None
285 }
286 }
287
288 fn group(&mut self) -> Option<TransactionId> {
289 let mut count = 0;
290 let mut entries = self.undo_stack.iter();
291 if let Some(mut entry) = entries.next_back() {
292 while let Some(prev_entry) = entries.next_back() {
293 if !prev_entry.suppress_grouping
294 && entry.first_edit_at - prev_entry.last_edit_at < self.group_interval
295 {
296 entry = prev_entry;
297 count += 1;
298 } else {
299 break;
300 }
301 }
302 }
303 self.group_trailing(count)
304 }
305
306 fn group_until(&mut self, transaction_id: TransactionId) {
307 let mut count = 0;
308 for entry in self.undo_stack.iter().rev() {
309 if entry.transaction_id() == transaction_id {
310 self.group_trailing(count);
311 break;
312 } else if entry.suppress_grouping {
313 break;
314 } else {
315 count += 1;
316 }
317 }
318 }
319
320 fn group_trailing(&mut self, n: usize) -> Option<TransactionId> {
321 let new_len = self.undo_stack.len() - n;
322 let (entries_to_keep, entries_to_merge) = self.undo_stack.split_at_mut(new_len);
323 if let Some(last_entry) = entries_to_keep.last_mut() {
324 for entry in &*entries_to_merge {
325 for edit_id in &entry.transaction.edit_ids {
326 last_entry.transaction.edit_ids.push(*edit_id);
327 }
328 }
329
330 if let Some(entry) = entries_to_merge.last_mut() {
331 last_entry.last_edit_at = entry.last_edit_at;
332 }
333 }
334
335 self.undo_stack.truncate(new_len);
336 self.undo_stack.last().map(|e| e.transaction.id)
337 }
338
339 fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
340 self.undo_stack.last_mut().map(|entry| {
341 entry.transaction.edit_ids.shrink_to_fit();
342 entry.suppress_grouping = true;
343 &entry.transaction
344 })
345 }
346
347 fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
348 assert_eq!(self.transaction_depth, 0);
349 self.undo_stack.push(HistoryEntry {
350 transaction,
351 first_edit_at: now,
352 last_edit_at: now,
353 suppress_grouping: false,
354 });
355 }
356
357 /// Differs from `push_transaction` in that it does not clear the redo
358 /// stack. Intended to be used to create a parent transaction to merge
359 /// potential child transactions into.
360 ///
361 /// The caller is responsible for removing it from the undo history using
362 /// `forget_transaction` if no edits are merged into it. Otherwise, if edits
363 /// are merged into this transaction, the caller is responsible for ensuring
364 /// the redo stack is cleared. The easiest way to ensure the redo stack is
365 /// cleared is to create transactions with the usual `start_transaction` and
366 /// `end_transaction` methods and merging the resulting transactions into
367 /// the transaction created by this method
368 fn push_empty_transaction(
369 &mut self,
370 start: clock::Global,
371 now: Instant,
372 clock: &mut clock::Lamport,
373 ) -> TransactionId {
374 assert_eq!(self.transaction_depth, 0);
375 let id = clock.tick();
376 let transaction = Transaction {
377 id,
378 start,
379 edit_ids: Vec::new(),
380 };
381 self.undo_stack.push(HistoryEntry {
382 transaction,
383 first_edit_at: now,
384 last_edit_at: now,
385 suppress_grouping: false,
386 });
387 id
388 }
389
390 fn push_undo(&mut self, op_id: clock::Lamport) {
391 assert_ne!(self.transaction_depth, 0);
392 if let Some(Operation::Edit(_)) = self.operations.get(&op_id) {
393 let last_transaction = self.undo_stack.last_mut().unwrap();
394 last_transaction.transaction.edit_ids.push(op_id);
395 }
396 }
397
398 fn pop_undo(&mut self) -> Option<&HistoryEntry> {
399 assert_eq!(self.transaction_depth, 0);
400 if let Some(entry) = self.undo_stack.pop() {
401 self.redo_stack.push(entry);
402 self.redo_stack.last()
403 } else {
404 None
405 }
406 }
407
408 fn remove_from_undo(&mut self, transaction_id: TransactionId) -> Option<&HistoryEntry> {
409 assert_eq!(self.transaction_depth, 0);
410
411 let entry_ix = self
412 .undo_stack
413 .iter()
414 .rposition(|entry| entry.transaction.id == transaction_id)?;
415 let entry = self.undo_stack.remove(entry_ix);
416 self.redo_stack.push(entry);
417 self.redo_stack.last()
418 }
419
420 fn remove_from_undo_until(&mut self, transaction_id: TransactionId) -> &[HistoryEntry] {
421 assert_eq!(self.transaction_depth, 0);
422
423 let redo_stack_start_len = self.redo_stack.len();
424 if let Some(entry_ix) = self
425 .undo_stack
426 .iter()
427 .rposition(|entry| entry.transaction.id == transaction_id)
428 {
429 self.redo_stack
430 .extend(self.undo_stack.drain(entry_ix..).rev());
431 }
432 &self.redo_stack[redo_stack_start_len..]
433 }
434
435 fn forget(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
436 assert_eq!(self.transaction_depth, 0);
437 if let Some(entry_ix) = self
438 .undo_stack
439 .iter()
440 .rposition(|entry| entry.transaction.id == transaction_id)
441 {
442 Some(self.undo_stack.remove(entry_ix).transaction)
443 } else if let Some(entry_ix) = self
444 .redo_stack
445 .iter()
446 .rposition(|entry| entry.transaction.id == transaction_id)
447 {
448 Some(self.redo_stack.remove(entry_ix).transaction)
449 } else {
450 None
451 }
452 }
453
454 fn transaction(&self, transaction_id: TransactionId) -> Option<&Transaction> {
455 let entry = self
456 .undo_stack
457 .iter()
458 .rfind(|entry| entry.transaction.id == transaction_id)
459 .or_else(|| {
460 self.redo_stack
461 .iter()
462 .rfind(|entry| entry.transaction.id == transaction_id)
463 })?;
464 Some(&entry.transaction)
465 }
466
467 fn transaction_mut(&mut self, transaction_id: TransactionId) -> Option<&mut Transaction> {
468 let entry = self
469 .undo_stack
470 .iter_mut()
471 .rfind(|entry| entry.transaction.id == transaction_id)
472 .or_else(|| {
473 self.redo_stack
474 .iter_mut()
475 .rfind(|entry| entry.transaction.id == transaction_id)
476 })?;
477 Some(&mut entry.transaction)
478 }
479
480 fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
481 if let Some(transaction) = self.forget(transaction)
482 && let Some(destination) = self.transaction_mut(destination)
483 {
484 destination.edit_ids.extend(transaction.edit_ids);
485 }
486 }
487
488 fn pop_redo(&mut self) -> Option<&HistoryEntry> {
489 assert_eq!(self.transaction_depth, 0);
490 if let Some(entry) = self.redo_stack.pop() {
491 self.undo_stack.push(entry);
492 self.undo_stack.last()
493 } else {
494 None
495 }
496 }
497
498 fn remove_from_redo(&mut self, transaction_id: TransactionId) -> &[HistoryEntry] {
499 assert_eq!(self.transaction_depth, 0);
500
501 let undo_stack_start_len = self.undo_stack.len();
502 if let Some(entry_ix) = self
503 .redo_stack
504 .iter()
505 .rposition(|entry| entry.transaction.id == transaction_id)
506 {
507 self.undo_stack
508 .extend(self.redo_stack.drain(entry_ix..).rev());
509 }
510 &self.undo_stack[undo_stack_start_len..]
511 }
512}
513
/// Iterator state for enumerating the edits made since `since`.
/// NOTE(review): the `Iterator` impl lives outside this chunk — field roles
/// below are inferred from types and should be confirmed against it.
struct Edits<'a, D: TextDimension, F: FnMut(&FragmentSummary) -> bool> {
    visible_cursor: rope::Cursor<'a>,
    deleted_cursor: rope::Cursor<'a>,
    // Cursor over fragments, filtered by summary via `F`.
    fragments_cursor: Option<FilterCursor<'a, 'static, F, Fragment, FragmentTextSummary>>,
    undos: &'a UndoMap,
    // The version edits are being computed relative to.
    since: &'a clock::Global,
    old_end: D,
    new_end: D,
    // Bounds of the walk: (fragment locator, offset within the fragment).
    range: Range<(&'a Locator, u32)>,
    buffer_id: BufferId,
}
525
/// A single contiguous change: the `old` range in pre-edit coordinates and
/// the `new` range it became. `D` is the coordinate type (e.g. offset).
#[derive(Clone, Debug, Default, Eq, PartialEq)]
pub struct Edit<D> {
    pub old: Range<D>,
    pub new: Range<D>,
}
531impl<D> Edit<D>
532where
533 D: PartialEq,
534{
535 pub fn is_empty(&self) -> bool {
536 self.old.start == self.old.end && self.new.start == self.new.end
537 }
538}
539
540impl<D, DDelta> Edit<D>
541where
542 D: Sub<D, Output = DDelta> + Copy,
543{
544 pub fn old_len(&self) -> DDelta {
545 self.old.end - self.old.start
546 }
547
548 pub fn new_len(&self) -> DDelta {
549 self.new.end - self.new.start
550 }
551}
552
553impl<D1, D2> Edit<(D1, D2)> {
554 pub fn flatten(self) -> (Edit<D1>, Edit<D2>) {
555 (
556 Edit {
557 old: self.old.start.0..self.old.end.0,
558 new: self.new.start.0..self.new.end.0,
559 },
560 Edit {
561 old: self.old.start.1..self.old.end.1,
562 new: self.new.start.1..self.new.end.1,
563 },
564 )
565 }
566}
567
/// A contiguous piece of one insertion, positioned in the document by `id`.
#[derive(Eq, PartialEq, Clone, Debug)]
struct Fragment {
    // Dense, totally-ordered locator giving the fragment's document position.
    id: Locator,
    // Timestamp of the insertion this fragment belongs to.
    timestamp: clock::Lamport,
    // Offset of this fragment within its parent insertion.
    insertion_offset: u32,
    // Length in bytes; insertions are split so this fits in a `u32`
    // (see `MAX_INSERTION_LEN`).
    len: u32,
    // False once the fragment has been deleted (text moves to `deleted_text`).
    visible: bool,
    // Ids of the edits that deleted this fragment.
    deletions: SmallVec<[clock::Lamport; 2]>,
    // Version tracking undo operations applied to this fragment —
    // NOTE(review): exact semantics live in `UndoMap`/`was_visible`; confirm there.
    max_undos: clock::Global,
}
578
/// Aggregated data for a subtree of fragments in the `SumTree`.
#[derive(Eq, PartialEq, Clone, Debug)]
struct FragmentSummary {
    // Visible/deleted byte counts for the subtree.
    text: FragmentTextSummary,
    // Largest fragment locator in the subtree.
    max_id: Locator,
    // Union of versions that produced the fragments in the subtree.
    max_version: clock::Global,
    min_insertion_version: clock::Global,
    max_insertion_version: clock::Global,
}
587
/// Byte counts of visible and deleted text over a run of fragments.
#[derive(Copy, Default, Clone, Debug, PartialEq, Eq)]
struct FragmentTextSummary {
    visible: usize,
    deleted: usize,
}
593
594impl<'a> sum_tree::Dimension<'a, FragmentSummary> for FragmentTextSummary {
595 fn zero(_: &Option<clock::Global>) -> Self {
596 Default::default()
597 }
598
599 fn add_summary(&mut self, summary: &'a FragmentSummary, _: &Option<clock::Global>) {
600 self.visible += summary.text.visible;
601 self.deleted += summary.text.deleted;
602 }
603}
604
/// Index entry mapping a split point of an insertion back to the fragment
/// that now holds that part of the insertion.
#[derive(Eq, PartialEq, Clone, Debug)]
struct InsertionFragment {
    // Timestamp of the original insertion.
    timestamp: clock::Lamport,
    // Offset within the insertion where this fragment begins.
    split_offset: u32,
    // Locator of the fragment holding this portion of the insertion.
    fragment_id: Locator,
}
611
/// Lookup key for `InsertionFragment`: (insertion timestamp, split offset).
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)]
struct InsertionFragmentKey {
    timestamp: clock::Lamport,
    split_offset: u32,
}
617
/// A replicable change to a buffer: either an edit or an undo of prior edits.
#[derive(Clone, Debug, Eq, PartialEq)]
pub enum Operation {
    Edit(EditOperation),
    Undo(UndoOperation),
}
623
/// Replaces a set of ranges with new text, as observed at `version`.
#[derive(Clone, Debug, Eq, PartialEq)]
pub struct EditOperation {
    /// Lamport timestamp uniquely identifying this edit.
    pub timestamp: clock::Lamport,
    /// The version the editing replica had observed when it made this edit.
    pub version: clock::Global,
    /// Ranges being replaced, in full offsets (visible + deleted text).
    pub ranges: Vec<Range<FullOffset>>,
    /// Replacement text, parallel to `ranges`.
    pub new_text: Vec<Arc<str>>,
}
631
/// Undoes (or redoes) prior edits by adjusting per-edit undo counts.
#[derive(Clone, Debug, Eq, PartialEq)]
pub struct UndoOperation {
    /// Lamport timestamp uniquely identifying this undo.
    pub timestamp: clock::Lamport,
    /// The version the replica had observed when issuing the undo.
    pub version: clock::Global,
    /// Target undo count for each affected edit id — NOTE(review): the
    /// count-to-visibility rule lives in `UndoMap`; confirm there.
    pub counts: HashMap<clock::Lamport, u32>,
}
638
/// Stores information about the indentation of a line (tabs and spaces).
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub struct LineIndent {
    /// Number of tab characters in the leading whitespace.
    pub tabs: u32,
    /// Number of space characters in the leading whitespace.
    pub spaces: u32,
    /// True when the line contains nothing but whitespace.
    pub line_blank: bool,
}
646
647impl LineIndent {
648 pub fn from_chunks(chunks: &mut Chunks) -> Self {
649 let mut tabs = 0;
650 let mut spaces = 0;
651 let mut line_blank = true;
652
653 'outer: while let Some(chunk) = chunks.peek() {
654 for ch in chunk.chars() {
655 if ch == '\t' {
656 tabs += 1;
657 } else if ch == ' ' {
658 spaces += 1;
659 } else {
660 if ch != '\n' {
661 line_blank = false;
662 }
663 break 'outer;
664 }
665 }
666
667 chunks.next();
668 }
669
670 Self {
671 tabs,
672 spaces,
673 line_blank,
674 }
675 }
676
677 /// Constructs a new `LineIndent` which only contains spaces.
678 pub fn spaces(spaces: u32) -> Self {
679 Self {
680 tabs: 0,
681 spaces,
682 line_blank: true,
683 }
684 }
685
686 /// Constructs a new `LineIndent` which only contains tabs.
687 pub fn tabs(tabs: u32) -> Self {
688 Self {
689 tabs,
690 spaces: 0,
691 line_blank: true,
692 }
693 }
694
695 /// Indicates whether the line is empty.
696 pub fn is_line_empty(&self) -> bool {
697 self.tabs == 0 && self.spaces == 0 && self.line_blank
698 }
699
700 /// Indicates whether the line is blank (contains only whitespace).
701 pub fn is_line_blank(&self) -> bool {
702 self.line_blank
703 }
704
705 /// Returns the number of indentation characters (tabs or spaces).
706 pub fn raw_len(&self) -> u32 {
707 self.tabs + self.spaces
708 }
709
710 /// Returns the number of indentation characters (tabs or spaces), taking tab size into account.
711 pub fn len(&self, tab_size: u32) -> u32 {
712 self.tabs * tab_size + self.spaces
713 }
714}
715
716impl From<&str> for LineIndent {
717 fn from(value: &str) -> Self {
718 Self::from_iter(value.chars())
719 }
720}
721
722impl FromIterator<char> for LineIndent {
723 fn from_iter<T: IntoIterator<Item = char>>(chars: T) -> Self {
724 let mut tabs = 0;
725 let mut spaces = 0;
726 let mut line_blank = true;
727 for c in chars {
728 if c == '\t' {
729 tabs += 1;
730 } else if c == ' ' {
731 spaces += 1;
732 } else {
733 if c != '\n' {
734 line_blank = false;
735 }
736 break;
737 }
738 }
739 Self {
740 tabs,
741 spaces,
742 line_blank,
743 }
744 }
745}
746
747impl Buffer {
748 pub fn new(replica_id: ReplicaId, remote_id: BufferId, base_text: impl Into<String>) -> Buffer {
749 let mut base_text = base_text.into();
750 let line_ending = LineEnding::detect(&base_text);
751 LineEnding::normalize(&mut base_text);
752 Self::new_normalized(replica_id, remote_id, line_ending, Rope::from(&*base_text))
753 }
754
    /// Constructs a buffer from text already normalized to "\n" line endings.
    /// The base text is recorded as a single insertion attributed to the
    /// `LOCAL` replica, split into fragments of at most `MAX_INSERTION_LEN`
    /// bytes (split points kept on char boundaries).
    pub fn new_normalized(
        replica_id: ReplicaId,
        remote_id: BufferId,
        line_ending: LineEnding,
        normalized: Rope,
    ) -> Buffer {
        let history = History::new(normalized);
        let mut fragments = SumTree::new(&None);
        let mut insertions = SumTree::default();

        let mut lamport_clock = clock::Lamport::new(replica_id);
        let mut version = clock::Global::new();

        let visible_text = history.base_text.clone();
        if !visible_text.is_empty() {
            // Attribute the base text to the LOCAL replica so every
            // collaborator agrees on the identity of the initial insertion.
            let insertion_timestamp = clock::Lamport::new(ReplicaId::LOCAL);
            lamport_clock.observe(insertion_timestamp);
            version.observe(insertion_timestamp);

            let mut insertion_offset: u32 = 0;
            let mut text_offset: usize = 0;
            let mut prev_locator = Locator::min();

            while text_offset < visible_text.len() {
                let target_end = visible_text.len().min(text_offset + MAX_INSERTION_LEN);
                // Never split a multi-byte character across two fragments.
                let chunk_end = if target_end == visible_text.len() {
                    target_end
                } else {
                    visible_text.floor_char_boundary(target_end)
                };
                let chunk_len = chunk_end - text_offset;

                // Each fragment gets a locator ordered after the previous one.
                let fragment_id = Locator::between(&prev_locator, &Locator::max());
                let fragment = Fragment {
                    id: fragment_id.clone(),
                    timestamp: insertion_timestamp,
                    insertion_offset,
                    len: chunk_len as u32,
                    visible: true,
                    deletions: Default::default(),
                    max_undos: Default::default(),
                };
                insertions.push(InsertionFragment::new(&fragment), ());
                fragments.push(fragment, &None);

                prev_locator = fragment_id;
                insertion_offset += chunk_len as u32;
                text_offset = chunk_end;
            }
        }

        Buffer {
            snapshot: BufferSnapshot {
                replica_id,
                remote_id,
                visible_text,
                deleted_text: Rope::new(),
                line_ending,
                fragments,
                insertions,
                version,
                undo_map: Default::default(),
                insertion_slices: Default::default(),
            },
            history,
            deferred_ops: OperationQueue::new(),
            deferred_replicas: HashSet::default(),
            lamport_clock,
            subscriptions: Default::default(),
            edit_id_resolvers: Default::default(),
            wait_for_version_txs: Default::default(),
        }
    }
828
    /// Returns a copy of the buffer's current version vector.
    pub fn version(&self) -> clock::Global {
        self.version.clone()
    }

    /// Borrows the buffer's current snapshot.
    pub fn snapshot(&self) -> &BufferSnapshot {
        &self.snapshot
    }

    /// Consumes the buffer, yielding its final snapshot.
    pub fn into_snapshot(self) -> BufferSnapshot {
        self.snapshot
    }
840
841 /// Returns a preview snapshot derived by applying one synthetic local-branch edit.
842 ///
843 /// This is intended for one-off preview workflows like syntax interpolation, not for
844 /// repeatedly mutating or composing synthetic snapshot histories.
845 pub fn snapshot_with_edits<I, S, T>(&self, edits: I) -> BufferSnapshot
846 where
847 I: IntoIterator<Item = (Range<S>, T)>,
848 S: ToOffset,
849 T: Into<Arc<str>>,
850 {
851 let mut snapshot = self.snapshot.clone();
852 let timestamp = Lamport::new(ReplicaId::LOCAL_BRANCH).tick();
853 let edits: Vec<_> = edits
854 .into_iter()
855 .map(|(range, new_text)| (range.to_offset(&snapshot), new_text.into()))
856 .collect();
857 snapshot.apply_edit_internal(edits, timestamp);
858 snapshot.version.observe(timestamp);
859 snapshot
860 }
861
    /// The id of this replica, as recorded in its lamport clock.
    pub fn replica_id(&self) -> ReplicaId {
        self.lamport_clock.replica_id
    }

    /// The buffer id shared by all replicas of this buffer.
    pub fn remote_id(&self) -> BufferId {
        self.remote_id
    }

    /// Number of remote operations still waiting on missing dependencies.
    pub fn deferred_ops_len(&self) -> usize {
        self.deferred_ops.len()
    }

    /// The interval within which consecutive transactions may be grouped
    /// into a single undo step.
    pub fn transaction_group_interval(&self) -> Duration {
        self.history.group_interval
    }
877
    /// Applies a batch of local edits inside an implicit transaction and
    /// returns the resulting operation for broadcast to other replicas.
    ///
    /// Each `(range, text)` pair replaces `range` with `text`; all ranges are
    /// resolved against the buffer state prior to any of the edits.
    pub fn edit<R, I, S, T>(&mut self, edits: R) -> Operation
    where
        R: IntoIterator<IntoIter = I>,
        I: ExactSizeIterator<Item = (Range<S>, T)>,
        S: ToOffset,
        T: Into<Arc<str>>,
    {
        let edits = edits
            .into_iter()
            .map(|(range, new_text)| (range, new_text.into()));

        self.start_transaction();
        // All edits in this batch share one lamport timestamp.
        let timestamp = self.lamport_clock.tick();
        let operation = Operation::Edit(self.apply_local_edit(edits, timestamp));

        // Record the operation, make it undoable, and mark it observed.
        self.history.push(operation.clone());
        self.history.push_undo(operation.timestamp());
        self.snapshot.version.observe(operation.timestamp());
        self.end_transaction();
        operation
    }
899
900 fn apply_local_edit<S: ToOffset, T: Into<Arc<str>>>(
901 &mut self,
902 edits: impl ExactSizeIterator<Item = (Range<S>, T)>,
903 timestamp: clock::Lamport,
904 ) -> EditOperation {
905 let edits: Vec<_> = edits
906 .map(|(range, new_text)| (range.to_offset(&*self), new_text.into()))
907 .collect();
908 let (edit_op, edits_patch) = self.snapshot.apply_edit_internal(edits, timestamp);
909 self.subscriptions.publish_mut(&edits_patch);
910 edit_op
911 }
912
    /// Overrides the line ending recorded for this buffer.
    pub fn set_line_ending(&mut self, line_ending: LineEnding) {
        self.snapshot.line_ending = line_ending;
    }
916
917 pub fn apply_ops<I: IntoIterator<Item = Operation>>(&mut self, ops: I) {
918 let mut deferred_ops = Vec::new();
919 for op in ops {
920 self.history.push(op.clone());
921 if self.can_apply_op(&op) {
922 self.apply_op(op);
923 } else {
924 self.deferred_replicas.insert(op.replica_id());
925 deferred_ops.push(op);
926 }
927 }
928 self.deferred_ops.insert(deferred_ops);
929 self.flush_deferred_ops();
930 }
931
    /// Applies a single causally-ready operation, then wakes any tasks whose
    /// awaited version has now been observed.
    fn apply_op(&mut self, op: Operation) {
        match op {
            Operation::Edit(edit) => {
                // Skip operations we've already observed (idempotent apply).
                if !self.version.observed(edit.timestamp) {
                    self.apply_remote_edit(
                        &edit.version,
                        &edit.ranges,
                        &edit.new_text,
                        edit.timestamp,
                    );
                    self.snapshot.version.observe(edit.timestamp);
                    self.lamport_clock.observe(edit.timestamp);
                    // Notify anyone waiting for this specific edit id.
                    self.resolve_edit(edit.timestamp);
                }
            }
            Operation::Undo(undo) => {
                if !self.version.observed(undo.timestamp) {
                    self.apply_undo(&undo);
                    self.snapshot.version.observe(undo.timestamp);
                    self.lamport_clock.observe(undo.timestamp);
                }
            }
        }
        // Complete any waits whose target version is now fully observed.
        self.wait_for_version_txs.retain_mut(|(version, tx)| {
            if self.snapshot.version().observed_all(version) {
                tx.try_send(()).ok();
                false
            } else {
                true
            }
        });
    }
964
    /// Integrates a remote edit into the fragment tree.
    ///
    /// `ranges` are full offsets (visible + deleted text) as of `version` —
    /// the version the editing replica had observed when it made the edit.
    /// Insertions concurrent with this edit are ordered by lamport
    /// `timestamp` so that all replicas converge on the same fragment order.
    fn apply_remote_edit(
        &mut self,
        version: &clock::Global,
        ranges: &[Range<FullOffset>],
        new_text: &[Arc<str>],
        timestamp: clock::Lamport,
    ) {
        if ranges.is_empty() {
            return;
        }

        let edits = ranges.iter().zip(new_text.iter());
        let mut edits_patch = Patch::default();
        let mut insertion_slices = Vec::new();
        let cx = Some(version.clone());
        let mut new_insertions = Vec::new();
        let mut insertion_offset: u32 = 0;
        // Rebuilds the visible/deleted ropes in lockstep with the fragments.
        let mut new_ropes =
            RopeBuilder::new(self.visible_text.cursor(0), self.deleted_text.cursor(0));
        // Cursor offsets are interpreted relative to `version` via `cx`.
        let mut old_fragments = self
            .fragments
            .cursor::<Dimensions<VersionedFullOffset, usize>>(&cx);
        let mut new_fragments =
            old_fragments.slice(&VersionedFullOffset::Offset(ranges[0].start), Bias::Left);
        new_ropes.append(new_fragments.summary().text);

        let mut fragment_start = old_fragments.start().0.full_offset();
        for (range, new_text) in edits {
            let fragment_end = old_fragments.end().0.full_offset();

            // If the current fragment ends before this range, then jump ahead to the first fragment
            // that extends past the start of this range, reusing any intervening fragments.
            if fragment_end < range.start {
                // If the current fragment has been partially consumed, then consume the rest of it
                // and advance to the next fragment before slicing.
                if fragment_start > old_fragments.start().0.full_offset() {
                    if fragment_end > fragment_start {
                        let mut suffix = old_fragments.item().unwrap().clone();
                        suffix.len = (fragment_end.0 - fragment_start.0) as u32;
                        suffix.insertion_offset +=
                            (fragment_start - old_fragments.start().0.full_offset()) as u32;
                        new_insertions.push(InsertionFragment::insert_new(&suffix));
                        new_ropes.push_fragment(&suffix, suffix.visible);
                        new_fragments.push(suffix, &None);
                    }
                    old_fragments.next();
                }

                let slice =
                    old_fragments.slice(&VersionedFullOffset::Offset(range.start), Bias::Left);
                new_ropes.append(slice.summary().text);
                new_fragments.append(slice, &None);
                fragment_start = old_fragments.start().0.full_offset();
            }

            // If we are at the end of a non-concurrent fragment, advance to the next one.
            let fragment_end = old_fragments.end().0.full_offset();
            if fragment_end == range.start && fragment_end > fragment_start {
                let mut fragment = old_fragments.item().unwrap().clone();
                fragment.len = (fragment_end.0 - fragment_start.0) as u32;
                fragment.insertion_offset +=
                    (fragment_start - old_fragments.start().0.full_offset()) as u32;
                new_insertions.push(InsertionFragment::insert_new(&fragment));
                new_ropes.push_fragment(&fragment, fragment.visible);
                new_fragments.push(fragment, &None);
                old_fragments.next();
                fragment_start = old_fragments.start().0.full_offset();
            }

            // Skip over insertions that are concurrent to this edit, but have a lower lamport
            // timestamp.
            while let Some(fragment) = old_fragments.item() {
                if fragment_start == range.start && fragment.timestamp > timestamp {
                    new_ropes.push_fragment(fragment, fragment.visible);
                    new_fragments.push(fragment.clone(), &None);
                    old_fragments.next();
                    debug_assert_eq!(fragment_start, range.start);
                } else {
                    break;
                }
            }
            debug_assert!(fragment_start <= range.start);

            // Preserve any portion of the current fragment that precedes this range.
            if fragment_start < range.start {
                let mut prefix = old_fragments.item().unwrap().clone();
                prefix.len = (range.start.0 - fragment_start.0) as u32;
                prefix.insertion_offset +=
                    (fragment_start - old_fragments.start().0.full_offset()) as u32;
                prefix.id = Locator::between(&new_fragments.summary().max_id, &prefix.id);
                new_insertions.push(InsertionFragment::insert_new(&prefix));
                fragment_start = range.start;
                new_ropes.push_fragment(&prefix, prefix.visible);
                new_fragments.push(prefix, &None);
            }

            // Insert the new text before any existing fragments within the range.
            if !new_text.is_empty() {
                let mut old_start = old_fragments.start().1;
                if old_fragments.item().is_some_and(|f| f.visible) {
                    old_start += fragment_start.0 - old_fragments.start().0.full_offset().0;
                }
                let new_start = new_fragments.summary().text.visible;
                let next_fragment_id = old_fragments
                    .item()
                    .map_or(Locator::max_ref(), |old_fragment| &old_fragment.id);
                push_fragments_for_insertion(
                    new_text,
                    timestamp,
                    &mut insertion_offset,
                    &mut new_fragments,
                    &mut new_insertions,
                    &mut insertion_slices,
                    &mut new_ropes,
                    next_fragment_id,
                    timestamp,
                );
                edits_patch.push(Edit {
                    old: old_start..old_start,
                    new: new_start..new_start + new_text.len(),
                });
            }

            // Advance through every fragment that intersects this range, marking the intersecting
            // portions as deleted.
            while fragment_start < range.end {
                let fragment = old_fragments.item().unwrap();
                let fragment_end = old_fragments.end().0.full_offset();
                let mut intersection = fragment.clone();
                let intersection_end = cmp::min(range.end, fragment_end);
                // Only fragments the editing replica had observed are deleted;
                // concurrent insertions within the range are left intact.
                if version.observed(fragment.timestamp) {
                    intersection.len = (intersection_end.0 - fragment_start.0) as u32;
                    intersection.insertion_offset +=
                        (fragment_start - old_fragments.start().0.full_offset()) as u32;
                    intersection.id =
                        Locator::between(&new_fragments.summary().max_id, &intersection.id);
                    if fragment.was_visible(version, &self.undo_map) {
                        intersection.deletions.push(timestamp);
                        intersection.visible = false;
                        insertion_slices
                            .push(InsertionSlice::from_fragment(timestamp, &intersection));
                    }
                }
                if intersection.len > 0 {
                    // Only a visible→deleted transition contributes to the patch.
                    if fragment.visible && !intersection.visible {
                        let old_start = old_fragments.start().1
                            + (fragment_start.0 - old_fragments.start().0.full_offset().0);
                        let new_start = new_fragments.summary().text.visible;
                        edits_patch.push(Edit {
                            old: old_start..old_start + intersection.len as usize,
                            new: new_start..new_start,
                        });
                    }
                    new_insertions.push(InsertionFragment::insert_new(&intersection));
                    new_ropes.push_fragment(&intersection, fragment.visible);
                    new_fragments.push(intersection, &None);
                    fragment_start = intersection_end;
                }
                if fragment_end <= range.end {
                    old_fragments.next();
                }
            }
        }

        // If the current fragment has been partially consumed, then consume the rest of it
        // and advance to the next fragment before slicing.
        if fragment_start > old_fragments.start().0.full_offset() {
            let fragment_end = old_fragments.end().0.full_offset();
            if fragment_end > fragment_start {
                let mut suffix = old_fragments.item().unwrap().clone();
                suffix.len = (fragment_end.0 - fragment_start.0) as u32;
                suffix.insertion_offset +=
                    (fragment_start - old_fragments.start().0.full_offset()) as u32;
                new_insertions.push(InsertionFragment::insert_new(&suffix));
                new_ropes.push_fragment(&suffix, suffix.visible);
                new_fragments.push(suffix, &None);
            }
            old_fragments.next();
        }

        // Carry over everything after the last edited range unchanged.
        let suffix = old_fragments.suffix();
        new_ropes.append(suffix.summary().text);
        new_fragments.append(suffix, &None);
        let (visible_text, deleted_text) = new_ropes.finish();
        drop(old_fragments);

        self.snapshot.fragments = new_fragments;
        self.snapshot.visible_text = visible_text;
        self.snapshot.deleted_text = deleted_text;
        self.snapshot.insertions.edit(new_insertions, ());
        self.snapshot.insertion_slices.extend(insertion_slices);
        self.subscriptions.publish_mut(&edits_patch)
    }
1158
    /// Returns the locators of all fragments produced by the given edits,
    /// sorted by locator (i.e. in fragment-tree order).
    fn fragment_ids_for_edits<'a>(
        &'a self,
        edit_ids: impl Iterator<Item = &'a clock::Lamport>,
    ) -> Vec<&'a Locator> {
        // Get all of the insertion slices changed by the given edits.
        let mut insertion_slices = Vec::new();
        for edit_id in edit_ids {
            // Build the smallest possible key for this edit id (`Lamport::MIN`
            // insertion id, empty range) so iteration starts at the edit's
            // first slice.
            let insertion_slice = InsertionSlice {
                edit_id_value: edit_id.value,
                edit_id_replica_id: edit_id.replica_id,
                insertion_id_value: Lamport::MIN.value,
                insertion_id_replica_id: Lamport::MIN.replica_id,
                range: 0..0,
            };
            let slices = self
                .snapshot
                .insertion_slices
                .iter_from(&insertion_slice)
                .take_while(|slice| {
                    // Stop as soon as we reach slices belonging to another edit.
                    Lamport {
                        value: slice.edit_id_value,
                        replica_id: slice.edit_id_replica_id,
                    } == *edit_id
                });
            insertion_slices.extend(slices)
        }
        // Sort the slices by insertion id and range so the cursor below can
        // advance monotonically through the insertions tree.
        insertion_slices.sort_unstable_by_key(|s| {
            (
                Lamport {
                    value: s.insertion_id_value,
                    replica_id: s.insertion_id_replica_id,
                },
                s.range.start,
                Reverse(s.range.end),
            )
        });

        // Get all of the fragments corresponding to these insertion slices.
        let mut fragment_ids = Vec::new();
        let mut insertions_cursor = self.insertions.cursor::<InsertionFragmentKey>(());
        for insertion_slice in &insertion_slices {
            let insertion_id = Lamport {
                value: insertion_slice.insertion_id_value,
                replica_id: insertion_slice.insertion_id_replica_id,
            };
            // Only seek when the cursor isn't already positioned at the start
            // of this slice.
            if insertion_id != insertions_cursor.start().timestamp
                || insertion_slice.range.start > insertions_cursor.start().split_offset
            {
                insertions_cursor.seek_forward(
                    &InsertionFragmentKey {
                        timestamp: insertion_id,
                        split_offset: insertion_slice.range.start,
                    },
                    Bias::Left,
                );
            }
            // Collect every fragment of this insertion that starts within the slice.
            while let Some(item) = insertions_cursor.item() {
                if item.timestamp != insertion_id || item.split_offset >= insertion_slice.range.end
                {
                    break;
                }
                fragment_ids.push(&item.fragment_id);
                insertions_cursor.next();
            }
        }
        fragment_ids.sort_unstable();
        fragment_ids
    }
1227
    /// Applies an undo operation by recomputing the visibility of every
    /// fragment affected by the edits it undoes, rebuilding the fragment tree
    /// and both ropes, and publishing a patch describing the visible changes.
    fn apply_undo(&mut self, undo: &UndoOperation) {
        self.snapshot.undo_map.insert(undo);

        let mut edits = Patch::default();
        let mut old_fragments = self
            .fragments
            .cursor::<Dimensions<Option<&Locator>, usize>>(&None);
        let mut new_fragments = SumTree::new(&None);
        let mut new_ropes =
            RopeBuilder::new(self.visible_text.cursor(0), self.deleted_text.cursor(0));

        for fragment_id in self.fragment_ids_for_edits(undo.counts.keys()) {
            // Reuse all fragments preceding the affected one unchanged.
            let preceding_fragments = old_fragments.slice(&Some(fragment_id), Bias::Left);
            new_ropes.append(preceding_fragments.summary().text);
            new_fragments.append(preceding_fragments, &None);

            if let Some(fragment) = old_fragments.item() {
                let mut fragment = fragment.clone();
                let fragment_was_visible = fragment.visible;

                // Recompute visibility now that the undo map includes this undo.
                fragment.visible = fragment.is_visible(&self.undo_map);
                fragment.max_undos.observe(undo.timestamp);

                let old_start = old_fragments.start().1;
                let new_start = new_fragments.summary().text.visible;
                if fragment_was_visible && !fragment.visible {
                    // Text disappeared from the visible rope.
                    edits.push(Edit {
                        old: old_start..old_start + fragment.len as usize,
                        new: new_start..new_start,
                    });
                } else if !fragment_was_visible && fragment.visible {
                    // Text reappeared in the visible rope.
                    edits.push(Edit {
                        old: old_start..old_start,
                        new: new_start..new_start + fragment.len as usize,
                    });
                }
                // The rope builder needs the *old* visibility to know which
                // rope currently holds this fragment's text.
                new_ropes.push_fragment(&fragment, fragment_was_visible);
                new_fragments.push(fragment, &None);

                old_fragments.next();
            }
        }

        let suffix = old_fragments.suffix();
        new_ropes.append(suffix.summary().text);
        new_fragments.append(suffix, &None);

        drop(old_fragments);
        let (visible_text, deleted_text) = new_ropes.finish();
        self.snapshot.fragments = new_fragments;
        self.snapshot.visible_text = visible_text;
        self.snapshot.deleted_text = deleted_text;
        self.subscriptions.publish_mut(&edits);
    }
1282
1283 fn flush_deferred_ops(&mut self) {
1284 self.deferred_replicas.clear();
1285 let mut deferred_ops = Vec::new();
1286 for op in self.deferred_ops.drain().iter().cloned() {
1287 if self.can_apply_op(&op) {
1288 self.apply_op(op);
1289 } else {
1290 self.deferred_replicas.insert(op.replica_id());
1291 deferred_ops.push(op);
1292 }
1293 }
1294 self.deferred_ops.insert(deferred_ops);
1295 }
1296
1297 fn can_apply_op(&self, op: &Operation) -> bool {
1298 if self.deferred_replicas.contains(&op.replica_id()) {
1299 false
1300 } else {
1301 self.version.observed_all(match op {
1302 Operation::Edit(edit) => &edit.version,
1303 Operation::Undo(undo) => &undo.version,
1304 })
1305 }
1306 }
1307
    /// Returns whether any operations are still waiting for their dependencies
    /// to arrive before they can be applied.
    pub fn has_deferred_ops(&self) -> bool {
        !self.deferred_ops.is_empty()
    }
1311
    /// Returns the most recent entry on the undo stack without removing it.
    pub fn peek_undo_stack(&self) -> Option<&HistoryEntry> {
        self.history.undo_stack.last()
    }
1315
    /// Returns the most recent entry on the redo stack without removing it.
    pub fn peek_redo_stack(&self) -> Option<&HistoryEntry> {
        self.history.redo_stack.last()
    }
1319
    /// Starts a transaction at the current instant. See `start_transaction_at`.
    pub fn start_transaction(&mut self) -> Option<TransactionId> {
        self.start_transaction_at(Instant::now())
    }
1323
    /// Starts a transaction with the given timestamp, delegating to the
    /// history. Returns the new transaction's id, if one was created
    /// (presumably `None` when a transaction is already open — TODO confirm
    /// against `History::start_transaction`).
    pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
        self.history
            .start_transaction(self.version.clone(), now, &mut self.lamport_clock)
    }
1328
    /// Ends the current transaction as of the current instant. See `end_transaction_at`.
    pub fn end_transaction(&mut self) -> Option<(TransactionId, clock::Global)> {
        self.end_transaction_at(Instant::now())
    }
1332
1333 pub fn end_transaction_at(&mut self, now: Instant) -> Option<(TransactionId, clock::Global)> {
1334 if let Some(entry) = self.history.end_transaction(now) {
1335 let since = entry.transaction.start.clone();
1336 let id = self.history.group().unwrap();
1337 Some((id, since))
1338 } else {
1339 None
1340 }
1341 }
1342
    /// Finalizes the last transaction in the history, returning it if present.
    pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
        self.history.finalize_last_transaction()
    }
1346
    /// Delegates to the history to group transactions up to `transaction_id`.
    pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
        self.history.group_until(transaction_id);
    }
1350
    /// Returns the buffer's base text, i.e. the content the history started from.
    pub fn base_text(&self) -> &Rope {
        &self.history.base_text
    }
1354
    /// Returns the operations recorded in the history, keyed by lamport timestamp.
    pub fn operations(&self) -> &TreeMap<clock::Lamport, Operation> {
        &self.history.operations
    }
1358
1359 pub fn undo(&mut self) -> Option<(TransactionId, Operation)> {
1360 if let Some(entry) = self.history.pop_undo() {
1361 let transaction = entry.transaction.clone();
1362 let transaction_id = transaction.id;
1363 let op = self.undo_or_redo(transaction);
1364 Some((transaction_id, op))
1365 } else {
1366 None
1367 }
1368 }
1369
1370 pub fn undo_transaction(&mut self, transaction_id: TransactionId) -> Option<Operation> {
1371 let transaction = self
1372 .history
1373 .remove_from_undo(transaction_id)?
1374 .transaction
1375 .clone();
1376 Some(self.undo_or_redo(transaction))
1377 }
1378
1379 pub fn undo_to_transaction(&mut self, transaction_id: TransactionId) -> Vec<Operation> {
1380 let transactions = self
1381 .history
1382 .remove_from_undo_until(transaction_id)
1383 .iter()
1384 .map(|entry| entry.transaction.clone())
1385 .collect::<Vec<_>>();
1386
1387 transactions
1388 .into_iter()
1389 .map(|transaction| self.undo_or_redo(transaction))
1390 .collect()
1391 }
1392
    /// Removes the given transaction from the history (without modifying the
    /// buffer contents), returning it if present.
    pub fn forget_transaction(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
        self.history.forget(transaction_id)
    }
1396
    /// Returns the transaction with the given id, if it exists in the history.
    pub fn get_transaction(&self, transaction_id: TransactionId) -> Option<&Transaction> {
        self.history.transaction(transaction_id)
    }
1400
    /// Merges `transaction` into `destination` within the history.
    pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
        self.history.merge_transactions(transaction, destination);
    }
1404
1405 pub fn redo(&mut self) -> Option<(TransactionId, Operation)> {
1406 if let Some(entry) = self.history.pop_redo() {
1407 let transaction = entry.transaction.clone();
1408 let transaction_id = transaction.id;
1409 let op = self.undo_or_redo(transaction);
1410 Some((transaction_id, op))
1411 } else {
1412 None
1413 }
1414 }
1415
1416 pub fn redo_to_transaction(&mut self, transaction_id: TransactionId) -> Vec<Operation> {
1417 let transactions = self
1418 .history
1419 .remove_from_redo(transaction_id)
1420 .iter()
1421 .map(|entry| entry.transaction.clone())
1422 .collect::<Vec<_>>();
1423
1424 transactions
1425 .into_iter()
1426 .map(|transaction| self.undo_or_redo(transaction))
1427 .collect()
1428 }
1429
1430 fn undo_or_redo(&mut self, transaction: Transaction) -> Operation {
1431 let mut counts = HashMap::default();
1432 for edit_id in transaction.edit_ids {
1433 counts.insert(edit_id, self.undo_map.undo_count(edit_id).saturating_add(1));
1434 }
1435
1436 let operation = self.undo_operations(counts);
1437 self.history.push(operation.clone());
1438 operation
1439 }
1440
    /// Builds and applies an undo operation for the given per-edit undo
    /// counts, returning the operation for broadcast to other replicas.
    pub fn undo_operations(&mut self, counts: HashMap<clock::Lamport, u32>) -> Operation {
        let timestamp = self.lamport_clock.tick();
        // Capture the version *before* observing the undo's own timestamp, so
        // the operation's version describes only its dependencies.
        let version = self.version();
        self.snapshot.version.observe(timestamp);
        let undo = UndoOperation {
            timestamp,
            version,
            counts,
        };
        self.apply_undo(&undo);
        Operation::Undo(undo)
    }
1453
    /// Pushes `transaction` onto the undo history with timestamp `now`.
    pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
        self.history.push_transaction(transaction, now);
    }
1457
    /// Differs from `push_transaction` in that it does not clear the redo
    /// stack. Intended to be used to create a parent transaction to merge
    /// potential child transactions into.
    ///
    /// The caller is responsible for removing it from the undo history using
    /// `forget_transaction` if no edits are merged into it. Otherwise, if edits
    /// are merged into this transaction, the caller is responsible for ensuring
    /// the redo stack is cleared. The easiest way to ensure the redo stack is
    /// cleared is to create transactions with the usual `start_transaction` and
    /// `end_transaction` methods and merging the resulting transactions into
    /// the transaction created by this method.
    pub fn push_empty_transaction(&mut self, now: Instant) -> TransactionId {
        self.history
            .push_empty_transaction(self.version.clone(), now, &mut self.lamport_clock)
    }
1475
    /// Returns the current ranges of the text edited by the transaction with
    /// the given id, or an empty iterator if it is not in the history.
    pub fn edited_ranges_for_transaction_id<D>(
        &self,
        transaction_id: TransactionId,
    ) -> impl '_ + Iterator<Item = Range<D>>
    where
        D: TextDimension,
    {
        self.history
            .transaction(transaction_id)
            .into_iter()
            .flat_map(|transaction| self.edited_ranges_for_transaction(transaction))
    }
1488
    /// Returns the ranges currently occupied by the text inserted by the given
    /// edits, with adjacent ranges merged, expressed in dimension `D`.
    pub fn edited_ranges_for_edit_ids<'a, D>(
        &'a self,
        edit_ids: impl IntoIterator<Item = &'a clock::Lamport>,
    ) -> impl 'a + Iterator<Item = Range<D>>
    where
        D: TextDimension,
    {
        // get fragment ranges
        let mut cursor = self
            .fragments
            .cursor::<Dimensions<Option<&Locator>, usize>>(&None);
        let offset_ranges = self
            .fragment_ids_for_edits(edit_ids.into_iter())
            .into_iter()
            .filter_map(move |fragment_id| {
                // Fragment ids are sorted, so a forward-only seek suffices.
                cursor.seek_forward(&Some(fragment_id), Bias::Left);
                let fragment = cursor.item()?;
                let start_offset = cursor.start().1;
                // Deleted fragments contribute an empty range at their position.
                let end_offset = start_offset
                    + if fragment.visible {
                        fragment.len as usize
                    } else {
                        0
                    };
                Some(start_offset..end_offset)
            });

        // combine adjacent ranges
        let mut prev_range: Option<Range<usize>> = None;
        let disjoint_ranges = offset_ranges
            .map(Some)
            .chain([None])
            .filter_map(move |range| {
                // Grow the pending range while the next one abuts it; the
                // trailing `None` flushes the final pending range.
                if let Some((range, prev_range)) = range.as_ref().zip(prev_range.as_mut())
                    && prev_range.end == range.start
                {
                    prev_range.end = range.end;
                    return None;
                }
                let result = prev_range.clone();
                prev_range = range;
                result
            });

        // convert to the desired text dimension.
        let mut position = D::zero(());
        let mut rope_cursor = self.visible_text.cursor(0);
        disjoint_ranges.map(move |range| {
            position.add_assign(&rope_cursor.summary(range.start));
            let start = position;
            position.add_assign(&rope_cursor.summary(range.end));
            let end = position;
            start..end
        })
    }
1544
    /// Returns the current ranges of the text edited by the given transaction.
    pub fn edited_ranges_for_transaction<'a, D>(
        &'a self,
        transaction: &'a Transaction,
    ) -> impl 'a + Iterator<Item = Range<D>>
    where
        D: TextDimension,
    {
        self.edited_ranges_for_edit_ids(&transaction.edit_ids)
    }
1554
    /// Registers and returns a new subscription to the edit patches this
    /// buffer publishes when its contents change.
    pub fn subscribe(&mut self) -> Subscription<usize> {
        self.subscriptions.subscribe()
    }
1558
1559 pub fn wait_for_edits<It: IntoIterator<Item = clock::Lamport>>(
1560 &mut self,
1561 edit_ids: It,
1562 ) -> impl 'static + Future<Output = Result<()>> + use<It> {
1563 let mut futures = Vec::new();
1564 for edit_id in edit_ids {
1565 if !self.version.observed(edit_id) {
1566 let (tx, rx) = oneshot::channel();
1567 self.edit_id_resolvers.entry(edit_id).or_default().push(tx);
1568 futures.push(rx);
1569 }
1570 }
1571
1572 async move {
1573 for mut future in futures {
1574 if future.recv().await.is_none() {
1575 anyhow::bail!("gave up waiting for edits");
1576 }
1577 }
1578 Ok(())
1579 }
1580 }
1581
    /// Returns a future that resolves once the insertions referenced by all of
    /// the given anchors have been applied to this buffer.
    pub fn wait_for_anchors<It: IntoIterator<Item = Anchor>>(
        &mut self,
        anchors: It,
    ) -> impl 'static + Future<Output = Result<()>> + use<It> {
        let mut futures = Vec::new();
        for anchor in anchors {
            // Min/max anchors are always resolvable and carry no edit to wait for.
            if !self.version.observed(anchor.timestamp()) && !anchor.is_max() && !anchor.is_min() {
                let (tx, rx) = oneshot::channel();
                self.edit_id_resolvers
                    .entry(anchor.timestamp())
                    .or_default()
                    .push(tx);
                futures.push(rx);
            }
        }

        async move {
            for mut future in futures {
                // The sender is dropped without sending when the buffer gives up.
                if future.recv().await.is_none() {
                    anyhow::bail!("gave up waiting for anchors");
                }
            }
            Ok(())
        }
    }
1607
    /// Returns a future that resolves once this buffer has observed every edit
    /// in the given `version`.
    pub fn wait_for_version(
        &mut self,
        version: clock::Global,
    ) -> impl Future<Output = Result<()>> + use<> {
        let mut rx = None;
        if !self.snapshot.version.observed_all(&version) {
            let channel = oneshot::channel();
            self.wait_for_version_txs.push((version, channel.0));
            rx = Some(channel.1);
        }
        async move {
            if let Some(mut rx) = rx
                && rx.recv().await.is_none()
            {
                // The sender was dropped without notifying us (see `give_up_waiting`).
                anyhow::bail!("gave up waiting for version");
            }
            Ok(())
        }
    }
1627
    /// Cancels all pending `wait_for_*` futures; dropping the senders causes
    /// those futures to resolve with an error.
    pub fn give_up_waiting(&mut self) {
        self.edit_id_resolvers.clear();
        self.wait_for_version_txs.clear();
    }
1632
1633 fn resolve_edit(&mut self, edit_id: clock::Lamport) {
1634 for mut tx in self
1635 .edit_id_resolvers
1636 .remove(&edit_id)
1637 .into_iter()
1638 .flatten()
1639 {
1640 tx.try_send(()).ok();
1641 }
1642 }
1643
    /// Sets the history's transaction grouping interval.
    pub fn set_group_interval(&mut self, group_interval: Duration) {
        self.history.group_interval = group_interval;
    }
1647}
1648
#[cfg(any(test, feature = "test-support"))]
impl Buffer {
    /// Applies the edits described by `marked_string`. See `edits_for_marked_text`.
    #[track_caller]
    pub fn edit_via_marked_text(&mut self, marked_string: &str) {
        let edits = self.edits_for_marked_text(marked_string);
        self.edit(edits);
    }

    /// Computes the edits that would transform this buffer's current text into
    /// `marked_string`, where the marked ranges indicate the inserted text.
    /// Panics ("invalid edit") if `marked_string` cannot be derived from the
    /// current text.
    #[track_caller]
    pub fn edits_for_marked_text(&self, marked_string: &str) -> Vec<(Range<usize>, String)> {
        let old_text = self.text();
        let (new_text, mut ranges) = util::test::marked_text_ranges(marked_string, false);
        if ranges.is_empty() {
            // No markers: treat the entire new text as a single insertion.
            ranges.push(0..new_text.len());
        }

        // The text preceding the first marked range must be unchanged.
        assert_eq!(
            old_text[..ranges[0].start],
            new_text[..ranges[0].start],
            "invalid edit"
        );

        let mut delta = 0;
        let mut edits = Vec::new();
        let mut ranges = ranges.into_iter().peekable();

        while let Some(inserted_range) = ranges.next() {
            let new_start = inserted_range.start;
            // `delta` tracks the running length difference between new and old text.
            let old_start = (new_start as isize - delta) as usize;

            // The unchanged text between this insertion and the next (or the
            // end of the string) anchors how much old text was deleted.
            let following_text = if let Some(next_range) = ranges.peek() {
                &new_text[inserted_range.end..next_range.start]
            } else {
                &new_text[inserted_range.end..]
            };

            let inserted_len = inserted_range.len();
            let deleted_len = old_text[old_start..]
                .find(following_text)
                .expect("invalid edit");

            let old_range = old_start..old_start + deleted_len;
            edits.push((old_range, new_text[inserted_range].to_string()));
            delta += inserted_len as isize - deleted_len as isize;
        }

        assert_eq!(
            old_text.len() as isize + delta,
            new_text.len() as isize,
            "invalid edit"
        );

        edits
    }

    /// Asserts internal consistency between the fragment tree, the insertions
    /// tree, and the visible/deleted ropes.
    pub fn check_invariants(&self) {
        // Ensure every fragment is ordered by locator in the fragment tree and corresponds
        // to an insertion fragment in the insertions tree.
        let mut prev_fragment_id = Locator::min();
        for fragment in self.snapshot.fragments.items(&None) {
            assert!(fragment.id > prev_fragment_id);
            prev_fragment_id = fragment.id.clone();

            let insertion_fragment = self
                .snapshot
                .insertions
                .get(
                    &InsertionFragmentKey {
                        timestamp: fragment.timestamp,
                        split_offset: fragment.insertion_offset,
                    },
                    (),
                )
                .unwrap();
            assert_eq!(
                insertion_fragment.fragment_id, fragment.id,
                "fragment: {:?}\ninsertion: {:?}",
                fragment, insertion_fragment
            );
        }

        // Every insertion fragment must point back at a matching fragment.
        let mut cursor = self.snapshot.fragments.cursor::<Option<&Locator>>(&None);
        for insertion_fragment in self.snapshot.insertions.cursor::<()>(()) {
            cursor.seek(&Some(&insertion_fragment.fragment_id), Bias::Left);
            let fragment = cursor.item().unwrap();
            assert_eq!(insertion_fragment.fragment_id, fragment.id);
            assert_eq!(insertion_fragment.split_offset, fragment.insertion_offset);
        }

        // The fragment summary must agree with the rope lengths.
        let fragment_summary = self.snapshot.fragments.summary();
        assert_eq!(
            fragment_summary.text.visible,
            self.snapshot.visible_text.len()
        );
        assert_eq!(
            fragment_summary.text.deleted,
            self.snapshot.deleted_text.len()
        );

        // Line separators are normalized on edit, so CRLF must never appear.
        assert!(!self.text().contains("\r\n"));
    }

    /// Returns a random byte range within the buffer, clipped to valid
    /// offsets, starting at or after `start_offset`.
    pub fn random_byte_range(&self, start_offset: usize, rng: &mut impl rand::Rng) -> Range<usize> {
        let end = self.clip_offset(rng.random_range(start_offset..=self.len()), Bias::Right);
        let start = self.clip_offset(rng.random_range(start_offset..=end), Bias::Right);
        start..end
    }

    /// Generates up to `edit_count` random, non-overlapping edits whose ranges
    /// are produced in ascending order.
    pub fn get_random_edits<T>(
        &self,
        rng: &mut T,
        edit_count: usize,
    ) -> Vec<(Range<usize>, Arc<str>)>
    where
        T: rand::Rng,
    {
        let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
        let mut last_end = None;
        for _ in 0..edit_count {
            if last_end.is_some_and(|last_end| last_end >= self.len()) {
                break;
            }
            // Leave at least one byte between consecutive edit ranges.
            let new_start = last_end.map_or(0, |last_end| last_end + 1);
            let range = self.random_byte_range(new_start, rng);
            last_end = Some(range.end);

            let new_text_len = rng.random_range(0..10);
            let new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();

            edits.push((range, new_text.into()));
        }
        edits
    }

    /// Applies `edit_count` random edits and returns them along with the
    /// resulting operation.
    pub fn randomly_edit<T>(
        &mut self,
        rng: &mut T,
        edit_count: usize,
    ) -> (Vec<(Range<usize>, Arc<str>)>, Operation)
    where
        T: rand::Rng,
    {
        let mut edits = self.get_random_edits(rng, edit_count);
        log::info!("mutating buffer {:?} with {:?}", self.replica_id, edits);

        let op = self.edit(edits.iter().cloned());
        if let Operation::Edit(edit) = &op {
            assert_eq!(edits.len(), edit.new_text.len());
            // Report the text the operation actually recorded (it may differ
            // from the generated text, e.g. due to line-ending normalization).
            for (edit, new_text) in edits.iter_mut().zip(&edit.new_text) {
                edit.1 = new_text.clone();
            }
        } else {
            unreachable!()
        }

        (edits, op)
    }

    /// Undoes between 1 and 5 randomly chosen transactions from the undo
    /// stack, returning the operations produced.
    pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng) -> Vec<Operation> {
        use rand::prelude::*;

        let mut ops = Vec::new();
        for _ in 0..rng.random_range(1..=5) {
            if let Some(entry) = self.history.undo_stack.choose(rng) {
                let transaction = entry.transaction.clone();
                log::info!(
                    "undoing buffer {:?} transaction {:?}",
                    self.replica_id,
                    transaction
                );
                ops.push(self.undo_or_redo(transaction));
            }
        }
        ops
    }
}
1825
/// Appends fragments for `new_text` to `new_fragments`, splitting the text
/// into chunks of at most `MAX_INSERTION_LEN` bytes (on char boundaries) so
/// fragment lengths fit in relative `u32` offsets. Records each chunk in the
/// insertion bookkeeping (`new_insertions`, `insertion_slices`) and pushes the
/// full text onto the rope builder.
fn push_fragments_for_insertion(
    new_text: &str,
    timestamp: clock::Lamport,
    insertion_offset: &mut u32,
    new_fragments: &mut SumTree<Fragment>,
    new_insertions: &mut Vec<sum_tree::Edit<InsertionFragment>>,
    insertion_slices: &mut Vec<InsertionSlice>,
    new_ropes: &mut RopeBuilder,
    next_fragment_id: &Locator,
    edit_timestamp: clock::Lamport,
) {
    let mut text_offset = 0;
    while text_offset < new_text.len() {
        let target_end = new_text.len().min(text_offset + MAX_INSERTION_LEN);
        // Never split a chunk in the middle of a multi-byte character.
        let chunk_end = if target_end == new_text.len() {
            target_end
        } else {
            new_text.floor_char_boundary(target_end)
        };
        if chunk_end == text_offset {
            // Defensive: no progress is possible (would require a single char
            // longer than MAX_INSERTION_LEN); bail rather than loop forever.
            break;
        }
        let chunk_len = chunk_end - text_offset;

        let fragment = Fragment {
            // Order the new fragment after everything pushed so far and before
            // the next existing fragment.
            id: Locator::between(&new_fragments.summary().max_id, next_fragment_id),
            timestamp,
            insertion_offset: *insertion_offset,
            len: chunk_len as u32,
            deletions: Default::default(),
            max_undos: Default::default(),
            visible: true,
        };
        insertion_slices.push(InsertionSlice::from_fragment(edit_timestamp, &fragment));
        new_insertions.push(InsertionFragment::insert_new(&fragment));
        new_fragments.push(fragment, &None);

        *insertion_offset += chunk_len as u32;
        text_offset = chunk_end;
    }
    new_ropes.push_str(new_text);
}
1868
/// `Buffer` dereferences to its current snapshot, exposing all read-only
/// snapshot methods directly on the buffer.
impl Deref for Buffer {
    type Target = BufferSnapshot;

    fn deref(&self) -> &Self::Target {
        &self.snapshot
    }
}
1876
1877impl BufferSnapshot {
    /// Applies a set of local edits (offset ranges with replacement text, in
    /// ascending order) to the snapshot, returning the edit operation to
    /// broadcast to other replicas and a patch describing the changes to the
    /// visible text.
    fn apply_edit_internal(
        &mut self,
        edits: Vec<(Range<usize>, Arc<str>)>,
        timestamp: clock::Lamport,
    ) -> (EditOperation, Patch<usize>) {
        let mut edits_patch = Patch::default();
        let mut edit_op = EditOperation {
            timestamp,
            version: self.version.clone(),
            ranges: Vec::with_capacity(edits.len()),
            new_text: Vec::with_capacity(edits.len()),
        };
        let mut new_insertions = Vec::new();
        // Offset of the next chunk within this edit's overall insertion.
        let mut insertion_offset: u32 = 0;
        let mut insertion_slices = Vec::new();

        let mut edits = edits.into_iter().peekable();

        if edits.peek().is_none() {
            // Nothing to do; return an empty operation and patch.
            return (edit_op, edits_patch);
        }

        let mut new_ropes =
            RopeBuilder::new(self.visible_text.cursor(0), self.deleted_text.cursor(0));
        let mut old_fragments = self.fragments.cursor::<FragmentTextSummary>(&None);
        // Reuse all fragments that precede the first edit unchanged.
        let mut new_fragments = old_fragments.slice(&edits.peek().unwrap().0.start, Bias::Right);
        new_ropes.append(new_fragments.summary().text);

        let mut fragment_start = old_fragments.start().visible;
        for (range, new_text) in edits {
            let new_text: Arc<str> = LineEnding::normalize_arc(new_text);
            let fragment_end = old_fragments.end().visible;

            // If the current fragment ends before this range, then jump ahead to the first fragment
            // that extends past the start of this range, reusing any intervening fragments.
            if fragment_end < range.start {
                // If the current fragment has been partially consumed, then consume the rest of it
                // and advance to the next fragment before slicing.
                if fragment_start > old_fragments.start().visible {
                    if fragment_end > fragment_start {
                        let mut suffix = old_fragments.item().unwrap().clone();
                        suffix.len = (fragment_end - fragment_start) as u32;
                        suffix.insertion_offset +=
                            (fragment_start - old_fragments.start().visible) as u32;
                        new_insertions.push(InsertionFragment::insert_new(&suffix));
                        new_ropes.push_fragment(&suffix, suffix.visible);
                        new_fragments.push(suffix, &None);
                    }
                    old_fragments.next();
                }

                let slice = old_fragments.slice(&range.start, Bias::Right);
                new_ropes.append(slice.summary().text);
                new_fragments.append(slice, &None);
                fragment_start = old_fragments.start().visible;
            }

            // Record the edit's start in full-offset coordinates (visible + deleted text).
            let full_range_start = FullOffset(range.start + old_fragments.start().deleted);

            // Preserve any portion of the current fragment that precedes this range.
            if fragment_start < range.start {
                let mut prefix = old_fragments.item().unwrap().clone();
                prefix.len = (range.start - fragment_start) as u32;
                prefix.insertion_offset += (fragment_start - old_fragments.start().visible) as u32;
                prefix.id = Locator::between(&new_fragments.summary().max_id, &prefix.id);
                new_insertions.push(InsertionFragment::insert_new(&prefix));
                new_ropes.push_fragment(&prefix, prefix.visible);
                new_fragments.push(prefix, &None);
                fragment_start = range.start;
            }

            // Insert the new text before any existing fragments within the range.
            if !new_text.is_empty() {
                let new_start = new_fragments.summary().text.visible;

                let next_fragment_id = old_fragments
                    .item()
                    .map_or(Locator::max_ref(), |old_fragment| &old_fragment.id);
                push_fragments_for_insertion(
                    new_text.as_ref(),
                    timestamp,
                    &mut insertion_offset,
                    &mut new_fragments,
                    &mut new_insertions,
                    &mut insertion_slices,
                    &mut new_ropes,
                    next_fragment_id,
                    timestamp,
                );
                edits_patch.push(Edit {
                    old: fragment_start..fragment_start,
                    new: new_start..new_start + new_text.len(),
                });
            }

            // Advance through every fragment that intersects this range, marking the intersecting
            // portions as deleted.
            while fragment_start < range.end {
                let fragment = old_fragments.item().unwrap();
                let fragment_end = old_fragments.end().visible;
                let mut intersection = fragment.clone();
                let intersection_end = cmp::min(range.end, fragment_end);
                if fragment.visible {
                    // Split off just the intersecting portion and mark it deleted.
                    intersection.len = (intersection_end - fragment_start) as u32;
                    intersection.insertion_offset +=
                        (fragment_start - old_fragments.start().visible) as u32;
                    intersection.id =
                        Locator::between(&new_fragments.summary().max_id, &intersection.id);
                    intersection.deletions.push(timestamp);
                    intersection.visible = false;
                }
                if intersection.len > 0 {
                    if fragment.visible && !intersection.visible {
                        let new_start = new_fragments.summary().text.visible;
                        edits_patch.push(Edit {
                            old: fragment_start..intersection_end,
                            new: new_start..new_start,
                        });
                        insertion_slices
                            .push(InsertionSlice::from_fragment(timestamp, &intersection));
                    }
                    new_insertions.push(InsertionFragment::insert_new(&intersection));
                    new_ropes.push_fragment(&intersection, fragment.visible);
                    new_fragments.push(intersection, &None);
                    fragment_start = intersection_end;
                }
                if fragment_end <= range.end {
                    old_fragments.next();
                }
            }

            // Record the edit's end in full-offset coordinates.
            let full_range_end = FullOffset(range.end + old_fragments.start().deleted);
            edit_op.ranges.push(full_range_start..full_range_end);
            edit_op.new_text.push(new_text);
        }

        // If the current fragment has been partially consumed, then consume the rest of it
        // and advance to the next fragment before slicing.
        if fragment_start > old_fragments.start().visible {
            let fragment_end = old_fragments.end().visible;
            if fragment_end > fragment_start {
                let mut suffix = old_fragments.item().unwrap().clone();
                suffix.len = (fragment_end - fragment_start) as u32;
                suffix.insertion_offset += (fragment_start - old_fragments.start().visible) as u32;
                new_insertions.push(InsertionFragment::insert_new(&suffix));
                new_ropes.push_fragment(&suffix, suffix.visible);
                new_fragments.push(suffix, &None);
            }
            old_fragments.next();
        }

        // Reuse all remaining fragments unchanged and commit the new state.
        let suffix = old_fragments.suffix();
        new_ropes.append(suffix.summary().text);
        new_fragments.append(suffix, &None);
        let (visible_text, deleted_text) = new_ropes.finish();
        drop(old_fragments);

        self.fragments = new_fragments;
        self.insertions.edit(new_insertions, ());
        self.visible_text = visible_text;
        self.deleted_text = deleted_text;
        self.insertion_slices.extend(insertion_slices);
        (edit_op, edits_patch)
    }
2042
    /// Returns the rope containing the buffer's visible text.
    pub fn as_rope(&self) -> &Rope {
        &self.visible_text
    }
2046
    /// Reconstructs the buffer's text as it was at the given `version` by
    /// pulling fragment text from the visible or deleted rope as appropriate.
    pub fn rope_for_version(&self, version: &clock::Global) -> Rope {
        let mut rope = Rope::new();

        // Only visit fragments whose state could differ at `version`; subtrees
        // whose every edit was already observed are copied wholesale between visits.
        let mut cursor = self
            .fragments
            .filter::<_, FragmentTextSummary>(&None, move |summary| {
                !version.observed_all(&summary.max_version)
            });
        cursor.next();

        let mut visible_cursor = self.visible_text.cursor(0);
        let mut deleted_cursor = self.deleted_text.cursor(0);

        while let Some(fragment) = cursor.item() {
            // Copy any skipped (fully-observed, visible) text preceding this fragment.
            if cursor.start().visible > visible_cursor.offset() {
                let text = visible_cursor.slice(cursor.start().visible);
                rope.append(text);
            }

            if fragment.was_visible(version, &self.undo_map) {
                // Visible at `version`: fetch its text from whichever rope
                // currently holds it.
                if fragment.visible {
                    let text = visible_cursor.slice(cursor.end().visible);
                    rope.append(text);
                } else {
                    deleted_cursor.seek_forward(cursor.start().deleted);
                    let text = deleted_cursor.slice(cursor.end().deleted);
                    rope.append(text);
                }
            } else if fragment.visible {
                // Not visible at `version` but visible now: skip its text.
                visible_cursor.seek_forward(cursor.end().visible);
            }

            cursor.next();
        }

        // Copy any trailing fully-observed visible text.
        if cursor.start().visible > visible_cursor.offset() {
            let text = visible_cursor.slice(cursor.start().visible);
            rope.append(text);
        }

        rope
    }
2089
    /// Returns the buffer's remote id.
    pub fn remote_id(&self) -> BufferId {
        self.remote_id
    }
2093
    /// Returns the id of the replica this snapshot belongs to.
    pub fn replica_id(&self) -> ReplicaId {
        self.replica_id
    }
2097
    /// Returns the number of rows (lines) in the visible text.
    pub fn row_count(&self) -> u32 {
        self.max_point().row + 1
    }
2101
    /// Returns the length of the visible text.
    pub fn len(&self) -> usize {
        self.visible_text.len()
    }
2105
    /// Returns whether the visible text is empty.
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }
2109
    /// Returns an iterator over all characters in the visible text, from the start.
    pub fn chars(&self) -> impl Iterator<Item = char> + '_ {
        self.chars_at(0)
    }
2113
2114 pub fn chars_for_range<T: ToOffset>(&self, range: Range<T>) -> impl Iterator<Item = char> + '_ {
2115 self.text_for_range(range).flat_map(str::chars)
2116 }
2117
2118 pub fn reversed_chars_for_range<T: ToOffset>(
2119 &self,
2120 range: Range<T>,
2121 ) -> impl Iterator<Item = char> + '_ {
2122 self.reversed_chunks_in_range(range)
2123 .flat_map(|chunk| chunk.chars().rev())
2124 }
2125
2126 pub fn contains_str_at<T>(&self, position: T, needle: &str) -> bool
2127 where
2128 T: ToOffset,
2129 {
2130 let position = position.to_offset(self);
2131 position == self.clip_offset(position, Bias::Left)
2132 && self
2133 .bytes_in_range(position..self.len())
2134 .flatten()
2135 .copied()
2136 .take(needle.len())
2137 .eq(needle.bytes())
2138 }
2139
    /// Returns the range of the longest case-insensitive suffix of the text
    /// ending at `position` that is also a prefix of `needle`.
    ///
    /// The returned range always ends at `position`.
    pub fn common_prefix_at<T>(&self, position: T, needle: &str) -> Range<T>
    where
        T: ToOffset + TextDimension,
    {
        let offset = position.to_offset(self);
        // Candidate prefix lengths are `needle`'s char boundaries (plus its
        // full length), capped by the amount of text available before `offset`.
        let common_prefix_len = needle
            .char_indices()
            .map(|(index, _)| index)
            .chain([needle.len()])
            .take_while(|&len| len <= offset)
            .filter(|&len| {
                // Compare case-insensitively using full Unicode lowercasing.
                let left = self
                    .chars_for_range(offset - len..offset)
                    .flat_map(char::to_lowercase);
                let right = needle[..len].chars().flat_map(char::to_lowercase);
                left.eq(right)
            })
            .last()
            .unwrap_or(0);
        let start_offset = offset - common_prefix_len;
        let start = self.text_summary_for_range(0..start_offset);
        start..position
    }
2163
    /// The entire visible text as an owned `String`.
    pub fn text(&self) -> String {
        self.visible_text.to_string()
    }

    /// The line ending style recorded for this buffer.
    pub fn line_ending(&self) -> LineEnding {
        self.line_ending
    }

    /// All deleted text as an owned `String`.
    pub fn deleted_text(&self) -> String {
        self.deleted_text.to_string()
    }

    /// A summary (length, line counts, etc.) of the entire visible text.
    pub fn text_summary(&self) -> TextSummary {
        self.visible_text.summary()
    }
2179
    /// The position of the end of the visible text, as a UTF-8 [`Point`].
    pub fn max_point(&self) -> Point {
        self.visible_text.max_point()
    }

    /// The position of the end of the visible text, in UTF-16 coordinates.
    pub fn max_point_utf16(&self) -> PointUtf16 {
        self.visible_text.max_point_utf16()
    }

    /// Converts a UTF-8 point to a byte offset.
    pub fn point_to_offset(&self, point: Point) -> usize {
        self.visible_text.point_to_offset(point)
    }

    /// Converts a UTF-8 point to a UTF-16 offset.
    pub fn point_to_offset_utf16(&self, point: Point) -> OffsetUtf16 {
        self.visible_text.point_to_offset_utf16(point)
    }

    /// Converts a UTF-16 point to a UTF-16 offset.
    pub fn point_utf16_to_offset_utf16(&self, point: PointUtf16) -> OffsetUtf16 {
        self.visible_text.point_utf16_to_offset_utf16(point)
    }

    /// Converts a UTF-16 point to a byte offset.
    pub fn point_utf16_to_offset(&self, point: PointUtf16) -> usize {
        self.visible_text.point_utf16_to_offset(point)
    }

    /// Converts a possibly-invalid UTF-16 point to a byte offset.
    pub fn unclipped_point_utf16_to_offset(&self, point: Unclipped<PointUtf16>) -> usize {
        self.visible_text.unclipped_point_utf16_to_offset(point)
    }

    /// Converts a possibly-invalid UTF-16 point to a UTF-8 point.
    pub fn unclipped_point_utf16_to_point(&self, point: Unclipped<PointUtf16>) -> Point {
        self.visible_text.unclipped_point_utf16_to_point(point)
    }

    /// Converts a UTF-16 offset to a byte offset.
    pub fn offset_utf16_to_offset(&self, offset: OffsetUtf16) -> usize {
        self.visible_text.offset_utf16_to_offset(offset)
    }

    /// Converts a byte offset to a UTF-16 offset.
    pub fn offset_to_offset_utf16(&self, offset: usize) -> OffsetUtf16 {
        self.visible_text.offset_to_offset_utf16(offset)
    }

    /// Converts a byte offset to a UTF-8 point.
    pub fn offset_to_point(&self, offset: usize) -> Point {
        self.visible_text.offset_to_point(offset)
    }

    /// Converts a byte offset to a UTF-16 point.
    pub fn offset_to_point_utf16(&self, offset: usize) -> PointUtf16 {
        self.visible_text.offset_to_point_utf16(offset)
    }

    /// Converts a UTF-8 point to a UTF-16 point.
    pub fn point_to_point_utf16(&self, point: Point) -> PointUtf16 {
        self.visible_text.point_to_point_utf16(point)
    }

    /// Converts a UTF-16 point to a UTF-8 point.
    pub fn point_utf16_to_point(&self, point: PointUtf16) -> Point {
        self.visible_text.point_utf16_to_point(point)
    }

    /// The vector clock describing which operations this snapshot reflects.
    pub fn version(&self) -> &clock::Global {
        &self.version
    }
2239
    /// Iterates over characters starting at the given position.
    pub fn chars_at<T: ToOffset>(&self, position: T) -> impl Iterator<Item = char> + '_ {
        let offset = position.to_offset(self);
        self.visible_text.chars_at(offset)
    }

    /// Iterates over characters before the given position, moving backwards.
    pub fn reversed_chars_at<T: ToOffset>(&self, position: T) -> impl Iterator<Item = char> + '_ {
        let offset = position.to_offset(self);
        self.visible_text.reversed_chars_at(offset)
    }

    /// Iterates over text chunks in the given range, last to first.
    pub fn reversed_chunks_in_range<T: ToOffset>(&self, range: Range<T>) -> rope::Chunks<'_> {
        let range = range.start.to_offset(self)..range.end.to_offset(self);
        self.visible_text.reversed_chunks_in_range(range)
    }

    /// Iterates over the bytes in the given range.
    pub fn bytes_in_range<T: ToOffset>(&self, range: Range<T>) -> rope::Bytes<'_> {
        let start = range.start.to_offset(self);
        let end = range.end.to_offset(self);
        self.visible_text.bytes_in_range(start..end)
    }

    /// Iterates over the bytes in the given range, last to first.
    pub fn reversed_bytes_in_range<T: ToOffset>(&self, range: Range<T>) -> rope::Bytes<'_> {
        let start = range.start.to_offset(self);
        let end = range.end.to_offset(self);
        self.visible_text.reversed_bytes_in_range(start..end)
    }

    /// Iterates over the text chunks in the given range.
    pub fn text_for_range<T: ToOffset>(&self, range: Range<T>) -> Chunks<'_> {
        let start = range.start.to_offset(self);
        let end = range.end.to_offset(self);
        self.visible_text.chunks_in_range(start..end)
    }
2272
2273 pub fn line_len(&self, row: u32) -> u32 {
2274 let row_start_offset = Point::new(row, 0).to_offset(self);
2275 let row_end_offset = if row >= self.max_point().row {
2276 self.len()
2277 } else {
2278 Point::new(row + 1, 0).to_previous_offset(self)
2279 };
2280 (row_end_offset - row_start_offset) as u32
2281 }
2282
    /// A function to convert character offsets from e.g. user's `go.mod:22:33` input into byte-offset Point columns.
    ///
    /// `row` is clamped to the buffer's last row, and `characters` beyond the
    /// end of the line saturate to the end of the available text.
    pub fn point_from_external_input(&self, row: u32, characters: u32) -> Point {
        // Worst-case bytes per character in UTF-8; used only to over-estimate
        // the clipped scan range below.
        const MAX_BYTES_IN_UTF_8: u32 = 4;

        let row = row.min(self.max_point().row);
        let start = Point::new(row, 0);
        // Over-estimate the byte column, then clip to a valid position; the
        // precise column is found by walking characters below.
        let end = self.clip_point(
            Point::new(
                row,
                characters
                    .saturating_mul(MAX_BYTES_IN_UTF_8)
                    .saturating_add(1),
            ),
            Bias::Right,
        );
        let range = start..end;
        let mut point = range.start;
        let mut remaining_columns = characters;

        // Advance one character at a time, accumulating byte widths.
        for chunk in self.text_for_range(range) {
            for character in chunk.chars() {
                if remaining_columns == 0 {
                    return point;
                }
                remaining_columns -= 1;
                point.column += character.len_utf8() as u32;
            }
        }
        point
    }
2313
    /// Returns `(row, indent)` pairs for each row in the given range,
    /// inclusive of `row_range.end`.
    pub fn line_indents_in_row_range(
        &self,
        row_range: Range<u32>,
    ) -> impl Iterator<Item = (u32, LineIndent)> + '_ {
        let start = Point::new(row_range.start, 0).to_offset(self);
        let end = Point::new(row_range.end, self.line_len(row_range.end)).to_offset(self);

        let mut chunks = self.as_rope().chunks_in_range(start..end);
        let mut row = row_range.start;
        let mut done = false;
        std::iter::from_fn(move || {
            if done {
                None
            } else {
                // `from_chunks` consumes the current line's leading
                // whitespace; `next_line` then advances to the following row,
                // returning false once the range is exhausted.
                let indent = (row, LineIndent::from_chunks(&mut chunks));
                done = !chunks.next_line();
                row += 1;
                Some(indent)
            }
        })
    }
2335
    /// Returns the line indents in the given row range, exclusive of end row, in reversed order.
    pub fn reversed_line_indents_in_row_range(
        &self,
        row_range: Range<u32>,
    ) -> impl Iterator<Item = (u32, LineIndent)> + '_ {
        let start = Point::new(row_range.start, 0).to_offset(self);

        let end_point;
        let end;
        if row_range.end > row_range.start {
            // Last row actually yielded is `row_range.end - 1`.
            end_point = Point::new(row_range.end - 1, self.line_len(row_range.end - 1));
            end = end_point.to_offset(self);
        } else {
            // NOTE(review): for an empty range the iterator below still yields
            // one entry for `row_range.start` — confirm callers rely on this.
            end_point = Point::new(row_range.start, 0);
            end = start;
        };

        let mut chunks = self.as_rope().chunks_in_range(start..end);
        // Move the cursor to the start of the last line if it's not empty.
        chunks.seek(end);
        if end_point.column > 0 {
            chunks.prev_line();
        }

        let mut row = end_point.row;
        let mut done = false;
        std::iter::from_fn(move || {
            if done {
                None
            } else {
                let initial_offset = chunks.offset();
                let indent = (row, LineIndent::from_chunks(&mut chunks));
                // If scanning the indent advanced the cursor, rewind to the
                // start of the current line before stepping to the previous one.
                if chunks.offset() > initial_offset {
                    chunks.prev_line();
                }
                done = !chunks.prev_line();
                if !done {
                    row -= 1;
                }

                Some(indent)
            }
        })
    }
2380
    /// The leading-whitespace indent of the given row.
    pub fn line_indent_for_row(&self, row: u32) -> LineIndent {
        LineIndent::from_iter(self.chars_at(Point::new(row, 0)))
    }
2384
2385 pub fn is_line_blank(&self, row: u32) -> bool {
2386 self.text_for_range(Point::new(row, 0)..Point::new(row, self.line_len(row)))
2387 .all(|chunk| chunk.matches(|c: char| !c.is_whitespace()).next().is_none())
2388 }
2389
    /// Computes the text dimension `D` (offset, point, etc.) spanned by the
    /// given range of the visible text.
    pub fn text_summary_for_range<D, O: ToOffset>(&self, range: Range<O>) -> D
    where
        D: TextDimension,
    {
        self.visible_text
            .cursor(range.start.to_offset(self))
            .summary(range.end.to_offset(self))
    }
2398
    /// Resolves each anchor to the text dimension `D`.
    ///
    /// Anchors must be passed in buffer order; resolution shares a single
    /// forward pass via `summaries_for_anchors_with_payload`.
    pub fn summaries_for_anchors<'a, D, A>(&'a self, anchors: A) -> impl 'a + Iterator<Item = D>
    where
        D: 'a + TextDimension,
        A: 'a + IntoIterator<Item = Anchor>,
    {
        let anchors = anchors.into_iter();
        self.summaries_for_anchors_with_payload::<D, _, ()>(anchors.map(|a| (a, ())))
            .map(|d| d.0)
    }
2408
    /// Resolves each `(anchor, payload)` pair to `(dimension, payload)`.
    ///
    /// Uses forward-seeking cursors shared across all anchors, so anchors are
    /// assumed to arrive in buffer order. Panics on anchors that cannot be
    /// resolved in this snapshot.
    pub fn summaries_for_anchors_with_payload<'a, D, A, T>(
        &'a self,
        anchors: A,
    ) -> impl 'a + Iterator<Item = (D, T)>
    where
        D: 'a + TextDimension,
        A: 'a + IntoIterator<Item = (Anchor, T)>,
    {
        let anchors = anchors.into_iter();
        let mut fragment_cursor = self
            .fragments
            .cursor::<Dimensions<Option<&Locator>, usize>>(&None);
        let mut text_cursor = self.visible_text.cursor(0);
        // Running dimension, accumulated as the cursors move forward.
        let mut position = D::zero(());

        anchors.map(move |(anchor, payload)| {
            // The min/max sentinels resolve without touching the fragment tree.
            if anchor.is_min() {
                return (D::zero(()), payload);
            } else if anchor.is_max() {
                return (D::from_text_summary(&self.visible_text.summary()), payload);
            }

            let Some(insertion) = self.try_find_fragment(&anchor) else {
                panic!(
                    "invalid insertion for buffer {}@{:?} with anchor {:?}",
                    self.remote_id(),
                    self.version,
                    anchor
                );
            };
            // TODO verbose debug because we are seeing is_max return false unexpectedly,
            // remove this once that is understood and fixed
            assert_eq!(
                insertion.timestamp,
                anchor.timestamp(),
                "invalid insertion for buffer {}@{:?}. anchor: {:?}, {:?}, {:?}, {:?}, {:?}. timestamp: {:?}, offset: {:?}, bias: {:?}",
                self.remote_id(),
                self.version,
                anchor.timestamp_replica_id,
                anchor.timestamp_value,
                anchor.offset,
                anchor.bias,
                anchor.buffer_id,
                anchor.timestamp() == clock::Lamport::MAX,
                anchor.offset == u32::MAX,
                anchor.bias == Bias::Right,
            );

            // Seek to the fragment containing the anchor, then add the
            // anchor's offset within the fragment (deleted fragments
            // contribute no visible offset).
            fragment_cursor.seek_forward(&Some(&insertion.fragment_id), Bias::Left);
            let fragment = fragment_cursor.item().unwrap();
            let mut fragment_offset = fragment_cursor.start().1;
            if fragment.visible {
                fragment_offset += (anchor.offset - insertion.split_offset) as usize;
            }

            position.add_assign(&text_cursor.summary(fragment_offset));
            (position, payload)
        })
    }
2468
    /// Resolves a single anchor to the text dimension `D`.
    pub fn summary_for_anchor<D>(&self, anchor: &Anchor) -> D
    where
        D: TextDimension,
    {
        self.text_summary_for_range(0..self.offset_for_anchor(anchor))
    }
2475
    /// Resolves an anchor to a byte offset in the visible text.
    ///
    /// Panics (via `panic_bad_anchor`) if the anchor belongs to another
    /// buffer or references an operation this snapshot hasn't observed.
    pub fn offset_for_anchor(&self, anchor: &Anchor) -> usize {
        if anchor.is_min() {
            0
        } else if anchor.is_max() {
            self.visible_text.len()
        } else {
            debug_assert_eq!(anchor.buffer_id, self.remote_id);
            debug_assert!(
                self.version.observed(anchor.timestamp()),
                "Anchor timestamp {:?} not observed by buffer {:?}",
                anchor.timestamp(),
                self.version
            );
            // Locate the insertion record for the anchor's timestamp; a
            // mismatched timestamp means the anchor is invalid here.
            let item = self.try_find_fragment(anchor);
            let Some(insertion) =
                item.filter(|insertion| insertion.timestamp == anchor.timestamp())
            else {
                self.panic_bad_anchor(anchor);
            };

            // Find the fragment in the tree and add the anchor's offset
            // within it (deleted fragments contribute no visible offset).
            let (start, _, item) = self
                .fragments
                .find::<Dimensions<Option<&Locator>, usize>, _>(
                    &None,
                    &Some(&insertion.fragment_id),
                    Bias::Left,
                );
            let fragment = item.unwrap();
            let mut fragment_offset = start.1;
            if fragment.visible {
                fragment_offset += (anchor.offset - insertion.split_offset) as usize;
            }
            fragment_offset
        }
    }
2511
    /// Panics with a diagnostic message explaining why `anchor` could not be
    /// resolved in this snapshot. Marked `#[cold]` to keep the happy path
    /// small in callers.
    #[cold]
    fn panic_bad_anchor(&self, anchor: &Anchor) -> ! {
        if anchor.buffer_id != self.remote_id {
            panic!(
                "invalid anchor - buffer id does not match: anchor {anchor:?}; buffer id: {}, version: {:?}",
                self.remote_id, self.version
            );
        } else if !self.version.observed(anchor.timestamp()) {
            panic!(
                "invalid anchor - snapshot has not observed lamport: {:?}; version: {:?}",
                anchor, self.version
            );
        } else {
            panic!(
                "invalid anchor {:?}. buffer id: {}, version: {:?}",
                anchor, self.remote_id, self.version
            );
        }
    }
2531
    /// The locator of the fragment containing `anchor`, panicking if the
    /// anchor cannot be resolved.
    fn fragment_id_for_anchor(&self, anchor: &Anchor) -> &Locator {
        self.try_fragment_id_for_anchor(anchor)
            .unwrap_or_else(|| self.panic_bad_anchor(anchor))
    }

    /// The locator of the fragment containing `anchor`, or `None` if the
    /// anchor cannot be resolved.
    fn try_fragment_id_for_anchor(&self, anchor: &Anchor) -> Option<&Locator> {
        if anchor.is_min() {
            Some(Locator::min_ref())
        } else if anchor.is_max() {
            Some(Locator::max_ref())
        } else {
            let item = self.try_find_fragment(anchor);
            // The timestamp consistency check is only enforced in debug
            // builds; release builds accept the nearest insertion.
            item.filter(|insertion| {
                !cfg!(debug_assertions) || insertion.timestamp == anchor.timestamp()
            })
            .map(|insertion| &insertion.fragment_id)
        }
    }
2550
    /// Finds the insertion-fragment record containing `anchor`, honoring the
    /// anchor's bias when it falls exactly on a fragment boundary.
    fn try_find_fragment(&self, anchor: &Anchor) -> Option<&InsertionFragment> {
        let anchor_key = InsertionFragmentKey {
            timestamp: anchor.timestamp(),
            split_offset: anchor.offset,
        };
        match self.insertions.find_with_prev::<InsertionFragmentKey, _>(
            (),
            &anchor_key,
            anchor.bias,
        ) {
            (_, _, Some((prev, insertion))) => {
                let comparison = sum_tree::KeyedItem::key(insertion).cmp(&anchor_key);
                // Fall back to the previous fragment when the found one starts
                // after the anchor, or when a left-biased anchor sits exactly
                // on a split boundary (it belongs to the preceding fragment).
                if comparison == Ordering::Greater
                    || (anchor.bias == Bias::Left
                        && comparison == Ordering::Equal
                        && anchor.offset > 0)
                {
                    prev
                } else {
                    Some(insertion)
                }
            }
            // Anchor lies beyond all recorded insertions.
            _ => self.insertions.last(),
        }
    }
2576
    /// Returns an anchor range for the given input position range that is anchored to the text in the range.
    pub fn anchor_range_inside<T: ToOffset>(&self, position: Range<T>) -> Range<Anchor> {
        self.anchor_after(position.start)..self.anchor_before(position.end)
    }

    /// Returns an anchor range for the given input position range that is anchored to the text before and after.
    pub fn anchor_range_outside<T: ToOffset>(&self, position: Range<T>) -> Range<Anchor> {
        self.anchor_before(position.start)..self.anchor_after(position.end)
    }

    /// Returns an anchor for the given input position that is anchored to the text before the position.
    pub fn anchor_before<T: ToOffset>(&self, position: T) -> Anchor {
        self.anchor_at(position, Bias::Left)
    }

    /// Returns an anchor for the given input position that is anchored to the text after the position.
    pub fn anchor_after<T: ToOffset>(&self, position: T) -> Anchor {
        self.anchor_at(position, Bias::Right)
    }

    /// Returns an anchor at the given position with the given bias.
    pub fn anchor_at<T: ToOffset>(&self, position: T, bias: Bias) -> Anchor {
        self.anchor_at_offset(position.to_offset(self), bias)
    }
2600
    /// Creates an anchor at the given byte offset with the given bias.
    ///
    /// Offsets at the buffer boundaries become the min/max sentinel anchors.
    fn anchor_at_offset(&self, mut offset: usize, bias: Bias) -> Anchor {
        if bias == Bias::Left && offset == 0 {
            Anchor::min_for_buffer(self.remote_id)
        } else if bias == Bias::Right
            // In release builds any offset >= len maps to the max anchor; in
            // debug builds only an exact len does, so out-of-bounds offsets
            // reach the debug_panic below instead of being silently clamped.
            && ((!cfg!(debug_assertions) && offset >= self.len()) || offset == self.len())
        {
            Anchor::max_for_buffer(self.remote_id)
        } else {
            // Snap to a char boundary in the direction of the bias if the
            // offset falls inside a multi-byte character.
            if !self
                .visible_text
                .assert_char_boundary::<{ cfg!(debug_assertions) }>(offset)
            {
                offset = match bias {
                    Bias::Left => self.visible_text.floor_char_boundary(offset),
                    Bias::Right => self.visible_text.ceil_char_boundary(offset),
                };
            }
            let (start, _, item) = self.fragments.find::<usize, _>(&None, &offset, bias);
            let Some(fragment) = item else {
                // We got a bad offset, likely out of bounds
                debug_panic!(
                    "Failed to find fragment at offset {} (len: {})",
                    offset,
                    self.len()
                );
                return Anchor::max_for_buffer(self.remote_id);
            };
            let overshoot = offset - start;
            Anchor::new(
                fragment.timestamp,
                fragment.insertion_offset + overshoot as u32,
                bias,
                self.remote_id,
            )
        }
    }
2637
    /// Whether `anchor` can be resolved against this snapshot: it is a
    /// sentinel, or belongs to this buffer and its operation was observed.
    pub fn can_resolve(&self, anchor: &Anchor) -> bool {
        anchor.is_min()
            || anchor.is_max()
            || (self.remote_id == anchor.buffer_id && self.version.observed(anchor.timestamp()))
    }

    /// Clips a byte offset to a valid char boundary, in the direction of `bias`.
    pub fn clip_offset(&self, offset: usize, bias: Bias) -> usize {
        self.visible_text.clip_offset(offset, bias)
    }

    /// Clips a UTF-8 point to a valid position, in the direction of `bias`.
    pub fn clip_point(&self, point: Point, bias: Bias) -> Point {
        self.visible_text.clip_point(point, bias)
    }

    /// Clips a UTF-16 offset to a valid position, in the direction of `bias`.
    pub fn clip_offset_utf16(&self, offset: OffsetUtf16, bias: Bias) -> OffsetUtf16 {
        self.visible_text.clip_offset_utf16(offset, bias)
    }

    /// Clips a possibly-invalid UTF-16 point to a valid position.
    pub fn clip_point_utf16(&self, point: Unclipped<PointUtf16>, bias: Bias) -> PointUtf16 {
        self.visible_text.clip_point_utf16(point, bias)
    }
2659
    /// Iterates over all edits made since `since`, across the whole buffer.
    pub fn edits_since<'a, D>(
        &'a self,
        since: &'a clock::Global,
    ) -> impl 'a + Iterator<Item = Edit<D>>
    where
        D: TextDimension + Ord,
    {
        self.edits_since_in_range(
            since,
            Anchor::min_for_buffer(self.remote_id)..Anchor::max_for_buffer(self.remote_id),
        )
    }

    /// Like [`Self::edits_since`], but also yields the anchor range of each edit.
    pub fn anchored_edits_since<'a, D>(
        &'a self,
        since: &'a clock::Global,
    ) -> impl 'a + Iterator<Item = (Edit<D>, Range<Anchor>)>
    where
        D: TextDimension + Ord,
    {
        self.anchored_edits_since_in_range(
            since,
            Anchor::min_for_buffer(self.remote_id)..Anchor::max_for_buffer(self.remote_id),
        )
    }
2685
    /// Iterates over the edits made since `since` within the given anchor range.
    pub fn edits_since_in_range<'a, D>(
        &'a self,
        since: &'a clock::Global,
        range: Range<Anchor>,
    ) -> impl 'a + Iterator<Item = Edit<D>>
    where
        D: TextDimension + Ord,
    {
        self.anchored_edits_since_in_range(since, range)
            .map(|item| item.0)
    }
2697
    /// Iterates over the edits made since `since` within the given anchor
    /// range, yielding each edit together with its anchor range.
    pub fn anchored_edits_since_in_range<'a, D>(
        &'a self,
        since: &'a clock::Global,
        range: Range<Anchor>,
    ) -> impl 'a + Iterator<Item = (Edit<D>, Range<Anchor>)>
    where
        D: TextDimension + Ord,
    {
        // Fast path: identical versions means no edits.
        if *since == self.version {
            return None.into_iter().flatten();
        }
        // Visit only fragments touched by operations not observed by `since`.
        let mut cursor = self.fragments.filter(&None, move |summary| {
            !since.observed_all(&summary.max_version)
        });
        cursor.next();
        let fragments_cursor = Some(cursor);
        // Translate the range's start anchor into visible/deleted offsets so
        // the `Edits` iterator can position its text cursors.
        let start_fragment_id = self.fragment_id_for_anchor(&range.start);
        let (start, _, item) = self
            .fragments
            .find::<Dimensions<Option<&Locator>, FragmentTextSummary>, _>(
                &None,
                &Some(start_fragment_id),
                Bias::Left,
            );
        let mut visible_start = start.1.visible;
        let mut deleted_start = start.1.deleted;
        if let Some(fragment) = item {
            let overshoot = (range.start.offset - fragment.insertion_offset) as usize;
            if fragment.visible {
                visible_start += overshoot;
            } else {
                deleted_start += overshoot;
            }
        }
        let end_fragment_id = self.fragment_id_for_anchor(&range.end);

        Some(Edits {
            visible_cursor: self.visible_text.cursor(visible_start),
            deleted_cursor: self.deleted_text.cursor(deleted_start),
            fragments_cursor,
            undos: &self.undo_map,
            since,
            old_end: D::zero(()),
            new_end: D::zero(()),
            range: (start_fragment_id, range.start.offset)..(end_fragment_id, range.end.offset),
            buffer_id: self.remote_id,
        })
        .into_iter()
        .flatten()
    }
2748
    /// Whether any edit since `since` changed text within the given anchor range.
    pub fn has_edits_since_in_range(&self, since: &clock::Global, range: Range<Anchor>) -> bool {
        if *since != self.version {
            let start_fragment_id = self.fragment_id_for_anchor(&range.start);
            let end_fragment_id = self.fragment_id_for_anchor(&range.end);
            // Visit only fragments touched by operations not observed by `since`.
            let mut cursor = self.fragments.filter::<_, usize>(&None, move |summary| {
                !since.observed_all(&summary.max_version)
            });
            cursor.next();
            while let Some(fragment) = cursor.item() {
                if fragment.id > *end_fragment_id {
                    break;
                }
                if fragment.id > *start_fragment_id {
                    // A visibility flip between `since` and now means the
                    // fragment's text was inserted or deleted in between.
                    let was_visible = fragment.was_visible(since, &self.undo_map);
                    let is_visible = fragment.visible;
                    if was_visible != is_visible {
                        return true;
                    }
                }
                cursor.next();
            }
        }
        false
    }
2773
2774 pub fn has_edits_since(&self, since: &clock::Global) -> bool {
2775 if *since != self.version {
2776 let mut cursor = self.fragments.filter::<_, usize>(&None, move |summary| {
2777 !since.observed_all(&summary.max_version)
2778 });
2779 cursor.next();
2780 while let Some(fragment) = cursor.item() {
2781 let was_visible = fragment.was_visible(since, &self.undo_map);
2782 let is_visible = fragment.visible;
2783 if was_visible != is_visible {
2784 return true;
2785 }
2786 cursor.next();
2787 }
2788 }
2789 false
2790 }
2791
    /// Translates an offset range in the current text to the corresponding
    /// range at a prior `version` of this buffer.
    pub fn range_to_version(&self, range: Range<usize>, version: &clock::Global) -> Range<usize> {
        let mut offsets = self.offsets_to_version([range.start, range.end], version);
        offsets.next().unwrap()..offsets.next().unwrap()
    }
2796
    /// Converts the given sequence of offsets into their corresponding offsets
    /// at a prior version of this buffer.
    ///
    /// NOTE(review): the edit cursor and `last_*` state only move forward, so
    /// this assumes `offsets` arrive in ascending order — confirm with callers.
    pub fn offsets_to_version<'a>(
        &'a self,
        offsets: impl 'a + IntoIterator<Item = usize>,
        version: &'a clock::Global,
    ) -> impl 'a + Iterator<Item = usize> {
        let mut edits = self.edits_since(version).peekable();
        // End of the last edit fully consumed, in old and new coordinates.
        let mut last_old_end = 0;
        let mut last_new_end = 0;
        offsets.into_iter().map(move |new_offset| {
            while let Some(edit) = edits.peek() {
                // Edit starts after the offset: offset lies between edits.
                if edit.new.start > new_offset {
                    break;
                }

                // Edit ends at or before the offset: consume it and continue.
                if edit.new.end <= new_offset {
                    last_new_end = edit.new.end;
                    last_old_end = edit.old.end;
                    edits.next();
                    continue;
                }

                // Offset falls inside this edit: map it into the old range,
                // clamping to the old edit's end.
                let overshoot = new_offset - edit.new.start;
                return (edit.old.start + overshoot).min(edit.old.end);
            }

            last_old_end + new_offset.saturating_sub(last_new_end)
        })
    }
2827
    /// Visually annotates a position or range with the `Debug` representation of a value. The
    /// callsite of this function is used as a key - previous annotations will be removed.
    #[cfg(debug_assertions)]
    #[track_caller]
    pub fn debug<R, V>(&self, ranges: &R, value: V)
    where
        R: debug::ToDebugRanges,
        V: std::fmt::Debug,
    {
        self.debug_with_key(std::panic::Location::caller(), ranges, value);
    }

    /// Visually annotates a position or range with the `Debug` representation of a value. Previous
    /// debug annotations with the same key will be removed. The key is also used to determine the
    /// annotation's color.
    #[cfg(debug_assertions)]
    pub fn debug_with_key<K, R, V>(&self, key: &K, ranges: &R, value: V)
    where
        K: std::hash::Hash + 'static,
        R: debug::ToDebugRanges,
        V: std::fmt::Debug,
    {
        // Anchor the ranges to the text they cover so the annotations follow
        // subsequent edits.
        let ranges = ranges
            .to_debug_ranges(self)
            .into_iter()
            .map(|range| self.anchor_after(range.start)..self.anchor_before(range.end))
            .collect();
        debug::GlobalDebugRanges::with_locked(|debug_ranges| {
            debug_ranges.insert(key, ranges, format!("{value:?}").into());
        });
    }
2859}
2860
/// Incrementally builds new visible and deleted ropes from the previous
/// versions of both, copying runs of old text and routing each run to either
/// the new visible or new deleted rope.
struct RopeBuilder<'a> {
    // Read cursors over the previous visible and deleted text.
    old_visible_cursor: rope::Cursor<'a>,
    old_deleted_cursor: rope::Cursor<'a>,
    // The ropes being accumulated.
    new_visible: Rope,
    new_deleted: Rope,
}

impl<'a> RopeBuilder<'a> {
    fn new(old_visible_cursor: rope::Cursor<'a>, old_deleted_cursor: rope::Cursor<'a>) -> Self {
        Self {
            old_visible_cursor,
            old_deleted_cursor,
            new_visible: Rope::new(),
            new_deleted: Rope::new(),
        }
    }

    /// Copies the given amounts of visible and deleted text unchanged.
    fn append(&mut self, len: FragmentTextSummary) {
        self.push(len.visible, true, true);
        self.push(len.deleted, false, false);
    }

    /// Copies one fragment's text, routed by its old and new visibility.
    fn push_fragment(&mut self, fragment: &Fragment, was_visible: bool) {
        debug_assert!(fragment.len > 0);
        self.push(fragment.len as usize, was_visible, fragment.visible)
    }

    /// Copies `len` bytes from the old visible or deleted rope (per
    /// `was_visible`) into the new visible or deleted rope (per `is_visible`).
    fn push(&mut self, len: usize, was_visible: bool, is_visible: bool) {
        let text = if was_visible {
            self.old_visible_cursor
                .slice(self.old_visible_cursor.offset() + len)
        } else {
            self.old_deleted_cursor
                .slice(self.old_deleted_cursor.offset() + len)
        };
        if is_visible {
            self.new_visible.append(text);
        } else {
            self.new_deleted.append(text);
        }
    }

    /// Appends newly-inserted text to the new visible rope.
    fn push_str(&mut self, text: &str) {
        self.new_visible.push(text);
    }

    /// Consumes the remaining old text and returns `(visible, deleted)` ropes.
    fn finish(mut self) -> (Rope, Rope) {
        self.new_visible.append(self.old_visible_cursor.suffix());
        self.new_deleted.append(self.old_deleted_cursor.suffix());
        (self.new_visible, self.new_deleted)
    }
}
2913
impl<D: TextDimension + Ord, F: FnMut(&FragmentSummary) -> bool> Iterator for Edits<'_, D, F> {
    type Item = (Edit<D>, Range<Anchor>);

    /// Walks changed fragments, coalescing adjacent insertions/deletions into
    /// a single `Edit` until a gap of unchanged text is reached.
    fn next(&mut self) -> Option<Self::Item> {
        let mut pending_edit: Option<Self::Item> = None;
        let cursor = self.fragments_cursor.as_mut()?;

        while let Some(fragment) = cursor.item() {
            // Skip fragments before the requested range; stop past its end.
            if fragment.id < *self.range.start.0 {
                cursor.next();
                continue;
            } else if fragment.id > *self.range.end.0 {
                break;
            }

            // Account for the unchanged visible text between the previous
            // position and this fragment in both coordinate spaces.
            if cursor.start().visible > self.visible_cursor.offset() {
                let summary = self.visible_cursor.summary(cursor.start().visible);
                self.old_end.add_assign(&summary);
                self.new_end.add_assign(&summary);
            }

            // A gap after a pending edit ends the current coalesced run.
            if pending_edit
                .as_ref()
                .is_some_and(|(change, _)| change.new.end < self.new_end)
            {
                break;
            }

            let start_anchor = Anchor::new(
                fragment.timestamp,
                fragment.insertion_offset,
                Bias::Right,
                self.buffer_id,
            );
            let end_anchor = Anchor::new(
                fragment.timestamp,
                fragment.insertion_offset + fragment.len,
                Bias::Left,
                self.buffer_id,
            );

            if !fragment.was_visible(self.since, self.undos) && fragment.visible {
                // Newly visible since `since`: an insertion in the new text.
                let mut visible_end = cursor.end().visible;
                // Clamp to the requested range's end offset within this fragment.
                if fragment.id == *self.range.end.0 {
                    visible_end = cmp::min(
                        visible_end,
                        cursor.start().visible
                            + (self.range.end.1 - fragment.insertion_offset) as usize,
                    );
                }

                let fragment_summary = self.visible_cursor.summary(visible_end);
                let mut new_end = self.new_end;
                new_end.add_assign(&fragment_summary);
                // Extend the pending edit if adjacent, otherwise start one.
                if let Some((edit, range)) = pending_edit.as_mut() {
                    edit.new.end = new_end;
                    range.end = end_anchor;
                } else {
                    pending_edit = Some((
                        Edit {
                            old: self.old_end..self.old_end,
                            new: self.new_end..new_end,
                        },
                        start_anchor..end_anchor,
                    ));
                }

                self.new_end = new_end;
            } else if fragment.was_visible(self.since, self.undos) && !fragment.visible {
                // Visible at `since` but deleted now: a deletion of old text.
                let mut deleted_end = cursor.end().deleted;
                // Clamp to the requested range's end offset within this fragment.
                if fragment.id == *self.range.end.0 {
                    deleted_end = cmp::min(
                        deleted_end,
                        cursor.start().deleted
                            + (self.range.end.1 - fragment.insertion_offset) as usize,
                    );
                }

                if cursor.start().deleted > self.deleted_cursor.offset() {
                    self.deleted_cursor.seek_forward(cursor.start().deleted);
                }
                let fragment_summary = self.deleted_cursor.summary(deleted_end);
                let mut old_end = self.old_end;
                old_end.add_assign(&fragment_summary);
                // Extend the pending edit if adjacent, otherwise start one.
                if let Some((edit, range)) = pending_edit.as_mut() {
                    edit.old.end = old_end;
                    range.end = end_anchor;
                } else {
                    pending_edit = Some((
                        Edit {
                            old: self.old_end..old_end,
                            new: self.new_end..self.new_end,
                        },
                        start_anchor..end_anchor,
                    ));
                }

                self.old_end = old_end;
            }

            cursor.next();
        }

        pending_edit
    }
}
3020
impl Fragment {
    /// Whether the fragment is currently visible: its insertion is not undone
    /// and every deletion affecting it has been undone.
    fn is_visible(&self, undos: &UndoMap) -> bool {
        !undos.is_undone(self.timestamp) && self.deletions.iter().all(|d| undos.is_undone(*d))
    }

    /// Whether the fragment was visible at `version`: its insertion was
    /// observed and not undone, and every observed deletion was undone.
    fn was_visible(&self, version: &clock::Global, undos: &UndoMap) -> bool {
        (version.observed(self.timestamp) && !undos.was_undone(self.timestamp, version))
            && self
                .deletions
                .iter()
                .all(|d| !version.observed(*d) || undos.was_undone(*d, version))
    }
}
3034
3035impl sum_tree::Item for Fragment {
3036 type Summary = FragmentSummary;
3037
3038 fn summary(&self, _cx: &Option<clock::Global>) -> Self::Summary {
3039 let mut max_version = clock::Global::new();
3040 max_version.observe(self.timestamp);
3041 for deletion in &self.deletions {
3042 max_version.observe(*deletion);
3043 }
3044 max_version.join(&self.max_undos);
3045
3046 let mut min_insertion_version = clock::Global::new();
3047 min_insertion_version.observe(self.timestamp);
3048 let max_insertion_version = min_insertion_version.clone();
3049 if self.visible {
3050 FragmentSummary {
3051 max_id: self.id.clone(),
3052 text: FragmentTextSummary {
3053 visible: self.len as usize,
3054 deleted: 0,
3055 },
3056 max_version,
3057 min_insertion_version,
3058 max_insertion_version,
3059 }
3060 } else {
3061 FragmentSummary {
3062 max_id: self.id.clone(),
3063 text: FragmentTextSummary {
3064 visible: 0,
3065 deleted: self.len as usize,
3066 },
3067 max_version,
3068 min_insertion_version,
3069 max_insertion_version,
3070 }
3071 }
3072 }
3073}
3074
impl sum_tree::Summary for FragmentSummary {
    type Context<'a> = &'a Option<clock::Global>;

    fn zero(_cx: Self::Context<'_>) -> Self {
        Default::default()
    }

    /// Merges `other` (the summary of later fragments) into `self`: the max
    /// id is replaced, text lengths accumulate, and the version sets are
    /// joined (or met, for the minimum insertion version).
    fn add_summary(&mut self, other: &Self, _: Self::Context<'_>) {
        self.max_id.assign(&other.max_id);
        self.text.visible += &other.text.visible;
        self.text.deleted += &other.text.deleted;
        self.max_version.join(&other.max_version);
        self.min_insertion_version
            .meet(&other.min_insertion_version);
        self.max_insertion_version
            .join(&other.max_insertion_version);
    }
}
3093
impl Default for FragmentSummary {
    /// The identity summary: minimum locator, zero lengths, empty versions.
    fn default() -> Self {
        FragmentSummary {
            max_id: Locator::min(),
            text: FragmentTextSummary::default(),
            max_version: clock::Global::new(),
            min_insertion_version: clock::Global::new(),
            max_insertion_version: clock::Global::new(),
        }
    }
}
3105
impl sum_tree::Item for InsertionFragment {
    type Summary = InsertionFragmentKey;

    /// An insertion fragment's summary is simply its key: the insertion
    /// timestamp plus the split offset within that insertion.
    fn summary(&self, _cx: ()) -> Self::Summary {
        InsertionFragmentKey {
            timestamp: self.timestamp,
            split_offset: self.split_offset,
        }
    }
}

impl sum_tree::KeyedItem for InsertionFragment {
    type Key = InsertionFragmentKey;

    // The key and the summary are the same value.
    fn key(&self) -> Self::Key {
        sum_tree::Item::summary(self, ())
    }
}
3124
impl InsertionFragment {
    /// Builds the insertion-index record pointing at `fragment`.
    fn new(fragment: &Fragment) -> Self {
        Self {
            timestamp: fragment.timestamp,
            split_offset: fragment.insertion_offset,
            fragment_id: fragment.id.clone(),
        }
    }

    /// Convenience constructor for a sum-tree `Insert` edit of this record.
    fn insert_new(fragment: &Fragment) -> sum_tree::Edit<Self> {
        sum_tree::Edit::Insert(Self::new(fragment))
    }
}
3138
impl sum_tree::ContextLessSummary for InsertionFragmentKey {
    fn zero() -> Self {
        InsertionFragmentKey {
            timestamp: Lamport::MIN,
            split_offset: 0,
        }
    }

    // Keys are totally ordered; combining summaries keeps the rightmost key.
    fn add_summary(&mut self, summary: &Self) {
        *self = *summary;
    }
}
3151
3152#[derive(Copy, Clone, Debug, Default, PartialEq, Eq, PartialOrd, Ord, Hash)]
3153pub struct FullOffset(pub usize);
3154
impl ops::AddAssign<usize> for FullOffset {
    /// Advances this full offset by `rhs` bytes.
    fn add_assign(&mut self, rhs: usize) {
        self.0 += rhs;
    }
}
3160
3161impl ops::Add<usize> for FullOffset {
3162 type Output = Self;
3163
3164 fn add(mut self, rhs: usize) -> Self::Output {
3165 self += rhs;
3166 self
3167 }
3168}
3169
impl ops::Sub for FullOffset {
    type Output = usize;

    /// Distance in bytes between two full offsets. Underflows (panicking in
    /// debug builds) if `rhs` is greater than `self`.
    fn sub(self, rhs: Self) -> Self::Output {
        self.0 - rhs.0
    }
}
3177
/// A plain `usize` dimension accumulates only *visible* text length, so it can
/// be used to seek by on-screen offset.
impl sum_tree::Dimension<'_, FragmentSummary> for usize {
    fn zero(_: &Option<clock::Global>) -> Self {
        Default::default()
    }

    fn add_summary(&mut self, summary: &FragmentSummary, _: &Option<clock::Global>) {
        *self += summary.text.visible;
    }
}
3187
/// A `FullOffset` dimension accumulates both visible and deleted text, giving
/// an offset into the buffer's complete insertion history.
impl sum_tree::Dimension<'_, FragmentSummary> for FullOffset {
    fn zero(_: &Option<clock::Global>) -> Self {
        Default::default()
    }

    fn add_summary(&mut self, summary: &FragmentSummary, _: &Option<clock::Global>) {
        self.0 += summary.text.visible + summary.text.deleted;
    }
}
3197
/// Tracks the maximum locator seen so far; `None` until the first summary is
/// accumulated.
impl<'a> sum_tree::Dimension<'a, FragmentSummary> for Option<&'a Locator> {
    fn zero(_: &Option<clock::Global>) -> Self {
        Default::default()
    }

    fn add_summary(&mut self, summary: &'a FragmentSummary, _: &Option<clock::Global>) {
        *self = Some(&summary.max_id);
    }
}
3207
impl sum_tree::SeekTarget<'_, FragmentSummary, FragmentTextSummary> for usize {
    /// Compares a visible-text offset against the cursor's position, ignoring
    /// deleted text entirely.
    // UFCS `Ord::cmp` keeps the call unambiguous relative to this trait's
    // own `cmp` method.
    fn cmp(
        &self,
        cursor_location: &FragmentTextSummary,
        _: &Option<clock::Global>,
    ) -> cmp::Ordering {
        Ord::cmp(self, &cursor_location.visible)
    }
}
3217
/// A `FullOffset` interpreted relative to a particular version: either a
/// concrete offset, or `Invalid` when the fragments summarized so far were not
/// all observed by that version.
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
enum VersionedFullOffset {
    Offset(FullOffset),
    Invalid,
}
3223
3224impl VersionedFullOffset {
3225 fn full_offset(&self) -> FullOffset {
3226 if let Self::Offset(position) = self {
3227 *position
3228 } else {
3229 panic!("invalid version")
3230 }
3231 }
3232}
3233
3234impl Default for VersionedFullOffset {
3235 fn default() -> Self {
3236 Self::Offset(Default::default())
3237 }
3238}
3239
3240impl<'a> sum_tree::Dimension<'a, FragmentSummary> for VersionedFullOffset {
3241 fn zero(_cx: &Option<clock::Global>) -> Self {
3242 Default::default()
3243 }
3244
3245 fn add_summary(&mut self, summary: &'a FragmentSummary, cx: &Option<clock::Global>) {
3246 if let Self::Offset(offset) = self {
3247 let version = cx.as_ref().unwrap();
3248 if version.observed_all(&summary.max_insertion_version) {
3249 *offset += summary.text.visible + summary.text.deleted;
3250 } else if version.observed_any(&summary.min_insertion_version) {
3251 *self = Self::Invalid;
3252 }
3253 }
3254 }
3255}
3256
3257impl sum_tree::SeekTarget<'_, FragmentSummary, Self> for VersionedFullOffset {
3258 fn cmp(&self, cursor_position: &Self, _: &Option<clock::Global>) -> cmp::Ordering {
3259 match (self, cursor_position) {
3260 (Self::Offset(a), Self::Offset(b)) => Ord::cmp(a, b),
3261 (Self::Offset(_), Self::Invalid) => cmp::Ordering::Less,
3262 (Self::Invalid, _) => unreachable!(),
3263 }
3264 }
3265}
3266
3267impl Operation {
3268 fn replica_id(&self) -> ReplicaId {
3269 operation_queue::Operation::lamport_timestamp(self).replica_id
3270 }
3271
3272 pub fn timestamp(&self) -> clock::Lamport {
3273 match self {
3274 Operation::Edit(edit) => edit.timestamp,
3275 Operation::Undo(undo) => undo.timestamp,
3276 }
3277 }
3278
3279 pub fn as_edit(&self) -> Option<&EditOperation> {
3280 match self {
3281 Operation::Edit(edit) => Some(edit),
3282 _ => None,
3283 }
3284 }
3285
3286 pub fn is_edit(&self) -> bool {
3287 matches!(self, Operation::Edit { .. })
3288 }
3289}
3290
impl operation_queue::Operation for Operation {
    /// The operation's Lamport timestamp, used to order deferred operations.
    fn lamport_timestamp(&self) -> clock::Lamport {
        match self {
            Operation::Edit(edit) => edit.timestamp,
            Operation::Undo(undo) => undo.timestamp,
        }
    }
}
3299
/// Conversion of a buffer coordinate into an absolute byte offset within the
/// snapshot's visible text.
pub trait ToOffset {
    /// Resolves this position to a byte offset in `snapshot`'s visible text.
    fn to_offset(&self, snapshot: &BufferSnapshot) -> usize;
    /// Turns this point into the next offset in the buffer that comes after this, respecting utf8 boundaries.
    // NOTE(review): relies on `ceil_char_boundary` clamping when
    // `to_offset + 1` exceeds the text length — confirm in `rope`.
    fn to_next_offset(&self, snapshot: &BufferSnapshot) -> usize {
        snapshot
            .visible_text
            .ceil_char_boundary(self.to_offset(snapshot) + 1)
    }
    /// Turns this point into the previous offset in the buffer that comes before this, respecting utf8 boundaries.
    // `saturating_sub` keeps offset 0 mapped to 0 rather than underflowing.
    fn to_previous_offset(&self, snapshot: &BufferSnapshot) -> usize {
        snapshot
            .visible_text
            .floor_char_boundary(self.to_offset(snapshot).saturating_sub(1))
    }
}
3315
impl ToOffset for Point {
    /// Converts a row/column point to a byte offset.
    #[inline]
    fn to_offset(&self, snapshot: &BufferSnapshot) -> usize {
        snapshot.point_to_offset(*self)
    }
}
3322
impl ToOffset for usize {
    /// Returns the offset itself after checking it lies on a char boundary.
    // NOTE(review): the const argument presumably makes `assert_char_boundary`
    // panic on a non-boundary offset in debug builds; when it instead reports
    // `false`, the offset is floored to the previous char boundary — confirm
    // against `rope`'s API.
    #[track_caller]
    fn to_offset(&self, snapshot: &BufferSnapshot) -> usize {
        if !snapshot
            .as_rope()
            .assert_char_boundary::<{ cfg!(debug_assertions) }>(*self)
        {
            snapshot.as_rope().floor_char_boundary(*self)
        } else {
            *self
        }
    }
}
3336
impl ToOffset for Anchor {
    /// Resolves the anchor to its current byte offset in `snapshot`.
    #[inline]
    fn to_offset(&self, snapshot: &BufferSnapshot) -> usize {
        snapshot.summary_for_anchor(self)
    }
}
3343
/// Blanket impl so references to any offset-convertible type also convert.
impl<T: ToOffset> ToOffset for &T {
    #[inline]
    fn to_offset(&self, content: &BufferSnapshot) -> usize {
        (*self).to_offset(content)
    }
}
3350
impl ToOffset for PointUtf16 {
    /// Converts a UTF-16 row/column point to a byte offset.
    #[inline]
    fn to_offset(&self, snapshot: &BufferSnapshot) -> usize {
        snapshot.point_utf16_to_offset(*self)
    }
}
3357
impl ToOffset for Unclipped<PointUtf16> {
    /// Delegates to the snapshot's conversion for unclipped UTF-16 points.
    #[inline]
    fn to_offset(&self, snapshot: &BufferSnapshot) -> usize {
        snapshot.unclipped_point_utf16_to_offset(*self)
    }
}
3364
/// Conversion of a buffer coordinate into a UTF-8 row/column [`Point`].
pub trait ToPoint {
    fn to_point(&self, snapshot: &BufferSnapshot) -> Point;
}
3368
impl ToPoint for Anchor {
    /// Resolves the anchor to its current row/column position.
    #[inline]
    fn to_point(&self, snapshot: &BufferSnapshot) -> Point {
        snapshot.summary_for_anchor(self)
    }
}
3375
impl ToPoint for usize {
    /// Converts a byte offset to a row/column point.
    #[inline]
    fn to_point(&self, snapshot: &BufferSnapshot) -> Point {
        snapshot.offset_to_point(*self)
    }
}
3382
impl ToPoint for Point {
    /// A point is already a point; identity conversion.
    #[inline]
    fn to_point(&self, _: &BufferSnapshot) -> Point {
        *self
    }
}
3389
impl ToPoint for Unclipped<PointUtf16> {
    /// Delegates to the snapshot's conversion for unclipped UTF-16 points.
    #[inline]
    fn to_point(&self, snapshot: &BufferSnapshot) -> Point {
        snapshot.unclipped_point_utf16_to_point(*self)
    }
}
3396
/// Conversion of a buffer coordinate into a UTF-16 row/column point.
pub trait ToPointUtf16 {
    fn to_point_utf16(&self, snapshot: &BufferSnapshot) -> PointUtf16;
}
3400
impl ToPointUtf16 for Anchor {
    /// Resolves the anchor to its current UTF-16 row/column position.
    #[inline]
    fn to_point_utf16(&self, snapshot: &BufferSnapshot) -> PointUtf16 {
        snapshot.summary_for_anchor(self)
    }
}
3407
impl ToPointUtf16 for usize {
    /// Converts a byte offset to a UTF-16 row/column point.
    #[inline]
    fn to_point_utf16(&self, snapshot: &BufferSnapshot) -> PointUtf16 {
        snapshot.offset_to_point_utf16(*self)
    }
}
3414
impl ToPointUtf16 for PointUtf16 {
    /// Identity conversion.
    #[inline]
    fn to_point_utf16(&self, _: &BufferSnapshot) -> PointUtf16 {
        *self
    }
}
3421
impl ToPointUtf16 for Point {
    /// Converts a UTF-8 point to the equivalent UTF-16 point.
    #[inline]
    fn to_point_utf16(&self, snapshot: &BufferSnapshot) -> PointUtf16 {
        snapshot.point_to_point_utf16(*self)
    }
}
3428
/// Conversion of a buffer coordinate into an absolute UTF-16 code-unit offset.
pub trait ToOffsetUtf16 {
    fn to_offset_utf16(&self, snapshot: &BufferSnapshot) -> OffsetUtf16;
}
3432
impl ToOffsetUtf16 for Anchor {
    /// Resolves the anchor to its current UTF-16 offset.
    #[inline]
    fn to_offset_utf16(&self, snapshot: &BufferSnapshot) -> OffsetUtf16 {
        snapshot.summary_for_anchor(self)
    }
}
3439
impl ToOffsetUtf16 for usize {
    /// Converts a byte offset to a UTF-16 offset.
    #[inline]
    fn to_offset_utf16(&self, snapshot: &BufferSnapshot) -> OffsetUtf16 {
        snapshot.offset_to_offset_utf16(*self)
    }
}
3446
impl ToOffsetUtf16 for OffsetUtf16 {
    /// Identity conversion.
    #[inline]
    fn to_offset_utf16(&self, _snapshot: &BufferSnapshot) -> OffsetUtf16 {
        *self
    }
}
3453
/// Construction of a coordinate type from an [`Anchor`] resolved against a
/// snapshot; the inverse direction of the `To*` conversion traits above.
pub trait FromAnchor {
    fn from_anchor(anchor: &Anchor, snapshot: &BufferSnapshot) -> Self;
}
3457
impl FromAnchor for Anchor {
    /// Identity conversion; the snapshot is not consulted.
    #[inline]
    fn from_anchor(anchor: &Anchor, _snapshot: &BufferSnapshot) -> Self {
        *anchor
    }
}
3464
impl FromAnchor for Point {
    /// Resolves the anchor to a row/column point.
    #[inline]
    fn from_anchor(anchor: &Anchor, snapshot: &BufferSnapshot) -> Self {
        snapshot.summary_for_anchor(anchor)
    }
}
3471
impl FromAnchor for PointUtf16 {
    /// Resolves the anchor to a UTF-16 row/column point.
    #[inline]
    fn from_anchor(anchor: &Anchor, snapshot: &BufferSnapshot) -> Self {
        snapshot.summary_for_anchor(anchor)
    }
}
3478
impl FromAnchor for usize {
    /// Resolves the anchor to a byte offset.
    #[inline]
    fn from_anchor(anchor: &Anchor, snapshot: &BufferSnapshot) -> Self {
        snapshot.summary_for_anchor(anchor)
    }
}
3485
/// The line-separator convention used when serializing a buffer to disk.
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum LineEnding {
    // "\n"
    Unix,
    // "\r\n"
    Windows,
}
3491
3492impl Default for LineEnding {
3493 fn default() -> Self {
3494 #[cfg(unix)]
3495 return Self::Unix;
3496
3497 #[cfg(not(unix))]
3498 return Self::Windows;
3499 }
3500}
3501
3502impl LineEnding {
3503 pub fn as_str(&self) -> &'static str {
3504 match self {
3505 LineEnding::Unix => "\n",
3506 LineEnding::Windows => "\r\n",
3507 }
3508 }
3509
3510 pub fn label(&self) -> &'static str {
3511 match self {
3512 LineEnding::Unix => "LF",
3513 LineEnding::Windows => "CRLF",
3514 }
3515 }
3516
3517 pub fn detect(text: &str) -> Self {
3518 let mut max_ix = cmp::min(text.len(), 1000);
3519 while !text.is_char_boundary(max_ix) {
3520 max_ix -= 1;
3521 }
3522
3523 if let Some(ix) = text[..max_ix].find(['\n']) {
3524 if ix > 0 && text.as_bytes()[ix - 1] == b'\r' {
3525 Self::Windows
3526 } else {
3527 Self::Unix
3528 }
3529 } else {
3530 Self::default()
3531 }
3532 }
3533
3534 pub fn normalize(text: &mut String) {
3535 if let Cow::Owned(replaced) = LINE_SEPARATORS_REGEX.replace_all(text, "\n") {
3536 *text = replaced;
3537 }
3538 }
3539
3540 pub fn normalize_arc(text: Arc<str>) -> Arc<str> {
3541 if let Cow::Owned(replaced) = LINE_SEPARATORS_REGEX.replace_all(&text, "\n") {
3542 replaced.into()
3543 } else {
3544 text
3545 }
3546 }
3547
3548 pub fn normalize_cow(text: Cow<str>) -> Cow<str> {
3549 if let Cow::Owned(replaced) = LINE_SEPARATORS_REGEX.replace_all(&text, "\n") {
3550 replaced.into()
3551 } else {
3552 text
3553 }
3554 }
3555}
3556
/// Yields the text of `rope` as `&str` pieces in which every newline is
/// replaced by `line_ending` (each separator is emitted as its own piece
/// between line contents).
// NOTE(review): `str::lines` also strips a '\r' preceding '\n', and a "\r\n"
// pair could in principle straddle a rope chunk boundary — confirm that chunk
// contents are already normalized so neither case arises here.
pub fn chunks_with_line_ending(rope: &Rope, line_ending: LineEnding) -> impl Iterator<Item = &str> {
    rope.chunks().flat_map(move |chunk| {
        // `newline` is false only for the first line of the chunk; every
        // subsequent line is preceded by one separator piece.
        let mut newline = false;
        // `lines()` drops a trailing empty line, so when the chunk ends in
        // '\n' we must emit one extra trailing separator ourselves.
        let end_with_newline = chunk.ends_with('\n').then_some(line_ending.as_str());
        chunk
            .lines()
            .flat_map(move |line| {
                let ending = if newline {
                    Some(line_ending.as_str())
                } else {
                    None
                };
                newline = true;
                ending.into_iter().chain([line])
            })
            .chain(end_with_newline)
    })
}
3575
3576#[cfg(debug_assertions)]
3577pub mod debug {
3578 use super::*;
3579 use parking_lot::Mutex;
3580 use std::any::TypeId;
3581 use std::hash::{Hash, Hasher};
3582
3583 static GLOBAL_DEBUG_RANGES: Mutex<Option<GlobalDebugRanges>> = Mutex::new(None);
3584
3585 pub struct GlobalDebugRanges {
3586 pub ranges: Vec<DebugRange>,
3587 key_to_occurrence_index: HashMap<Key, usize>,
3588 next_occurrence_index: usize,
3589 }
3590
3591 pub struct DebugRange {
3592 key: Key,
3593 pub ranges: Vec<Range<Anchor>>,
3594 pub value: Arc<str>,
3595 pub occurrence_index: usize,
3596 }
3597
3598 #[derive(Debug, Clone, PartialEq, Eq, Hash)]
3599 struct Key {
3600 type_id: TypeId,
3601 hash: u64,
3602 }
3603
3604 impl GlobalDebugRanges {
3605 pub fn with_locked<R>(f: impl FnOnce(&mut Self) -> R) -> R {
3606 let mut state = GLOBAL_DEBUG_RANGES.lock();
3607 if state.is_none() {
3608 *state = Some(GlobalDebugRanges {
3609 ranges: Vec::new(),
3610 key_to_occurrence_index: HashMap::default(),
3611 next_occurrence_index: 0,
3612 });
3613 }
3614 if let Some(global_debug_ranges) = state.as_mut() {
3615 f(global_debug_ranges)
3616 } else {
3617 unreachable!()
3618 }
3619 }
3620
3621 pub fn insert<K: Hash + 'static>(
3622 &mut self,
3623 key: &K,
3624 ranges: Vec<Range<Anchor>>,
3625 value: Arc<str>,
3626 ) {
3627 let occurrence_index = *self
3628 .key_to_occurrence_index
3629 .entry(Key::new(key))
3630 .or_insert_with(|| {
3631 let occurrence_index = self.next_occurrence_index;
3632 self.next_occurrence_index += 1;
3633 occurrence_index
3634 });
3635 let key = Key::new(key);
3636 let existing = self
3637 .ranges
3638 .iter()
3639 .enumerate()
3640 .rfind(|(_, existing)| existing.key == key);
3641 if let Some((existing_ix, _)) = existing {
3642 self.ranges.remove(existing_ix);
3643 }
3644 self.ranges.push(DebugRange {
3645 ranges,
3646 key,
3647 value,
3648 occurrence_index,
3649 });
3650 }
3651
3652 pub fn remove<K: Hash + 'static>(&mut self, key: &K) {
3653 self.remove_impl(&Key::new(key));
3654 }
3655
3656 fn remove_impl(&mut self, key: &Key) {
3657 let existing = self
3658 .ranges
3659 .iter()
3660 .enumerate()
3661 .rfind(|(_, existing)| &existing.key == key);
3662 if let Some((existing_ix, _)) = existing {
3663 self.ranges.remove(existing_ix);
3664 }
3665 }
3666
3667 pub fn remove_all_with_key_type<K: 'static>(&mut self) {
3668 self.ranges
3669 .retain(|item| item.key.type_id != TypeId::of::<K>());
3670 }
3671 }
3672
3673 impl Key {
3674 fn new<K: Hash + 'static>(key: &K) -> Self {
3675 let type_id = TypeId::of::<K>();
3676 let mut hasher = collections::FxHasher::default();
3677 key.hash(&mut hasher);
3678 Key {
3679 type_id,
3680 hash: hasher.finish(),
3681 }
3682 }
3683 }
3684
3685 pub trait ToDebugRanges {
3686 fn to_debug_ranges(&self, snapshot: &BufferSnapshot) -> Vec<Range<usize>>;
3687 }
3688
3689 impl<T: ToOffset> ToDebugRanges for T {
3690 fn to_debug_ranges(&self, snapshot: &BufferSnapshot) -> Vec<Range<usize>> {
3691 [self.to_offset(snapshot)].to_debug_ranges(snapshot)
3692 }
3693 }
3694
3695 impl<T: ToOffset + Clone> ToDebugRanges for Range<T> {
3696 fn to_debug_ranges(&self, snapshot: &BufferSnapshot) -> Vec<Range<usize>> {
3697 [self.clone()].to_debug_ranges(snapshot)
3698 }
3699 }
3700
3701 impl<T: ToOffset> ToDebugRanges for Vec<T> {
3702 fn to_debug_ranges(&self, snapshot: &BufferSnapshot) -> Vec<Range<usize>> {
3703 self.as_slice().to_debug_ranges(snapshot)
3704 }
3705 }
3706
3707 impl<T: ToOffset> ToDebugRanges for Vec<Range<T>> {
3708 fn to_debug_ranges(&self, snapshot: &BufferSnapshot) -> Vec<Range<usize>> {
3709 self.as_slice().to_debug_ranges(snapshot)
3710 }
3711 }
3712
3713 impl<T: ToOffset> ToDebugRanges for [T] {
3714 fn to_debug_ranges(&self, snapshot: &BufferSnapshot) -> Vec<Range<usize>> {
3715 self.iter()
3716 .map(|item| {
3717 let offset = item.to_offset(snapshot);
3718 offset..offset
3719 })
3720 .collect()
3721 }
3722 }
3723
3724 impl<T: ToOffset> ToDebugRanges for [Range<T>] {
3725 fn to_debug_ranges(&self, snapshot: &BufferSnapshot) -> Vec<Range<usize>> {
3726 self.iter()
3727 .map(|range| range.start.to_offset(snapshot)..range.end.to_offset(snapshot))
3728 .collect()
3729 }
3730 }
3731}