1mod anchor;
2pub mod locator;
3#[cfg(any(test, feature = "test-support"))]
4pub mod network;
5pub mod operation_queue;
6mod patch;
7mod selection;
8pub mod subscription;
9#[cfg(test)]
10mod tests;
11mod undo_map;
12
13pub use anchor::*;
14use anyhow::{Context as _, Result};
15use clock::Lamport;
16pub use clock::ReplicaId;
17use collections::{HashMap, HashSet};
18use locator::Locator;
19use operation_queue::OperationQueue;
20pub use patch::Patch;
21use postage::{oneshot, prelude::*};
22
23use regex::Regex;
24pub use rope::*;
25pub use selection::*;
26use smallvec::SmallVec;
27use std::{
28 borrow::Cow,
29 cmp::{self, Ordering, Reverse},
30 fmt::Display,
31 future::Future,
32 iter::Iterator,
33 num::NonZeroU64,
34 ops::{self, Deref, Range, Sub},
35 str,
36 sync::{Arc, LazyLock},
37 time::{Duration, Instant},
38};
39pub use subscription::*;
40pub use sum_tree::Bias;
41use sum_tree::{Dimensions, FilterCursor, SumTree, Summary, TreeMap, TreeSet};
42use undo_map::UndoMap;
43use util::debug_panic;
44
45#[cfg(any(test, feature = "test-support"))]
46use util::RandomCharIter;
47
// Matches Windows ("\r\n") and legacy Mac ("\r") line separators.
// Compiled lazily on first use.
static LINE_SEPARATORS_REGEX: LazyLock<Regex> =
    LazyLock::new(|| Regex::new(r"\r\n|\r").expect("Failed to create LINE_SEPARATORS_REGEX"));
50
/// The maximum length of a single insertion operation.
/// Fragments larger than this will be split into multiple smaller
/// fragments. This allows us to use relative `u32` offsets instead of `usize`,
/// reducing memory usage.
// In `cfg(test)` builds the limit is tiny (16) so the fragment-splitting path
// in `Buffer::new_normalized` is exercised by ordinary tests.
const MAX_INSERTION_LEN: usize = if cfg!(test) { 16 } else { u32::MAX as usize };
56
/// Identifies a transaction in a buffer's history. The id is the Lamport
/// timestamp ticked when the transaction was started (see `History::start_transaction`).
pub type TransactionId = clock::Lamport;
58
/// A mutable text buffer that applies both local edits and operations
/// received from other replicas, tracking history for undo/redo.
pub struct Buffer {
    /// The current state of the buffer's text and fragment tree.
    snapshot: BufferSnapshot,
    /// Undo/redo stacks plus the log of every operation seen.
    history: History,
    /// Remote operations that could not be applied yet (see `apply_ops`).
    deferred_ops: OperationQueue<Operation>,
    /// Replicas that currently have at least one deferred operation.
    deferred_replicas: HashSet<ReplicaId>,
    /// Clock used to timestamp operations authored by this replica.
    pub lamport_clock: clock::Lamport,
    /// Subscribers notified with an offset patch whenever the text changes.
    subscriptions: Topic<usize>,
    /// Channels resolved once a given edit operation has been applied.
    edit_id_resolvers: HashMap<clock::Lamport, Vec<oneshot::Sender<()>>>,
    /// Channels resolved once the buffer has observed a given version.
    wait_for_version_txs: Vec<(clock::Global, oneshot::Sender<()>)>,
}
69
/// Non-zero numeric id of a buffer (see `Buffer::remote_id`).
/// Wrapping `NonZeroU64` lets `Option<BufferId>` stay 8 bytes.
#[repr(transparent)]
#[derive(Clone, Copy, Debug, Hash, PartialEq, PartialOrd, Ord, Eq)]
pub struct BufferId(NonZeroU64);
73
74impl Display for BufferId {
75 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
76 write!(f, "{}", self.0)
77 }
78}
79
80impl From<NonZeroU64> for BufferId {
81 fn from(id: NonZeroU64) -> Self {
82 BufferId(id)
83 }
84}
85
86impl BufferId {
87 /// Returns Err if `id` is outside of BufferId domain.
88 pub fn new(id: u64) -> anyhow::Result<Self> {
89 let id = NonZeroU64::new(id).context("Buffer id cannot be 0.")?;
90 Ok(Self(id))
91 }
92
93 /// Increments this buffer id, returning the old value.
94 /// So that's a post-increment operator in disguise.
95 pub fn next(&mut self) -> Self {
96 let old = *self;
97 self.0 = self.0.saturating_add(1);
98 old
99 }
100
101 pub fn to_proto(self) -> u64 {
102 self.into()
103 }
104}
105
106impl From<BufferId> for u64 {
107 fn from(id: BufferId) -> Self {
108 id.0.get()
109 }
110}
111
/// A view of the buffer's contents and replication state at a moment in time.
#[derive(Clone)]
pub struct BufferSnapshot {
    /// The text currently visible in the document.
    visible_text: Rope,
    /// Text that has been deleted but is retained for resolving history.
    deleted_text: Rope,
    /// All fragments (visible and deleted), in document order.
    fragments: SumTree<Fragment>,
    /// Index from (insertion timestamp, offset) to the containing fragment.
    insertions: SumTree<InsertionFragment>,
    /// Slices of insertions touched by each edit (see `InsertionSlice`).
    insertion_slices: TreeSet<InsertionSlice>,
    undo_map: UndoMap,
    /// Version vector of all operations reflected in this snapshot.
    pub version: clock::Global,
    remote_id: BufferId,
    replica_id: ReplicaId,
    line_ending: LineEnding,
}
125
/// A transaction on the undo/redo stack, with the timing metadata used to
/// decide whether adjacent transactions should be grouped.
#[derive(Clone, Debug)]
pub struct HistoryEntry {
    transaction: Transaction,
    /// When the first edit of this transaction occurred.
    first_edit_at: Instant,
    /// When the most recent edit of this transaction occurred.
    last_edit_at: Instant,
    /// When true, later transactions are never grouped into this entry.
    suppress_grouping: bool,
}
133
/// A group of edits that are undone and redone together.
#[derive(Clone, Debug)]
pub struct Transaction {
    /// Unique id: the Lamport timestamp ticked when the transaction began.
    pub id: TransactionId,
    /// Timestamps of the edit operations belonging to this transaction.
    pub edit_ids: Vec<clock::Lamport>,
    /// The buffer version when the transaction began.
    pub start: clock::Global,
}
140
141impl Transaction {
142 pub fn merge_in(&mut self, other: Transaction) {
143 self.edit_ids.extend(other.edit_ids);
144 }
145}
146
147impl HistoryEntry {
148 pub fn transaction_id(&self) -> TransactionId {
149 self.transaction.id
150 }
151}
152
/// Undo/redo state plus the log of all operations applied to a buffer.
struct History {
    /// The buffer's contents when it was created.
    base_text: Rope,
    /// Every operation ever pushed, keyed by its timestamp.
    operations: TreeMap<clock::Lamport, Operation>,
    undo_stack: Vec<HistoryEntry>,
    redo_stack: Vec<HistoryEntry>,
    /// Current nesting level of `start_transaction` calls.
    transaction_depth: usize,
    /// Transactions whose edits occur within this interval of each other
    /// may be grouped into a single undo entry.
    group_interval: Duration,
}
161
/// A contiguous byte range within a single insertion, tagged with the edit
/// that produced it (see `InsertionSlice::from_fragment`).
#[derive(Clone, Debug, Eq, PartialEq)]
struct InsertionSlice {
    // The two Lamport timestamps are stored as separate value/replica-id
    // fields (rather than two `clock::Lamport`s) so the replica ids can share
    // the same alignment, saving 4 bytes of padding per slice.
    // Logically: edit_id: clock::Lamport,
    edit_id_value: clock::Seq,
    edit_id_replica_id: ReplicaId,
    // Logically: insertion_id: clock::Lamport,
    insertion_id_value: clock::Seq,
    insertion_id_replica_id: ReplicaId,
    // Byte range within the insertion identified by `insertion_id`.
    range: Range<u32>,
}
173
impl Ord for InsertionSlice {
    /// Total order: by edit id, then insertion id, then range start, then
    /// range end. The split value/replica-id fields are reassembled into
    /// `Lamport` timestamps so the comparison agrees with `clock::Lamport`'s
    /// own `Ord` implementation.
    fn cmp(&self, other: &Self) -> Ordering {
        Lamport {
            value: self.edit_id_value,
            replica_id: self.edit_id_replica_id,
        }
        .cmp(&Lamport {
            value: other.edit_id_value,
            replica_id: other.edit_id_replica_id,
        })
        .then_with(|| {
            Lamport {
                value: self.insertion_id_value,
                replica_id: self.insertion_id_replica_id,
            }
            .cmp(&Lamport {
                value: other.insertion_id_value,
                replica_id: other.insertion_id_replica_id,
            })
        })
        .then_with(|| self.range.start.cmp(&other.range.start))
        .then_with(|| self.range.end.cmp(&other.range.end))
    }
}
198
199impl PartialOrd for InsertionSlice {
200 fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
201 Some(self.cmp(other))
202 }
203}
204
205impl InsertionSlice {
206 fn from_fragment(edit_id: clock::Lamport, fragment: &Fragment) -> Self {
207 Self {
208 edit_id_value: edit_id.value,
209 edit_id_replica_id: edit_id.replica_id,
210 insertion_id_value: fragment.timestamp.value,
211 insertion_id_replica_id: fragment.timestamp.replica_id,
212 range: fragment.insertion_offset..fragment.insertion_offset + fragment.len,
213 }
214 }
215}
216
impl History {
    /// Creates a history for a buffer whose initial contents are `base_text`.
    pub fn new(base_text: Rope) -> Self {
        Self {
            base_text,
            operations: Default::default(),
            undo_stack: Vec::new(),
            redo_stack: Vec::new(),
            transaction_depth: 0,
            // Don't group transactions in tests unless we opt in, because it's a footgun.
            group_interval: if cfg!(any(test, feature = "test-support")) {
                Duration::ZERO
            } else {
                Duration::from_millis(300)
            },
        }
    }

    /// Records an operation in the log, keyed by its timestamp.
    fn push(&mut self, op: Operation) {
        self.operations.insert(op.timestamp(), op);
    }

    /// Begins a (possibly nested) transaction. Only the outermost call pushes
    /// a new undo-stack entry and returns its id; nested calls just increase
    /// the depth and return `None`.
    fn start_transaction(
        &mut self,
        start: clock::Global,
        now: Instant,
        clock: &mut clock::Lamport,
    ) -> Option<TransactionId> {
        self.transaction_depth += 1;
        if self.transaction_depth == 1 {
            let id = clock.tick();
            self.undo_stack.push(HistoryEntry {
                transaction: Transaction {
                    id,
                    start,
                    edit_ids: Default::default(),
                },
                first_edit_at: now,
                last_edit_at: now,
                suppress_grouping: false,
            });
            Some(id)
        } else {
            None
        }
    }

    /// Ends one level of transaction nesting. When the outermost level ends,
    /// an entry with no edits is discarded and `None` is returned; otherwise
    /// the redo stack is cleared and the finished entry is returned.
    fn end_transaction(&mut self, now: Instant) -> Option<&HistoryEntry> {
        assert_ne!(self.transaction_depth, 0);
        self.transaction_depth -= 1;
        if self.transaction_depth == 0 {
            if self
                .undo_stack
                .last()
                .unwrap()
                .transaction
                .edit_ids
                .is_empty()
            {
                self.undo_stack.pop();
                None
            } else {
                self.redo_stack.clear();
                let entry = self.undo_stack.last_mut().unwrap();
                entry.last_edit_at = now;
                Some(entry)
            }
        } else {
            None
        }
    }

    /// Counts how many trailing undo entries fall within `group_interval` of
    /// their predecessor (stopping at entries that suppress grouping) and
    /// merges them into one transaction via `group_trailing`.
    fn group(&mut self) -> Option<TransactionId> {
        let mut count = 0;
        let mut entries = self.undo_stack.iter();
        if let Some(mut entry) = entries.next_back() {
            while let Some(prev_entry) = entries.next_back() {
                if !prev_entry.suppress_grouping
                    && entry.first_edit_at - prev_entry.last_edit_at < self.group_interval
                {
                    entry = prev_entry;
                    count += 1;
                } else {
                    break;
                }
            }
        }
        self.group_trailing(count)
    }

    /// Merges every undo entry above the one with `transaction_id` into it,
    /// stopping early if an intervening entry suppresses grouping or the id
    /// is not found.
    fn group_until(&mut self, transaction_id: TransactionId) {
        let mut count = 0;
        for entry in self.undo_stack.iter().rev() {
            if entry.transaction_id() == transaction_id {
                self.group_trailing(count);
                break;
            } else if entry.suppress_grouping {
                break;
            } else {
                count += 1;
            }
        }
    }

    /// Merges the last `n` undo entries into the entry preceding them and
    /// returns the id of the resulting topmost transaction, if any.
    fn group_trailing(&mut self, n: usize) -> Option<TransactionId> {
        let new_len = self.undo_stack.len() - n;
        let (entries_to_keep, entries_to_merge) = self.undo_stack.split_at_mut(new_len);
        if let Some(last_entry) = entries_to_keep.last_mut() {
            for entry in &*entries_to_merge {
                for edit_id in &entry.transaction.edit_ids {
                    last_entry.transaction.edit_ids.push(*edit_id);
                }
            }

            // The merged entry inherits the timestamp of the newest edit.
            if let Some(entry) = entries_to_merge.last_mut() {
                last_entry.last_edit_at = entry.last_edit_at;
            }
        }

        self.undo_stack.truncate(new_len);
        self.undo_stack.last().map(|e| e.transaction.id)
    }

    /// Marks the most recent transaction so later transactions won't be
    /// grouped into it, and returns it.
    fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
        self.undo_stack.last_mut().map(|entry| {
            entry.transaction.edit_ids.shrink_to_fit();
            entry.suppress_grouping = true;
            &entry.transaction
        })
    }

    /// Pushes an already-built transaction onto the undo stack.
    fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
        assert_eq!(self.transaction_depth, 0);
        self.undo_stack.push(HistoryEntry {
            transaction,
            first_edit_at: now,
            last_edit_at: now,
            suppress_grouping: false,
        });
    }

    /// Differs from `push_transaction` in that it does not clear the redo
    /// stack. Intended to be used to create a parent transaction to merge
    /// potential child transactions into.
    ///
    /// The caller is responsible for removing it from the undo history using
    /// `forget_transaction` if no edits are merged into it. Otherwise, if edits
    /// are merged into this transaction, the caller is responsible for ensuring
    /// the redo stack is cleared. The easiest way to ensure the redo stack is
    /// cleared is to create transactions with the usual `start_transaction` and
    /// `end_transaction` methods and merging the resulting transactions into
    /// the transaction created by this method
    fn push_empty_transaction(
        &mut self,
        start: clock::Global,
        now: Instant,
        clock: &mut clock::Lamport,
    ) -> TransactionId {
        assert_eq!(self.transaction_depth, 0);
        let id = clock.tick();
        let transaction = Transaction {
            id,
            start,
            edit_ids: Vec::new(),
        };
        self.undo_stack.push(HistoryEntry {
            transaction,
            first_edit_at: now,
            last_edit_at: now,
            suppress_grouping: false,
        });
        id
    }

    /// Appends `op_id` to the current (innermost) transaction, but only if it
    /// refers to an edit operation recorded in the log.
    fn push_undo(&mut self, op_id: clock::Lamport) {
        assert_ne!(self.transaction_depth, 0);
        if let Some(Operation::Edit(_)) = self.operations.get(&op_id) {
            let last_transaction = self.undo_stack.last_mut().unwrap();
            last_transaction.transaction.edit_ids.push(op_id);
        }
    }

    /// Moves the most recent undo entry onto the redo stack and returns it.
    fn pop_undo(&mut self) -> Option<&HistoryEntry> {
        assert_eq!(self.transaction_depth, 0);
        if let Some(entry) = self.undo_stack.pop() {
            self.redo_stack.push(entry);
            self.redo_stack.last()
        } else {
            None
        }
    }

    /// Moves the entry with `transaction_id` (searched from the top of the
    /// undo stack) onto the redo stack and returns it.
    fn remove_from_undo(&mut self, transaction_id: TransactionId) -> Option<&HistoryEntry> {
        assert_eq!(self.transaction_depth, 0);

        let entry_ix = self
            .undo_stack
            .iter()
            .rposition(|entry| entry.transaction.id == transaction_id)?;
        let entry = self.undo_stack.remove(entry_ix);
        self.redo_stack.push(entry);
        self.redo_stack.last()
    }

    /// Moves every undo entry down to and including `transaction_id` onto the
    /// redo stack, returning the moved entries. Moves nothing if the id is
    /// not on the undo stack.
    fn remove_from_undo_until(&mut self, transaction_id: TransactionId) -> &[HistoryEntry] {
        assert_eq!(self.transaction_depth, 0);

        let redo_stack_start_len = self.redo_stack.len();
        if let Some(entry_ix) = self
            .undo_stack
            .iter()
            .rposition(|entry| entry.transaction.id == transaction_id)
        {
            self.redo_stack
                .extend(self.undo_stack.drain(entry_ix..).rev());
        }
        &self.redo_stack[redo_stack_start_len..]
    }

    /// Removes and returns the transaction with `transaction_id` from
    /// whichever stack holds it, without transferring it to the other stack.
    fn forget(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
        assert_eq!(self.transaction_depth, 0);
        if let Some(entry_ix) = self
            .undo_stack
            .iter()
            .rposition(|entry| entry.transaction.id == transaction_id)
        {
            Some(self.undo_stack.remove(entry_ix).transaction)
        } else if let Some(entry_ix) = self
            .redo_stack
            .iter()
            .rposition(|entry| entry.transaction.id == transaction_id)
        {
            Some(self.redo_stack.remove(entry_ix).transaction)
        } else {
            None
        }
    }

    /// Finds the transaction with `transaction_id` on either stack.
    fn transaction(&self, transaction_id: TransactionId) -> Option<&Transaction> {
        let entry = self
            .undo_stack
            .iter()
            .rfind(|entry| entry.transaction.id == transaction_id)
            .or_else(|| {
                self.redo_stack
                    .iter()
                    .rfind(|entry| entry.transaction.id == transaction_id)
            })?;
        Some(&entry.transaction)
    }

    /// Mutable variant of `transaction`.
    fn transaction_mut(&mut self, transaction_id: TransactionId) -> Option<&mut Transaction> {
        let entry = self
            .undo_stack
            .iter_mut()
            .rfind(|entry| entry.transaction.id == transaction_id)
            .or_else(|| {
                self.redo_stack
                    .iter_mut()
                    .rfind(|entry| entry.transaction.id == transaction_id)
            })?;
        Some(&mut entry.transaction)
    }

    /// Removes `transaction` from history and appends its edits to
    /// `destination`. A no-op when either transaction cannot be found.
    fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
        if let Some(transaction) = self.forget(transaction)
            && let Some(destination) = self.transaction_mut(destination)
        {
            destination.edit_ids.extend(transaction.edit_ids);
        }
    }

    /// Moves the most recent redo entry back onto the undo stack and returns it.
    fn pop_redo(&mut self) -> Option<&HistoryEntry> {
        assert_eq!(self.transaction_depth, 0);
        if let Some(entry) = self.redo_stack.pop() {
            self.undo_stack.push(entry);
            self.undo_stack.last()
        } else {
            None
        }
    }

    /// Moves every redo entry down to and including `transaction_id` back
    /// onto the undo stack, returning the moved entries.
    fn remove_from_redo(&mut self, transaction_id: TransactionId) -> &[HistoryEntry] {
        assert_eq!(self.transaction_depth, 0);

        let undo_stack_start_len = self.undo_stack.len();
        if let Some(entry_ix) = self
            .redo_stack
            .iter()
            .rposition(|entry| entry.transaction.id == transaction_id)
        {
            self.undo_stack
                .extend(self.redo_stack.drain(entry_ix..).rev());
        }
        &self.undo_stack[undo_stack_start_len..]
    }
}
513
/// Cursor state for walking the differences between a snapshot and an earlier
/// version (`since`), with `F` filtering fragment summaries.
// NOTE(review): the `Iterator` impl for this type is outside this chunk —
// confirm the exact iteration semantics there.
struct Edits<'a, D: TextDimension, F: FnMut(&FragmentSummary) -> bool> {
    /// Cursor over the currently visible text rope.
    visible_cursor: rope::Cursor<'a>,
    /// Cursor over the deleted text rope.
    deleted_cursor: rope::Cursor<'a>,
    /// Filtered cursor over fragments; `None` when there is nothing to visit.
    fragments_cursor: Option<FilterCursor<'a, 'static, F, Fragment, FragmentTextSummary>>,
    undos: &'a UndoMap,
    /// The version edits are computed relative to.
    since: &'a clock::Global,
    old_end: D,
    new_end: D,
    range: Range<(&'a Locator, u32)>,
    buffer_id: BufferId,
}
525
/// A single text replacement, expressed as the replaced range in the old
/// state (`old`) and the corresponding range in the new state (`new`).
#[derive(Clone, Debug, Default, Eq, PartialEq)]
pub struct Edit<D> {
    pub old: Range<D>,
    pub new: Range<D>,
}
531impl<D> Edit<D>
532where
533 D: PartialEq,
534{
535 pub fn is_empty(&self) -> bool {
536 self.old.start == self.old.end && self.new.start == self.new.end
537 }
538}
539
540impl<D, DDelta> Edit<D>
541where
542 D: Sub<D, Output = DDelta> + Copy,
543{
544 pub fn old_len(&self) -> DDelta {
545 self.old.end - self.old.start
546 }
547
548 pub fn new_len(&self) -> DDelta {
549 self.new.end - self.new.start
550 }
551}
552
553impl<D1, D2> Edit<(D1, D2)> {
554 pub fn flatten(self) -> (Edit<D1>, Edit<D2>) {
555 (
556 Edit {
557 old: self.old.start.0..self.old.end.0,
558 new: self.new.start.0..self.new.end.0,
559 },
560 Edit {
561 old: self.old.start.1..self.old.end.1,
562 new: self.new.start.1..self.new.end.1,
563 },
564 )
565 }
566}
567
/// A run of text belonging to a single insertion, ordered by `id` within the
/// fragment tree.
#[derive(Eq, PartialEq, Clone, Debug)]
struct Fragment {
    /// Dense, totally-ordered position of this fragment in the document.
    id: Locator,
    /// Lamport timestamp of the insertion this fragment came from.
    timestamp: clock::Lamport,
    /// Byte offset of this fragment within its original insertion.
    insertion_offset: u32,
    /// Length of this fragment in bytes.
    len: u32,
    /// Whether this fragment's text is currently part of the visible document.
    visible: bool,
    /// Timestamps of edits that deleted this fragment, if any.
    deletions: SmallVec<[clock::Lamport; 2]>,
    // NOTE(review): presumably the undo operations observed for this fragment;
    // confirm against the undo application code elsewhere in the file.
    max_undos: clock::Global,
}
578
/// Aggregated summary over a subtree of fragments in the sum tree.
#[derive(Eq, PartialEq, Clone, Debug)]
struct FragmentSummary {
    /// Total visible/deleted byte counts of the subtree.
    text: FragmentTextSummary,
    /// The maximum fragment `id` in the subtree.
    max_id: Locator,
    max_version: clock::Global,
    min_insertion_version: clock::Global,
    max_insertion_version: clock::Global,
}
587
/// Byte counts of visible and deleted text across a run of fragments.
#[derive(Copy, Default, Clone, Debug, PartialEq, Eq)]
struct FragmentTextSummary {
    visible: usize,
    deleted: usize,
}
593
/// Lets `FragmentTextSummary` act as a seek dimension over the fragment tree
/// (used e.g. by `apply_local_edit`'s cursor).
impl<'a> sum_tree::Dimension<'a, FragmentSummary> for FragmentTextSummary {
    fn zero(_: &Option<clock::Global>) -> Self {
        Default::default()
    }

    // Accumulate visible and deleted byte counts from each summary.
    fn add_summary(&mut self, summary: &'a FragmentSummary, _: &Option<clock::Global>) {
        self.visible += summary.text.visible;
        self.deleted += summary.text.deleted;
    }
}
604
/// Index entry mapping a position within an insertion to the fragment that
/// currently contains it.
#[derive(Eq, PartialEq, Clone, Debug)]
struct InsertionFragment {
    /// Timestamp of the insertion this entry belongs to.
    timestamp: clock::Lamport,
    /// Offset within the insertion where the fragment begins.
    split_offset: u32,
    /// Id of the fragment holding that part of the insertion.
    fragment_id: Locator,
}
611
/// Key for looking up `InsertionFragment`s by insertion timestamp and offset.
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)]
struct InsertionFragmentKey {
    timestamp: clock::Lamport,
    split_offset: u32,
}
617
/// A replicable change to a buffer: either an edit or an undo.
#[derive(Clone, Debug, Eq, PartialEq)]
pub enum Operation {
    Edit(EditOperation),
    Undo(UndoOperation),
}
623
/// A set of text replacements, expressed against the buffer version in which
/// they were authored.
#[derive(Clone, Debug, Eq, PartialEq)]
pub struct EditOperation {
    /// Unique timestamp of this edit.
    pub timestamp: clock::Lamport,
    /// The buffer version this edit was based on.
    pub version: clock::Global,
    /// Replaced ranges in full offsets (visible + deleted text).
    pub ranges: Vec<Range<FullOffset>>,
    /// Replacement text for each range, parallel to `ranges`.
    pub new_text: Vec<Arc<str>>,
}
631
/// An undo of previously applied edits.
#[derive(Clone, Debug, Eq, PartialEq)]
pub struct UndoOperation {
    /// Unique timestamp of this undo.
    pub timestamp: clock::Lamport,
    /// The buffer version this undo was based on.
    pub version: clock::Global,
    // NOTE(review): appears to map edit ids to undo counts — confirm the
    // exact semantics against `UndoMap`/`apply_undo`.
    pub counts: HashMap<clock::Lamport, u32>,
}
638
/// Stores information about the indentation of a line (tabs and spaces).
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub struct LineIndent {
    /// Number of leading tab characters.
    pub tabs: u32,
    /// Number of leading space characters.
    pub spaces: u32,
    /// Whether the line contains only whitespace.
    pub line_blank: bool,
}
646
647impl LineIndent {
648 pub fn from_chunks(chunks: &mut Chunks) -> Self {
649 let mut tabs = 0;
650 let mut spaces = 0;
651 let mut line_blank = true;
652
653 'outer: while let Some(chunk) = chunks.peek() {
654 for ch in chunk.chars() {
655 if ch == '\t' {
656 tabs += 1;
657 } else if ch == ' ' {
658 spaces += 1;
659 } else {
660 if ch != '\n' {
661 line_blank = false;
662 }
663 break 'outer;
664 }
665 }
666
667 chunks.next();
668 }
669
670 Self {
671 tabs,
672 spaces,
673 line_blank,
674 }
675 }
676
677 /// Constructs a new `LineIndent` which only contains spaces.
678 pub fn spaces(spaces: u32) -> Self {
679 Self {
680 tabs: 0,
681 spaces,
682 line_blank: true,
683 }
684 }
685
686 /// Constructs a new `LineIndent` which only contains tabs.
687 pub fn tabs(tabs: u32) -> Self {
688 Self {
689 tabs,
690 spaces: 0,
691 line_blank: true,
692 }
693 }
694
695 /// Indicates whether the line is empty.
696 pub fn is_line_empty(&self) -> bool {
697 self.tabs == 0 && self.spaces == 0 && self.line_blank
698 }
699
700 /// Indicates whether the line is blank (contains only whitespace).
701 pub fn is_line_blank(&self) -> bool {
702 self.line_blank
703 }
704
705 /// Returns the number of indentation characters (tabs or spaces).
706 pub fn raw_len(&self) -> u32 {
707 self.tabs + self.spaces
708 }
709
710 /// Returns the number of indentation characters (tabs or spaces), taking tab size into account.
711 pub fn len(&self, tab_size: u32) -> u32 {
712 self.tabs * tab_size + self.spaces
713 }
714}
715
716impl From<&str> for LineIndent {
717 fn from(value: &str) -> Self {
718 Self::from_iter(value.chars())
719 }
720}
721
722impl FromIterator<char> for LineIndent {
723 fn from_iter<T: IntoIterator<Item = char>>(chars: T) -> Self {
724 let mut tabs = 0;
725 let mut spaces = 0;
726 let mut line_blank = true;
727 for c in chars {
728 if c == '\t' {
729 tabs += 1;
730 } else if c == ' ' {
731 spaces += 1;
732 } else {
733 if c != '\n' {
734 line_blank = false;
735 }
736 break;
737 }
738 }
739 Self {
740 tabs,
741 spaces,
742 line_blank,
743 }
744 }
745}
746
747impl Buffer {
748 pub fn new(replica_id: ReplicaId, remote_id: BufferId, base_text: impl Into<String>) -> Buffer {
749 let mut base_text = base_text.into();
750 let line_ending = LineEnding::detect(&base_text);
751 LineEnding::normalize(&mut base_text);
752 Self::new_normalized(replica_id, remote_id, line_ending, Rope::from(&*base_text))
753 }
754
    /// Creates a buffer from text that has already been normalized to `\n`
    /// line endings, seeding the fragment tree with the base text split into
    /// fragments of at most `MAX_INSERTION_LEN` bytes.
    pub fn new_normalized(
        replica_id: ReplicaId,
        remote_id: BufferId,
        line_ending: LineEnding,
        normalized: Rope,
    ) -> Buffer {
        let history = History::new(normalized);
        let mut fragments = SumTree::new(&None);
        let mut insertions = SumTree::default();

        let mut lamport_clock = clock::Lamport::new(replica_id);
        let mut version = clock::Global::new();

        let visible_text = history.base_text.clone();
        if !visible_text.is_empty() {
            // The base text is modeled as a single insertion authored by the
            // LOCAL replica, so every replica derives identical initial state.
            let insertion_timestamp = clock::Lamport::new(ReplicaId::LOCAL);
            lamport_clock.observe(insertion_timestamp);
            version.observe(insertion_timestamp);

            let mut insertion_offset: u32 = 0;
            let mut text_offset: usize = 0;
            let mut prev_locator = Locator::min();

            while text_offset < visible_text.len() {
                // Cap each fragment at MAX_INSERTION_LEN, backing off to a
                // char boundary so a multi-byte character is never split.
                let target_end = visible_text.len().min(text_offset + MAX_INSERTION_LEN);
                let chunk_end = if target_end == visible_text.len() {
                    target_end
                } else {
                    visible_text.floor_char_boundary(target_end)
                };
                let chunk_len = chunk_end - text_offset;

                let fragment_id = Locator::between(&prev_locator, &Locator::max());
                let fragment = Fragment {
                    id: fragment_id.clone(),
                    timestamp: insertion_timestamp,
                    insertion_offset,
                    len: chunk_len as u32,
                    visible: true,
                    deletions: Default::default(),
                    max_undos: Default::default(),
                };
                insertions.push(InsertionFragment::new(&fragment), ());
                fragments.push(fragment, &None);

                prev_locator = fragment_id;
                insertion_offset += chunk_len as u32;
                text_offset = chunk_end;
            }
        }

        Buffer {
            snapshot: BufferSnapshot {
                replica_id,
                remote_id,
                visible_text,
                deleted_text: Rope::new(),
                line_ending,
                fragments,
                insertions,
                version,
                undo_map: Default::default(),
                insertion_slices: Default::default(),
            },
            history,
            deferred_ops: OperationQueue::new(),
            deferred_replicas: HashSet::default(),
            lamport_clock,
            subscriptions: Default::default(),
            edit_id_resolvers: Default::default(),
            wait_for_version_txs: Default::default(),
        }
    }
828
    /// Returns a copy of the buffer's current version vector.
    pub fn version(&self) -> clock::Global {
        self.version.clone()
    }

    /// A reference to the current snapshot of the buffer's contents.
    pub fn snapshot(&self) -> &BufferSnapshot {
        &self.snapshot
    }

    /// Consumes the buffer, yielding its snapshot.
    pub fn into_snapshot(self) -> BufferSnapshot {
        self.snapshot
    }

    /// Creates an independent branch of this buffer: it shares the current
    /// snapshot, but starts with fresh history (seeded from `self.base_text()`)
    /// and a clock using the branch replica id.
    pub fn branch(&self) -> Self {
        Self {
            snapshot: self.snapshot.clone(),
            history: History::new(self.base_text().clone()),
            deferred_ops: OperationQueue::new(),
            deferred_replicas: HashSet::default(),
            lamport_clock: clock::Lamport::new(ReplicaId::LOCAL_BRANCH),
            subscriptions: Default::default(),
            edit_id_resolvers: Default::default(),
            wait_for_version_txs: Default::default(),
        }
    }

    /// The replica id this buffer uses to stamp its operations.
    pub fn replica_id(&self) -> ReplicaId {
        self.lamport_clock.replica_id
    }

    /// The buffer's stable id, shared across replicas.
    pub fn remote_id(&self) -> BufferId {
        self.remote_id
    }

    /// Number of remote operations currently deferred (see `apply_ops`).
    pub fn deferred_ops_len(&self) -> usize {
        self.deferred_ops.len()
    }

    /// The interval within which consecutive transactions may be grouped for undo.
    pub fn transaction_group_interval(&self) -> Duration {
        self.history.group_interval
    }
869
    /// Replaces each `(range, new_text)` pair in `edits` within an implicit
    /// transaction, records the resulting operation in history, and returns
    /// it for replication to other replicas.
    pub fn edit<R, I, S, T>(&mut self, edits: R) -> Operation
    where
        R: IntoIterator<IntoIter = I>,
        I: ExactSizeIterator<Item = (Range<S>, T)>,
        S: ToOffset,
        T: Into<Arc<str>>,
    {
        let edits = edits
            .into_iter()
            .map(|(range, new_text)| (range, new_text.into()));

        self.start_transaction();
        let timestamp = self.lamport_clock.tick();
        let operation = Operation::Edit(self.apply_local_edit(edits, timestamp));

        // Log the operation, attach it to the current transaction, and mark
        // it observed in our version before closing the transaction.
        self.history.push(operation.clone());
        self.history.push_undo(operation.timestamp());
        self.snapshot.version.observe(operation.timestamp());
        self.end_transaction();
        operation
    }
891
    /// Applies edits authored by this replica: rewrites the fragment tree,
    /// the visible/deleted ropes, and the insertion index, publishes an
    /// offset patch to subscribers, and returns the `EditOperation` to
    /// replicate to other peers.
    ///
    /// NOTE(review): ranges appear to be assumed ascending and disjoint, and
    /// an empty `edits` iterator panics on the `edits.peek().unwrap()` below —
    /// confirm callers uphold both.
    fn apply_local_edit<S: ToOffset, T: Into<Arc<str>>>(
        &mut self,
        edits: impl ExactSizeIterator<Item = (Range<S>, T)>,
        timestamp: clock::Lamport,
    ) -> EditOperation {
        let mut edits_patch = Patch::default();
        let mut edit_op = EditOperation {
            timestamp,
            version: self.version(),
            ranges: Vec::with_capacity(edits.len()),
            new_text: Vec::with_capacity(edits.len()),
        };
        let mut new_insertions = Vec::new();
        let mut insertion_offset: u32 = 0;
        let mut insertion_slices = Vec::new();

        let mut edits = edits
            .map(|(range, new_text)| (range.to_offset(&*self), new_text))
            .peekable();

        // Rebuild the visible/deleted ropes in lockstep with the new
        // fragment tree.
        let mut new_ropes =
            RopeBuilder::new(self.visible_text.cursor(0), self.deleted_text.cursor(0));
        let mut old_fragments = self.fragments.cursor::<FragmentTextSummary>(&None);
        let mut new_fragments =
            FragmentBuilder::new(old_fragments.slice(&edits.peek().unwrap().0.start, Bias::Right));
        new_ropes.append(new_fragments.summary().text);

        let mut fragment_start = old_fragments.start().visible;
        for (range, new_text) in edits {
            let new_text = LineEnding::normalize_arc(new_text.into());
            let fragment_end = old_fragments.end().visible;

            // If the current fragment ends before this range, then jump ahead to the first fragment
            // that extends past the start of this range, reusing any intervening fragments.
            if fragment_end < range.start {
                // If the current fragment has been partially consumed, then consume the rest of it
                // and advance to the next fragment before slicing.
                if fragment_start > old_fragments.start().visible {
                    if fragment_end > fragment_start {
                        let mut suffix = old_fragments.item().unwrap().clone();
                        suffix.len = (fragment_end - fragment_start) as u32;
                        suffix.insertion_offset +=
                            (fragment_start - old_fragments.start().visible) as u32;
                        new_insertions.push(InsertionFragment::insert_new(&suffix));
                        new_ropes.push_fragment(&suffix, suffix.visible);
                        new_fragments.push(suffix, &None);
                    }
                    old_fragments.next();
                }

                let slice = old_fragments.slice(&range.start, Bias::Right);
                new_ropes.append(slice.summary().text);
                new_fragments.append(slice, &None);
                fragment_start = old_fragments.start().visible;
            }

            // Record the range start in full offsets (visible + deleted).
            let full_range_start = FullOffset(range.start + old_fragments.start().deleted);

            // Preserve any portion of the current fragment that precedes this range.
            if fragment_start < range.start {
                let mut prefix = old_fragments.item().unwrap().clone();
                prefix.len = (range.start - fragment_start) as u32;
                prefix.insertion_offset += (fragment_start - old_fragments.start().visible) as u32;
                prefix.id = Locator::between(&new_fragments.summary().max_id, &prefix.id);
                new_insertions.push(InsertionFragment::insert_new(&prefix));
                new_ropes.push_fragment(&prefix, prefix.visible);
                new_fragments.push(prefix, &None);
                fragment_start = range.start;
            }

            // Insert the new text before any existing fragments within the range.
            if !new_text.is_empty() {
                let new_start = new_fragments.summary().text.visible;

                let next_fragment_id = old_fragments
                    .item()
                    .map_or(Locator::max_ref(), |old_fragment| &old_fragment.id);
                Self::push_fragments_for_insertion(
                    new_text.as_ref(),
                    timestamp,
                    &mut insertion_offset,
                    &mut new_fragments,
                    &mut new_insertions,
                    &mut insertion_slices,
                    &mut new_ropes,
                    next_fragment_id,
                    timestamp,
                );
                edits_patch.push(Edit {
                    old: fragment_start..fragment_start,
                    new: new_start..new_start + new_text.len(),
                });
            }

            // Advance through every fragment that intersects this range, marking the intersecting
            // portions as deleted.
            while fragment_start < range.end {
                let fragment = old_fragments.item().unwrap();
                let fragment_end = old_fragments.end().visible;
                let mut intersection = fragment.clone();
                let intersection_end = cmp::min(range.end, fragment_end);
                if fragment.visible {
                    intersection.len = (intersection_end - fragment_start) as u32;
                    intersection.insertion_offset +=
                        (fragment_start - old_fragments.start().visible) as u32;
                    intersection.id =
                        Locator::between(&new_fragments.summary().max_id, &intersection.id);
                    intersection.deletions.push(timestamp);
                    intersection.visible = false;
                }
                if intersection.len > 0 {
                    // Only newly-deleted fragments produce patch entries and
                    // insertion slices; already-deleted ones are just copied.
                    if fragment.visible && !intersection.visible {
                        let new_start = new_fragments.summary().text.visible;
                        edits_patch.push(Edit {
                            old: fragment_start..intersection_end,
                            new: new_start..new_start,
                        });
                        insertion_slices
                            .push(InsertionSlice::from_fragment(timestamp, &intersection));
                    }
                    new_insertions.push(InsertionFragment::insert_new(&intersection));
                    new_ropes.push_fragment(&intersection, fragment.visible);
                    new_fragments.push(intersection, &None);
                    fragment_start = intersection_end;
                }
                if fragment_end <= range.end {
                    old_fragments.next();
                }
            }

            let full_range_end = FullOffset(range.end + old_fragments.start().deleted);
            edit_op.ranges.push(full_range_start..full_range_end);
            edit_op.new_text.push(new_text);
        }

        // If the current fragment has been partially consumed, then consume the rest of it
        // and advance to the next fragment before slicing.
        if fragment_start > old_fragments.start().visible {
            let fragment_end = old_fragments.end().visible;
            if fragment_end > fragment_start {
                let mut suffix = old_fragments.item().unwrap().clone();
                suffix.len = (fragment_end - fragment_start) as u32;
                suffix.insertion_offset += (fragment_start - old_fragments.start().visible) as u32;
                new_insertions.push(InsertionFragment::insert_new(&suffix));
                new_ropes.push_fragment(&suffix, suffix.visible);
                new_fragments.push(suffix, &None);
            }
            old_fragments.next();
        }

        // Carry over everything after the final edit unchanged.
        let suffix = old_fragments.suffix();
        new_ropes.append(suffix.summary().text);
        new_fragments.append(suffix, &None);
        let (visible_text, deleted_text) = new_ropes.finish();
        drop(old_fragments);

        // Commit the rebuilt state into the snapshot and notify subscribers.
        self.snapshot.fragments = new_fragments.to_sum_tree(&None);
        self.snapshot.insertions.edit(new_insertions, ());
        self.snapshot.visible_text = visible_text;
        self.snapshot.deleted_text = deleted_text;
        self.subscriptions.publish_mut(&edits_patch);
        self.snapshot.insertion_slices.extend(insertion_slices);
        edit_op
    }
1056
    /// Overrides the buffer's recorded line-ending style.
    pub fn set_line_ending(&mut self, line_ending: LineEnding) {
        self.snapshot.line_ending = line_ending;
    }
1060
1061 pub fn apply_ops<I: IntoIterator<Item = Operation>>(&mut self, ops: I) {
1062 let mut deferred_ops = Vec::new();
1063 for op in ops {
1064 self.history.push(op.clone());
1065 if self.can_apply_op(&op) {
1066 self.apply_op(op);
1067 } else {
1068 self.deferred_replicas.insert(op.replica_id());
1069 deferred_ops.push(op);
1070 }
1071 }
1072 self.deferred_ops.insert(deferred_ops);
1073 self.flush_deferred_ops();
1074 }
1075
    /// Applies a single operation whose causal dependencies are satisfied,
    /// then completes any pending version waits that the updated version
    /// satisfies.
    fn apply_op(&mut self, op: Operation) {
        match op {
            Operation::Edit(edit) => {
                // Ignore edits whose timestamp we have already observed.
                if !self.version.observed(edit.timestamp) {
                    self.apply_remote_edit(
                        &edit.version,
                        &edit.ranges,
                        &edit.new_text,
                        edit.timestamp,
                    );
                    self.snapshot.version.observe(edit.timestamp);
                    self.lamport_clock.observe(edit.timestamp);
                    self.resolve_edit(edit.timestamp);
                }
            }
            Operation::Undo(undo) => {
                if !self.version.observed(undo.timestamp) {
                    self.apply_undo(&undo);
                    self.snapshot.version.observe(undo.timestamp);
                    self.lamport_clock.observe(undo.timestamp);
                }
            }
        }
        // Wake waiters whose target version is now fully observed.
        self.wait_for_version_txs.retain_mut(|(version, tx)| {
            if self.snapshot.version().observed_all(version) {
                tx.try_send(()).ok();
                false
            } else {
                true
            }
        });
    }
1108
    /// Integrates a remote edit into the fragment tree.
    ///
    /// `ranges` are expressed as `FullOffset`s — offsets into the text as it
    /// existed at `version`, including deleted text. The fragment tree is
    /// rebuilt by walking the old fragments with a cursor: fragments outside
    /// the edited ranges are reused wholesale, fragments partially covered by
    /// a range are split, new fragments are created for the inserted text,
    /// and visible fragments intersecting a deleted range are marked deleted.
    /// A `Patch` of visible-text edits is published to subscribers.
    fn apply_remote_edit(
        &mut self,
        version: &clock::Global,
        ranges: &[Range<FullOffset>],
        new_text: &[Arc<str>],
        timestamp: clock::Lamport,
    ) {
        if ranges.is_empty() {
            return;
        }

        let edits = ranges.iter().zip(new_text.iter());
        let mut edits_patch = Patch::default();
        let mut insertion_slices = Vec::new();
        // Seek the cursor in terms of offsets as-of `version`.
        let cx = Some(version.clone());
        let mut new_insertions = Vec::new();
        let mut insertion_offset: u32 = 0;
        let mut new_ropes =
            RopeBuilder::new(self.visible_text.cursor(0), self.deleted_text.cursor(0));
        let mut old_fragments = self
            .fragments
            .cursor::<Dimensions<VersionedFullOffset, usize>>(&cx);
        let mut new_fragments = FragmentBuilder::new(
            old_fragments.slice(&VersionedFullOffset::Offset(ranges[0].start), Bias::Left),
        );
        new_ropes.append(new_fragments.summary().text);

        let mut fragment_start = old_fragments.start().0.full_offset();
        for (range, new_text) in edits {
            let fragment_end = old_fragments.end().0.full_offset();

            // If the current fragment ends before this range, then jump ahead to the first fragment
            // that extends past the start of this range, reusing any intervening fragments.
            if fragment_end < range.start {
                // If the current fragment has been partially consumed, then consume the rest of it
                // and advance to the next fragment before slicing.
                if fragment_start > old_fragments.start().0.full_offset() {
                    if fragment_end > fragment_start {
                        let mut suffix = old_fragments.item().unwrap().clone();
                        suffix.len = (fragment_end.0 - fragment_start.0) as u32;
                        suffix.insertion_offset +=
                            (fragment_start - old_fragments.start().0.full_offset()) as u32;
                        new_insertions.push(InsertionFragment::insert_new(&suffix));
                        new_ropes.push_fragment(&suffix, suffix.visible);
                        new_fragments.push(suffix, &None);
                    }
                    old_fragments.next();
                }

                let slice =
                    old_fragments.slice(&VersionedFullOffset::Offset(range.start), Bias::Left);
                new_ropes.append(slice.summary().text);
                new_fragments.append(slice, &None);
                fragment_start = old_fragments.start().0.full_offset();
            }

            // If we are at the end of a non-concurrent fragment, advance to the next one.
            let fragment_end = old_fragments.end().0.full_offset();
            if fragment_end == range.start && fragment_end > fragment_start {
                let mut fragment = old_fragments.item().unwrap().clone();
                fragment.len = (fragment_end.0 - fragment_start.0) as u32;
                fragment.insertion_offset +=
                    (fragment_start - old_fragments.start().0.full_offset()) as u32;
                new_insertions.push(InsertionFragment::insert_new(&fragment));
                new_ropes.push_fragment(&fragment, fragment.visible);
                new_fragments.push(fragment, &None);
                old_fragments.next();
                fragment_start = old_fragments.start().0.full_offset();
            }

            // Skip over insertions that are concurrent to this edit, but have a lower lamport
            // timestamp. This is the tie-break that makes concurrent inserts at
            // the same position converge to the same order on all replicas.
            while let Some(fragment) = old_fragments.item() {
                if fragment_start == range.start && fragment.timestamp > timestamp {
                    new_ropes.push_fragment(fragment, fragment.visible);
                    new_fragments.push(fragment.clone(), &None);
                    old_fragments.next();
                    debug_assert_eq!(fragment_start, range.start);
                } else {
                    break;
                }
            }
            debug_assert!(fragment_start <= range.start);

            // Preserve any portion of the current fragment that precedes this range.
            if fragment_start < range.start {
                let mut prefix = old_fragments.item().unwrap().clone();
                prefix.len = (range.start.0 - fragment_start.0) as u32;
                prefix.insertion_offset +=
                    (fragment_start - old_fragments.start().0.full_offset()) as u32;
                prefix.id = Locator::between(&new_fragments.summary().max_id, &prefix.id);
                new_insertions.push(InsertionFragment::insert_new(&prefix));
                fragment_start = range.start;
                new_ropes.push_fragment(&prefix, prefix.visible);
                new_fragments.push(prefix, &None);
            }

            // Insert the new text before any existing fragments within the range.
            if !new_text.is_empty() {
                let mut old_start = old_fragments.start().1;
                if old_fragments.item().is_some_and(|f| f.visible) {
                    old_start += fragment_start.0 - old_fragments.start().0.full_offset().0;
                }
                let new_start = new_fragments.summary().text.visible;
                let next_fragment_id = old_fragments
                    .item()
                    .map_or(Locator::max_ref(), |old_fragment| &old_fragment.id);
                Self::push_fragments_for_insertion(
                    new_text,
                    timestamp,
                    &mut insertion_offset,
                    &mut new_fragments,
                    &mut new_insertions,
                    &mut insertion_slices,
                    &mut new_ropes,
                    next_fragment_id,
                    timestamp,
                );
                edits_patch.push(Edit {
                    old: old_start..old_start,
                    new: new_start..new_start + new_text.len(),
                });
            }

            // Advance through every fragment that intersects this range, marking the intersecting
            // portions as deleted.
            while fragment_start < range.end {
                let fragment = old_fragments.item().unwrap();
                let fragment_end = old_fragments.end().0.full_offset();
                let mut intersection = fragment.clone();
                let intersection_end = cmp::min(range.end, fragment_end);
                // Only fragments the editing replica had observed are deleted;
                // concurrent insertions it hadn't seen are left intact.
                if version.observed(fragment.timestamp) {
                    intersection.len = (intersection_end.0 - fragment_start.0) as u32;
                    intersection.insertion_offset +=
                        (fragment_start - old_fragments.start().0.full_offset()) as u32;
                    intersection.id =
                        Locator::between(&new_fragments.summary().max_id, &intersection.id);
                    if fragment.was_visible(version, &self.undo_map) {
                        intersection.deletions.push(timestamp);
                        intersection.visible = false;
                        insertion_slices
                            .push(InsertionSlice::from_fragment(timestamp, &intersection));
                    }
                }
                if intersection.len > 0 {
                    if fragment.visible && !intersection.visible {
                        let old_start = old_fragments.start().1
                            + (fragment_start.0 - old_fragments.start().0.full_offset().0);
                        let new_start = new_fragments.summary().text.visible;
                        edits_patch.push(Edit {
                            old: old_start..old_start + intersection.len as usize,
                            new: new_start..new_start,
                        });
                    }
                    new_insertions.push(InsertionFragment::insert_new(&intersection));
                    new_ropes.push_fragment(&intersection, fragment.visible);
                    new_fragments.push(intersection, &None);
                    fragment_start = intersection_end;
                }
                if fragment_end <= range.end {
                    old_fragments.next();
                }
            }
        }

        // If the current fragment has been partially consumed, then consume the rest of it
        // and advance to the next fragment before slicing.
        if fragment_start > old_fragments.start().0.full_offset() {
            let fragment_end = old_fragments.end().0.full_offset();
            if fragment_end > fragment_start {
                let mut suffix = old_fragments.item().unwrap().clone();
                suffix.len = (fragment_end.0 - fragment_start.0) as u32;
                suffix.insertion_offset +=
                    (fragment_start - old_fragments.start().0.full_offset()) as u32;
                new_insertions.push(InsertionFragment::insert_new(&suffix));
                new_ropes.push_fragment(&suffix, suffix.visible);
                new_fragments.push(suffix, &None);
            }
            old_fragments.next();
        }

        // Reuse everything after the last edited range unchanged.
        let suffix = old_fragments.suffix();
        new_ropes.append(suffix.summary().text);
        new_fragments.append(suffix, &None);
        let (visible_text, deleted_text) = new_ropes.finish();
        drop(old_fragments);

        self.snapshot.fragments = new_fragments.to_sum_tree(&None);
        self.snapshot.visible_text = visible_text;
        self.snapshot.deleted_text = deleted_text;
        self.snapshot.insertions.edit(new_insertions, ());
        self.snapshot.insertion_slices.extend(insertion_slices);
        self.subscriptions.publish_mut(&edits_patch)
    }
1303
    /// Pushes fragments for newly-inserted text, splitting it into chunks of
    /// at most `MAX_INSERTION_LEN` bytes so fragment lengths fit in `u32`.
    /// Chunk boundaries are snapped down to char boundaries so no UTF-8
    /// sequence is split. All chunks share `timestamp` as their insertion id;
    /// `insertion_offset` advances across calls within the same insertion.
    /// NOTE(review): at the visible call site `timestamp` and `edit_timestamp`
    /// receive the same value — `edit_timestamp` only tags the recorded
    /// `InsertionSlice`s.
    fn push_fragments_for_insertion(
        new_text: &str,
        timestamp: clock::Lamport,
        insertion_offset: &mut u32,
        new_fragments: &mut FragmentBuilder,
        new_insertions: &mut Vec<sum_tree::Edit<InsertionFragment>>,
        insertion_slices: &mut Vec<InsertionSlice>,
        new_ropes: &mut RopeBuilder,
        next_fragment_id: &Locator,
        edit_timestamp: clock::Lamport,
    ) {
        let mut text_offset = 0;
        while text_offset < new_text.len() {
            let target_end = new_text.len().min(text_offset + MAX_INSERTION_LEN);
            let chunk_end = if target_end == new_text.len() {
                target_end
            } else {
                // Snap down so we never split a multi-byte character.
                new_text.floor_char_boundary(target_end)
            };
            if chunk_end == text_offset {
                break;
            }
            let chunk_len = chunk_end - text_offset;

            let fragment = Fragment {
                id: Locator::between(&new_fragments.summary().max_id, next_fragment_id),
                timestamp,
                insertion_offset: *insertion_offset,
                len: chunk_len as u32,
                deletions: Default::default(),
                max_undos: Default::default(),
                visible: true,
            };
            insertion_slices.push(InsertionSlice::from_fragment(edit_timestamp, &fragment));
            new_insertions.push(InsertionFragment::insert_new(&fragment));
            new_fragments.push(fragment, &None);

            *insertion_offset += chunk_len as u32;
            text_offset = chunk_end;
        }
        // The visible rope receives the full insertion in one append.
        new_ropes.push_str(new_text);
    }
1346
    /// Returns the sorted locators of every fragment touched by the given
    /// edit ids.
    ///
    /// First gathers the `InsertionSlice`s recorded for each edit (keyed by
    /// edit id in `insertion_slices`), then resolves each slice to the
    /// fragments it covers via a forward-only seek through the insertions
    /// tree — which is why the slices are sorted by insertion id and range
    /// before the second pass.
    fn fragment_ids_for_edits<'a>(
        &'a self,
        edit_ids: impl Iterator<Item = &'a clock::Lamport>,
    ) -> Vec<&'a Locator> {
        // Get all of the insertion slices changed by the given edits.
        let mut insertion_slices = Vec::new();
        for edit_id in edit_ids {
            // Synthesize the smallest possible key for this edit id and scan
            // forward while the edit id still matches.
            let insertion_slice = InsertionSlice {
                edit_id_value: edit_id.value,
                edit_id_replica_id: edit_id.replica_id,
                insertion_id_value: Lamport::MIN.value,
                insertion_id_replica_id: Lamport::MIN.replica_id,
                range: 0..0,
            };
            let slices = self
                .snapshot
                .insertion_slices
                .iter_from(&insertion_slice)
                .take_while(|slice| {
                    Lamport {
                        value: slice.edit_id_value,
                        replica_id: slice.edit_id_replica_id,
                    } == *edit_id
                });
            insertion_slices.extend(slices)
        }
        // Order slices so the single forward cursor pass below visits them
        // monotonically; `Reverse(range.end)` puts enclosing slices first.
        insertion_slices.sort_unstable_by_key(|s| {
            (
                Lamport {
                    value: s.insertion_id_value,
                    replica_id: s.insertion_id_replica_id,
                },
                s.range.start,
                Reverse(s.range.end),
            )
        });

        // Get all of the fragments corresponding to these insertion slices.
        let mut fragment_ids = Vec::new();
        let mut insertions_cursor = self.insertions.cursor::<InsertionFragmentKey>(());
        for insertion_slice in &insertion_slices {
            let insertion_id = Lamport {
                value: insertion_slice.insertion_id_value,
                replica_id: insertion_slice.insertion_id_replica_id,
            };
            // Only seek when the cursor isn't already positioned inside this
            // slice; seeking is forward-only thanks to the sort above.
            if insertion_id != insertions_cursor.start().timestamp
                || insertion_slice.range.start > insertions_cursor.start().split_offset
            {
                insertions_cursor.seek_forward(
                    &InsertionFragmentKey {
                        timestamp: insertion_id,
                        split_offset: insertion_slice.range.start,
                    },
                    Bias::Left,
                );
            }
            while let Some(item) = insertions_cursor.item() {
                if item.timestamp != insertion_id || item.split_offset >= insertion_slice.range.end
                {
                    break;
                }
                fragment_ids.push(&item.fragment_id);
                insertions_cursor.next();
            }
        }
        fragment_ids.sort_unstable();
        fragment_ids
    }
1415
    /// Applies an undo operation by recomputing the visibility of every
    /// fragment the undone edits touched.
    ///
    /// Records the undo counts in the undo map, then rebuilds the fragment
    /// tree: untouched spans are copied wholesale, and each affected fragment
    /// has its visibility re-derived from the updated undo map. Visibility
    /// flips are accumulated into a `Patch` and published to subscribers.
    fn apply_undo(&mut self, undo: &UndoOperation) {
        self.snapshot.undo_map.insert(undo);

        let mut edits = Patch::default();
        let mut old_fragments = self
            .fragments
            .cursor::<Dimensions<Option<&Locator>, usize>>(&None);
        let mut new_fragments = SumTree::new(&None);
        let mut new_ropes =
            RopeBuilder::new(self.visible_text.cursor(0), self.deleted_text.cursor(0));

        // `fragment_ids_for_edits` yields locators in tree order, so a single
        // forward pass suffices.
        for fragment_id in self.fragment_ids_for_edits(undo.counts.keys()) {
            let preceding_fragments = old_fragments.slice(&Some(fragment_id), Bias::Left);
            new_ropes.append(preceding_fragments.summary().text);
            new_fragments.append(preceding_fragments, &None);

            if let Some(fragment) = old_fragments.item() {
                let mut fragment = fragment.clone();
                let fragment_was_visible = fragment.visible;

                // Visibility is derived from the (just updated) undo map.
                fragment.visible = fragment.is_visible(&self.undo_map);
                fragment.max_undos.observe(undo.timestamp);

                let old_start = old_fragments.start().1;
                let new_start = new_fragments.summary().text.visible;
                if fragment_was_visible && !fragment.visible {
                    edits.push(Edit {
                        old: old_start..old_start + fragment.len as usize,
                        new: new_start..new_start,
                    });
                } else if !fragment_was_visible && fragment.visible {
                    edits.push(Edit {
                        old: old_start..old_start,
                        new: new_start..new_start + fragment.len as usize,
                    });
                }
                // Pass the *old* visibility so the rope builder pulls the
                // fragment's text from the correct source rope.
                new_ropes.push_fragment(&fragment, fragment_was_visible);
                new_fragments.push(fragment, &None);

                old_fragments.next();
            }
        }

        let suffix = old_fragments.suffix();
        new_ropes.append(suffix.summary().text);
        new_fragments.append(suffix, &None);

        drop(old_fragments);
        let (visible_text, deleted_text) = new_ropes.finish();
        self.snapshot.fragments = new_fragments;
        self.snapshot.visible_text = visible_text;
        self.snapshot.deleted_text = deleted_text;
        self.subscriptions.publish_mut(&edits);
    }
1470
1471 fn flush_deferred_ops(&mut self) {
1472 self.deferred_replicas.clear();
1473 let mut deferred_ops = Vec::new();
1474 for op in self.deferred_ops.drain().iter().cloned() {
1475 if self.can_apply_op(&op) {
1476 self.apply_op(op);
1477 } else {
1478 self.deferred_replicas.insert(op.replica_id());
1479 deferred_ops.push(op);
1480 }
1481 }
1482 self.deferred_ops.insert(deferred_ops);
1483 }
1484
1485 fn can_apply_op(&self, op: &Operation) -> bool {
1486 if self.deferred_replicas.contains(&op.replica_id()) {
1487 false
1488 } else {
1489 self.version.observed_all(match op {
1490 Operation::Edit(edit) => &edit.version,
1491 Operation::Undo(undo) => &undo.version,
1492 })
1493 }
1494 }
1495
    /// Returns `true` if any operations are still waiting on unobserved
    /// dependencies.
    pub fn has_deferred_ops(&self) -> bool {
        !self.deferred_ops.is_empty()
    }
1499
    /// Returns the most recent undo-stack entry without removing it.
    pub fn peek_undo_stack(&self) -> Option<&HistoryEntry> {
        self.history.undo_stack.last()
    }
1503
    /// Returns the most recent redo-stack entry without removing it.
    pub fn peek_redo_stack(&self) -> Option<&HistoryEntry> {
        self.history.redo_stack.last()
    }
1507
    /// Starts a transaction timestamped with the current instant.
    pub fn start_transaction(&mut self) -> Option<TransactionId> {
        self.start_transaction_at(Instant::now())
    }
1511
    /// Starts a transaction at the given instant. Returns the new
    /// transaction's id, or `None` if a transaction was already in progress.
    pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
        self.history
            .start_transaction(self.version.clone(), now, &mut self.lamport_clock)
    }
1516
    /// Ends the current transaction, timestamped with the current instant.
    pub fn end_transaction(&mut self) -> Option<(TransactionId, clock::Global)> {
        self.end_transaction_at(Instant::now())
    }
1520
1521 pub fn end_transaction_at(&mut self, now: Instant) -> Option<(TransactionId, clock::Global)> {
1522 if let Some(entry) = self.history.end_transaction(now) {
1523 let since = entry.transaction.start.clone();
1524 let id = self.history.group().unwrap();
1525 Some((id, since))
1526 } else {
1527 None
1528 }
1529 }
1530
    /// Finalizes the last transaction so later edits are not grouped into it.
    pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
        self.history.finalize_last_transaction()
    }
1534
    /// Groups history entries back to (and including) the given transaction.
    pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
        self.history.group_until(transaction_id);
    }
1538
    /// Returns the buffer's original (pre-edit) text.
    pub fn base_text(&self) -> &Rope {
        &self.history.base_text
    }
1542
    /// Returns every operation recorded in this buffer's history, keyed by
    /// lamport timestamp.
    pub fn operations(&self) -> &TreeMap<clock::Lamport, Operation> {
        &self.history.operations
    }
1546
1547 pub fn undo(&mut self) -> Option<(TransactionId, Operation)> {
1548 if let Some(entry) = self.history.pop_undo() {
1549 let transaction = entry.transaction.clone();
1550 let transaction_id = transaction.id;
1551 let op = self.undo_or_redo(transaction);
1552 Some((transaction_id, op))
1553 } else {
1554 None
1555 }
1556 }
1557
1558 pub fn undo_transaction(&mut self, transaction_id: TransactionId) -> Option<Operation> {
1559 let transaction = self
1560 .history
1561 .remove_from_undo(transaction_id)?
1562 .transaction
1563 .clone();
1564 Some(self.undo_or_redo(transaction))
1565 }
1566
1567 pub fn undo_to_transaction(&mut self, transaction_id: TransactionId) -> Vec<Operation> {
1568 let transactions = self
1569 .history
1570 .remove_from_undo_until(transaction_id)
1571 .iter()
1572 .map(|entry| entry.transaction.clone())
1573 .collect::<Vec<_>>();
1574
1575 transactions
1576 .into_iter()
1577 .map(|transaction| self.undo_or_redo(transaction))
1578 .collect()
1579 }
1580
    /// Removes a transaction from the history without undoing it.
    pub fn forget_transaction(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
        self.history.forget(transaction_id)
    }
1584
    /// Looks up a transaction in the history by id.
    pub fn get_transaction(&self, transaction_id: TransactionId) -> Option<&Transaction> {
        self.history.transaction(transaction_id)
    }
1588
    /// Merges `transaction` into `destination` within the history.
    pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
        self.history.merge_transactions(transaction, destination);
    }
1592
1593 pub fn redo(&mut self) -> Option<(TransactionId, Operation)> {
1594 if let Some(entry) = self.history.pop_redo() {
1595 let transaction = entry.transaction.clone();
1596 let transaction_id = transaction.id;
1597 let op = self.undo_or_redo(transaction);
1598 Some((transaction_id, op))
1599 } else {
1600 None
1601 }
1602 }
1603
1604 pub fn redo_to_transaction(&mut self, transaction_id: TransactionId) -> Vec<Operation> {
1605 let transactions = self
1606 .history
1607 .remove_from_redo(transaction_id)
1608 .iter()
1609 .map(|entry| entry.transaction.clone())
1610 .collect::<Vec<_>>();
1611
1612 transactions
1613 .into_iter()
1614 .map(|transaction| self.undo_or_redo(transaction))
1615 .collect()
1616 }
1617
1618 fn undo_or_redo(&mut self, transaction: Transaction) -> Operation {
1619 let mut counts = HashMap::default();
1620 for edit_id in transaction.edit_ids {
1621 counts.insert(edit_id, self.undo_map.undo_count(edit_id).saturating_add(1));
1622 }
1623
1624 let operation = self.undo_operations(counts);
1625 self.history.push(operation.clone());
1626 operation
1627 }
1628
    /// Builds and locally applies an undo operation for the given per-edit
    /// undo counts, returning the operation for broadcast to other replicas.
    pub fn undo_operations(&mut self, counts: HashMap<clock::Lamport, u32>) -> Operation {
        // Tick first so the operation's timestamp is new, then capture the
        // version *before* observing it: `version` describes the state this
        // undo depends on, not the state including it.
        let timestamp = self.lamport_clock.tick();
        let version = self.version();
        self.snapshot.version.observe(timestamp);
        let undo = UndoOperation {
            timestamp,
            version,
            counts,
        };
        self.apply_undo(&undo);
        Operation::Undo(undo)
    }
1641
    /// Pushes a transaction onto the undo history, grouping with recent
    /// transactions according to the history's group interval.
    pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
        self.history.push_transaction(transaction, now);
    }
1645
    /// Differs from `push_transaction` in that it does not clear the redo
    /// stack. Intended to be used to create a parent transaction to merge
    /// potential child transactions into.
    ///
    /// The caller is responsible for removing it from the undo history using
    /// `forget_transaction` if no edits are merged into it. Otherwise, if edits
    /// are merged into this transaction, the caller is responsible for ensuring
    /// the redo stack is cleared. The easiest way to ensure the redo stack is
    /// cleared is to create transactions with the usual `start_transaction` and
    /// `end_transaction` methods and merging the resulting transactions into
    /// the transaction created by this method
    pub fn push_empty_transaction(&mut self, now: Instant) -> TransactionId {
        self.history
            .push_empty_transaction(self.version.clone(), now, &mut self.lamport_clock)
    }
1663
    /// Returns the ranges (in the current text, as dimension `D`) edited by
    /// the transaction with the given id. Yields nothing if the transaction
    /// is not in the history.
    pub fn edited_ranges_for_transaction_id<D>(
        &self,
        transaction_id: TransactionId,
    ) -> impl '_ + Iterator<Item = Range<D>>
    where
        D: TextDimension,
    {
        self.history
            .transaction(transaction_id)
            .into_iter()
            .flat_map(|transaction| self.edited_ranges_for_transaction(transaction))
    }
1676
    /// Returns the current-text ranges covered by the fragments of the given
    /// edits, as dimension `D`. Adjacent ranges are coalesced; fragments that
    /// are no longer visible contribute empty ranges at their position.
    pub fn edited_ranges_for_edit_ids<'a, D>(
        &'a self,
        edit_ids: impl IntoIterator<Item = &'a clock::Lamport>,
    ) -> impl 'a + Iterator<Item = Range<D>>
    where
        D: TextDimension,
    {
        // get fragment ranges
        let mut cursor = self
            .fragments
            .cursor::<Dimensions<Option<&Locator>, usize>>(&None);
        let offset_ranges = self
            .fragment_ids_for_edits(edit_ids.into_iter())
            .into_iter()
            .filter_map(move |fragment_id| {
                // Locators arrive sorted, so a forward-only seek suffices.
                cursor.seek_forward(&Some(fragment_id), Bias::Left);
                let fragment = cursor.item()?;
                let start_offset = cursor.start().1;
                // Deleted fragments occupy zero visible width.
                let end_offset = start_offset
                    + if fragment.visible {
                        fragment.len as usize
                    } else {
                        0
                    };
                Some(start_offset..end_offset)
            });

        // combine adjacent ranges
        let mut prev_range: Option<Range<usize>> = None;
        let disjoint_ranges = offset_ranges
            .map(Some)
            .chain([None]) // sentinel flushes the final pending range
            .filter_map(move |range| {
                if let Some((range, prev_range)) = range.as_ref().zip(prev_range.as_mut())
                    && prev_range.end == range.start
                {
                    prev_range.end = range.end;
                    return None;
                }
                let result = prev_range.clone();
                prev_range = range;
                result
            });

        // convert to the desired text dimension.
        let mut position = D::zero(());
        let mut rope_cursor = self.visible_text.cursor(0);
        disjoint_ranges.map(move |range| {
            // `summary` advances the cursor, so `position` accumulates the
            // dimension from the rope start to each range boundary.
            position.add_assign(&rope_cursor.summary(range.start));
            let start = position;
            position.add_assign(&rope_cursor.summary(range.end));
            let end = position;
            start..end
        })
    }
1732
    /// Returns the current-text ranges (as dimension `D`) edited by the given
    /// transaction.
    pub fn edited_ranges_for_transaction<'a, D>(
        &'a self,
        transaction: &'a Transaction,
    ) -> impl 'a + Iterator<Item = Range<D>>
    where
        D: TextDimension,
    {
        self.edited_ranges_for_edit_ids(&transaction.edit_ids)
    }
1742
    /// Registers a new subscriber that will receive a patch for every
    /// subsequent change to the buffer.
    pub fn subscribe(&mut self) -> Subscription<usize> {
        self.subscriptions.subscribe()
    }
1746
1747 pub fn wait_for_edits<It: IntoIterator<Item = clock::Lamport>>(
1748 &mut self,
1749 edit_ids: It,
1750 ) -> impl 'static + Future<Output = Result<()>> + use<It> {
1751 let mut futures = Vec::new();
1752 for edit_id in edit_ids {
1753 if !self.version.observed(edit_id) {
1754 let (tx, rx) = oneshot::channel();
1755 self.edit_id_resolvers.entry(edit_id).or_default().push(tx);
1756 futures.push(rx);
1757 }
1758 }
1759
1760 async move {
1761 for mut future in futures {
1762 if future.recv().await.is_none() {
1763 anyhow::bail!("gave up waiting for edits");
1764 }
1765 }
1766 Ok(())
1767 }
1768 }
1769
1770 pub fn wait_for_anchors<It: IntoIterator<Item = Anchor>>(
1771 &mut self,
1772 anchors: It,
1773 ) -> impl 'static + Future<Output = Result<()>> + use<It> {
1774 let mut futures = Vec::new();
1775 for anchor in anchors {
1776 if !self.version.observed(anchor.timestamp()) && !anchor.is_max() && !anchor.is_min() {
1777 let (tx, rx) = oneshot::channel();
1778 self.edit_id_resolvers
1779 .entry(anchor.timestamp())
1780 .or_default()
1781 .push(tx);
1782 futures.push(rx);
1783 }
1784 }
1785
1786 async move {
1787 for mut future in futures {
1788 if future.recv().await.is_none() {
1789 anyhow::bail!("gave up waiting for anchors");
1790 }
1791 }
1792 Ok(())
1793 }
1794 }
1795
1796 pub fn wait_for_version(
1797 &mut self,
1798 version: clock::Global,
1799 ) -> impl Future<Output = Result<()>> + use<> {
1800 let mut rx = None;
1801 if !self.snapshot.version.observed_all(&version) {
1802 let channel = oneshot::channel();
1803 self.wait_for_version_txs.push((version, channel.0));
1804 rx = Some(channel.1);
1805 }
1806 async move {
1807 if let Some(mut rx) = rx
1808 && rx.recv().await.is_none()
1809 {
1810 anyhow::bail!("gave up waiting for version");
1811 }
1812 Ok(())
1813 }
1814 }
1815
    /// Cancels all pending `wait_for_edits`, `wait_for_anchors`, and
    /// `wait_for_version` futures; dropping their senders makes those
    /// futures resolve with an error.
    pub fn give_up_waiting(&mut self) {
        self.edit_id_resolvers.clear();
        self.wait_for_version_txs.clear();
    }
1820
1821 fn resolve_edit(&mut self, edit_id: clock::Lamport) {
1822 for mut tx in self
1823 .edit_id_resolvers
1824 .remove(&edit_id)
1825 .into_iter()
1826 .flatten()
1827 {
1828 tx.try_send(()).ok();
1829 }
1830 }
1831
    /// Sets the time window within which consecutive transactions are grouped
    /// into a single undo entry.
    pub fn set_group_interval(&mut self, group_interval: Duration) {
        self.history.group_interval = group_interval;
    }
1835}
1836
#[cfg(any(test, feature = "test-support"))]
impl Buffer {
    /// Applies the edits described by `marked_string` to the buffer
    /// (see `edits_for_marked_text` for the format).
    #[track_caller]
    pub fn edit_via_marked_text(&mut self, marked_string: &str) {
        let edits = self.edits_for_marked_text(marked_string);
        self.edit(edits);
    }

    /// Computes the edits that transform the buffer's current text into the
    /// text described by `marked_string`, where marked ranges identify the
    /// inserted spans. With no markers, the whole string is one replacement.
    /// Panics if `marked_string` cannot be derived from the current text.
    #[track_caller]
    pub fn edits_for_marked_text(&self, marked_string: &str) -> Vec<(Range<usize>, String)> {
        let old_text = self.text();
        let (new_text, mut ranges) = util::test::marked_text_ranges(marked_string, false);
        if ranges.is_empty() {
            ranges.push(0..new_text.len());
        }

        // Text before the first insertion must be unchanged.
        assert_eq!(
            old_text[..ranges[0].start],
            new_text[..ranges[0].start],
            "invalid edit"
        );

        // `delta` tracks the running length difference between new and old
        // text, mapping new-text offsets back to old-text offsets.
        let mut delta = 0;
        let mut edits = Vec::new();
        let mut ranges = ranges.into_iter().peekable();

        while let Some(inserted_range) = ranges.next() {
            let new_start = inserted_range.start;
            let old_start = (new_start as isize - delta) as usize;

            // The unmarked text between this insertion and the next anchors
            // the search for how much old text this edit deleted.
            let following_text = if let Some(next_range) = ranges.peek() {
                &new_text[inserted_range.end..next_range.start]
            } else {
                &new_text[inserted_range.end..]
            };

            let inserted_len = inserted_range.len();
            let deleted_len = old_text[old_start..]
                .find(following_text)
                .expect("invalid edit");

            let old_range = old_start..old_start + deleted_len;
            edits.push((old_range, new_text[inserted_range].to_string()));
            delta += inserted_len as isize - deleted_len as isize;
        }

        // Sanity check: the accumulated delta must account for the total
        // length change.
        assert_eq!(
            old_text.len() as isize + delta,
            new_text.len() as isize,
            "invalid edit"
        );

        edits
    }

    /// Asserts the buffer's internal data structures are mutually consistent.
    /// Intended for use in randomized tests after each mutation.
    pub fn check_invariants(&self) {
        // Ensure every fragment is ordered by locator in the fragment tree and corresponds
        // to an insertion fragment in the insertions tree.
        let mut prev_fragment_id = Locator::min();
        for fragment in self.snapshot.fragments.items(&None) {
            assert!(fragment.id > prev_fragment_id);
            prev_fragment_id = fragment.id.clone();

            let insertion_fragment = self
                .snapshot
                .insertions
                .get(
                    &InsertionFragmentKey {
                        timestamp: fragment.timestamp,
                        split_offset: fragment.insertion_offset,
                    },
                    (),
                )
                .unwrap();
            assert_eq!(
                insertion_fragment.fragment_id, fragment.id,
                "fragment: {:?}\ninsertion: {:?}",
                fragment, insertion_fragment
            );
        }

        // Conversely, every insertion fragment must point back at a fragment
        // with matching timestamp and offset.
        let mut cursor = self.snapshot.fragments.cursor::<Option<&Locator>>(&None);
        for insertion_fragment in self.snapshot.insertions.cursor::<()>(()) {
            cursor.seek(&Some(&insertion_fragment.fragment_id), Bias::Left);
            let fragment = cursor.item().unwrap();
            assert_eq!(insertion_fragment.fragment_id, fragment.id);
            assert_eq!(insertion_fragment.split_offset, fragment.insertion_offset);
        }

        // The fragment tree's summaries must agree with the ropes' lengths.
        let fragment_summary = self.snapshot.fragments.summary();
        assert_eq!(
            fragment_summary.text.visible,
            self.snapshot.visible_text.len()
        );
        assert_eq!(
            fragment_summary.text.deleted,
            self.snapshot.deleted_text.len()
        );

        // Line endings are normalized, so CRLF must never appear.
        assert!(!self.text().contains("\r\n"));
    }

    /// Returns a random clipped byte range starting at or after
    /// `start_offset`.
    pub fn random_byte_range(&self, start_offset: usize, rng: &mut impl rand::Rng) -> Range<usize> {
        let end = self.clip_offset(rng.random_range(start_offset..=self.len()), Bias::Right);
        let start = self.clip_offset(rng.random_range(start_offset..=end), Bias::Right);
        start..end
    }

    /// Generates up to `edit_count` random, disjoint, ascending edits against
    /// the current text.
    pub fn get_random_edits<T>(
        &self,
        rng: &mut T,
        edit_count: usize,
    ) -> Vec<(Range<usize>, Arc<str>)>
    where
        T: rand::Rng,
    {
        let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
        let mut last_end = None;
        for _ in 0..edit_count {
            if last_end.is_some_and(|last_end| last_end >= self.len()) {
                break;
            }
            // Leave at least one byte between consecutive edits so they
            // remain disjoint.
            let new_start = last_end.map_or(0, |last_end| last_end + 1);
            let range = self.random_byte_range(new_start, rng);
            last_end = Some(range.end);

            let new_text_len = rng.random_range(0..10);
            let new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();

            edits.push((range, new_text.into()));
        }
        edits
    }

    /// Applies `edit_count` random edits and returns them (with the text the
    /// buffer actually inserted) along with the resulting operation.
    pub fn randomly_edit<T>(
        &mut self,
        rng: &mut T,
        edit_count: usize,
    ) -> (Vec<(Range<usize>, Arc<str>)>, Operation)
    where
        T: rand::Rng,
    {
        let mut edits = self.get_random_edits(rng, edit_count);
        log::info!("mutating buffer {:?} with {:?}", self.replica_id, edits);

        let op = self.edit(edits.iter().cloned());
        if let Operation::Edit(edit) = &op {
            assert_eq!(edits.len(), edit.new_text.len());
            // Report the text exactly as stored in the operation (the edit
            // may normalize it).
            for (edit, new_text) in edits.iter_mut().zip(&edit.new_text) {
                edit.1 = new_text.clone();
            }
        } else {
            unreachable!()
        }

        (edits, op)
    }

    /// Undoes between one and five randomly-chosen transactions from the undo
    /// stack, returning the resulting operations.
    pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng) -> Vec<Operation> {
        use rand::prelude::*;

        let mut ops = Vec::new();
        for _ in 0..rng.random_range(1..=5) {
            if let Some(entry) = self.history.undo_stack.choose(rng) {
                let transaction = entry.transaction.clone();
                log::info!(
                    "undoing buffer {:?} transaction {:?}",
                    self.replica_id,
                    transaction
                );
                ops.push(self.undo_or_redo(transaction));
            }
        }
        ops
    }
}
2013
impl Deref for Buffer {
    type Target = BufferSnapshot;

    /// Dereferences to the current snapshot, exposing every read-only
    /// `BufferSnapshot` method directly on `Buffer`.
    fn deref(&self) -> &Self::Target {
        &self.snapshot
    }
}
2021
2022impl BufferSnapshot {
    /// Returns the visible text as a rope.
    pub fn as_rope(&self) -> &Rope {
        &self.visible_text
    }
2026
    /// Reconstructs the buffer's text as it was at the given version.
    ///
    /// Walks only the fragments whose maximal version is not fully covered by
    /// `version` (others are copied in bulk from the visible rope), pulling
    /// each fragment's text from the visible or deleted rope depending on
    /// where it lives now, and including it only if it was visible at
    /// `version`.
    pub fn rope_for_version(&self, version: &clock::Global) -> Rope {
        let mut rope = Rope::new();

        // Filter cursor stops only at fragments that changed after `version`.
        let mut cursor = self
            .fragments
            .filter::<_, FragmentTextSummary>(&None, move |summary| {
                !version.observed_all(&summary.max_version)
            });
        cursor.next();

        let mut visible_cursor = self.visible_text.cursor(0);
        let mut deleted_cursor = self.deleted_text.cursor(0);

        while let Some(fragment) = cursor.item() {
            // Copy the unchanged visible text skipped over by the filter.
            if cursor.start().visible > visible_cursor.offset() {
                let text = visible_cursor.slice(cursor.start().visible);
                rope.append(text);
            }

            if fragment.was_visible(version, &self.undo_map) {
                if fragment.visible {
                    let text = visible_cursor.slice(cursor.end().visible);
                    rope.append(text);
                } else {
                    // Visible then, deleted now: fetch from the deleted rope.
                    deleted_cursor.seek_forward(cursor.start().deleted);
                    let text = deleted_cursor.slice(cursor.end().deleted);
                    rope.append(text);
                }
            } else if fragment.visible {
                // Not visible at `version`: skip its current visible text.
                visible_cursor.seek_forward(cursor.end().visible);
            }

            cursor.next();
        }

        // Copy any unchanged visible text after the last changed fragment.
        if cursor.start().visible > visible_cursor.offset() {
            let text = visible_cursor.slice(cursor.start().visible);
            rope.append(text);
        }

        rope
    }
2069
    /// Returns the buffer's id, shared by all replicas.
    pub fn remote_id(&self) -> BufferId {
        self.remote_id
    }
2073
    /// Returns the id of the replica this snapshot belongs to.
    pub fn replica_id(&self) -> ReplicaId {
        self.replica_id
    }
2077
    /// Returns the number of rows, counting the trailing line even if empty.
    pub fn row_count(&self) -> u32 {
        self.max_point().row + 1
    }
2081
    /// Returns the length of the visible text, in bytes.
    pub fn len(&self) -> usize {
        self.visible_text.len()
    }
2085
    /// Returns `true` if the visible text is empty.
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }
2089
    /// Iterates over all characters of the visible text.
    pub fn chars(&self) -> impl Iterator<Item = char> + '_ {
        self.chars_at(0)
    }
2093
    /// Iterates over the characters within the given range.
    pub fn chars_for_range<T: ToOffset>(&self, range: Range<T>) -> impl Iterator<Item = char> + '_ {
        self.text_for_range(range).flat_map(str::chars)
    }
2097
    /// Iterates over the characters within the given range in reverse order.
    pub fn reversed_chars_for_range<T: ToOffset>(
        &self,
        range: Range<T>,
    ) -> impl Iterator<Item = char> + '_ {
        self.reversed_chunks_in_range(range)
            .flat_map(|chunk| chunk.chars().rev())
    }
2105
2106 pub fn contains_str_at<T>(&self, position: T, needle: &str) -> bool
2107 where
2108 T: ToOffset,
2109 {
2110 let position = position.to_offset(self);
2111 position == self.clip_offset(position, Bias::Left)
2112 && self
2113 .bytes_in_range(position..self.len())
2114 .flatten()
2115 .copied()
2116 .take(needle.len())
2117 .eq(needle.bytes())
2118 }
2119
    /// Returns the range of the longest prefix of `needle` that appears,
    /// compared case-insensitively, immediately before `position`.
    ///
    /// The returned range always ends at `position`; its start is `position`
    /// moved back by the length of the matched prefix (possibly zero).
    pub fn common_prefix_at<T>(&self, position: T, needle: &str) -> Range<T>
    where
        T: ToOffset + TextDimension,
    {
        let offset = position.to_offset(self);
        let common_prefix_len = needle
            .char_indices()
            .map(|(index, _)| index)
            // Also consider the entire needle, not just its proper prefixes.
            .chain([needle.len()])
            // A prefix longer than the text before `position` cannot match.
            .take_while(|&len| len <= offset)
            .filter(|&len| {
                // Compare the `len` bytes of buffer text preceding `position`
                // against the needle prefix of the same byte length,
                // lowercasing both sides.
                let left = self
                    .chars_for_range(offset - len..offset)
                    .flat_map(char::to_lowercase);
                let right = needle[..len].chars().flat_map(char::to_lowercase);
                left.eq(right)
            })
            .last()
            .unwrap_or(0);
        let start_offset = offset - common_prefix_len;
        let start = self.text_summary_for_range(0..start_offset);
        start..position
    }
2143
    /// Returns the entire visible text as a `String`.
    pub fn text(&self) -> String {
        self.visible_text.to_string()
    }

    /// The line ending style recorded for this buffer.
    pub fn line_ending(&self) -> LineEnding {
        self.line_ending
    }

    /// Returns all currently-deleted text as a `String`.
    pub fn deleted_text(&self) -> String {
        self.deleted_text.to_string()
    }

    /// A summary of the visible text (lengths in multiple coordinate spaces).
    pub fn text_summary(&self) -> TextSummary {
        self.visible_text.summary()
    }

    /// The maximum valid `Point` in the visible text.
    pub fn max_point(&self) -> Point {
        self.visible_text.max_point()
    }

    /// The maximum valid point in UTF-16 coordinates.
    pub fn max_point_utf16(&self) -> PointUtf16 {
        self.visible_text.max_point_utf16()
    }

    // The coordinate conversions below all delegate to the underlying rope,
    // which maintains the necessary indices for each coordinate space.

    pub fn point_to_offset(&self, point: Point) -> usize {
        self.visible_text.point_to_offset(point)
    }

    pub fn point_to_offset_utf16(&self, point: Point) -> OffsetUtf16 {
        self.visible_text.point_to_offset_utf16(point)
    }

    pub fn point_utf16_to_offset_utf16(&self, point: PointUtf16) -> OffsetUtf16 {
        self.visible_text.point_utf16_to_offset_utf16(point)
    }

    pub fn point_utf16_to_offset(&self, point: PointUtf16) -> usize {
        self.visible_text.point_utf16_to_offset(point)
    }

    pub fn unclipped_point_utf16_to_offset(&self, point: Unclipped<PointUtf16>) -> usize {
        self.visible_text.unclipped_point_utf16_to_offset(point)
    }

    pub fn unclipped_point_utf16_to_point(&self, point: Unclipped<PointUtf16>) -> Point {
        self.visible_text.unclipped_point_utf16_to_point(point)
    }

    pub fn offset_utf16_to_offset(&self, offset: OffsetUtf16) -> usize {
        self.visible_text.offset_utf16_to_offset(offset)
    }

    pub fn offset_to_offset_utf16(&self, offset: usize) -> OffsetUtf16 {
        self.visible_text.offset_to_offset_utf16(offset)
    }

    pub fn offset_to_point(&self, offset: usize) -> Point {
        self.visible_text.offset_to_point(offset)
    }

    pub fn offset_to_point_utf16(&self, offset: usize) -> PointUtf16 {
        self.visible_text.offset_to_point_utf16(offset)
    }

    pub fn point_to_point_utf16(&self, point: Point) -> PointUtf16 {
        self.visible_text.point_to_point_utf16(point)
    }

    pub fn point_utf16_to_point(&self, point: PointUtf16) -> Point {
        self.visible_text.point_utf16_to_point(point)
    }

    /// The version vector describing which operations this snapshot reflects.
    pub fn version(&self) -> &clock::Global {
        &self.version
    }

    /// Iterates over the characters starting at the given position.
    pub fn chars_at<T: ToOffset>(&self, position: T) -> impl Iterator<Item = char> + '_ {
        let offset = position.to_offset(self);
        self.visible_text.chars_at(offset)
    }

    /// Iterates backwards over the characters preceding the given position.
    pub fn reversed_chars_at<T: ToOffset>(&self, position: T) -> impl Iterator<Item = char> + '_ {
        let offset = position.to_offset(self);
        self.visible_text.reversed_chars_at(offset)
    }

    /// Iterates over the chunks of the given range, last chunk first.
    pub fn reversed_chunks_in_range<T: ToOffset>(&self, range: Range<T>) -> rope::Chunks<'_> {
        let range = range.start.to_offset(self)..range.end.to_offset(self);
        self.visible_text.reversed_chunks_in_range(range)
    }

    /// Iterates over the raw bytes of the given range.
    pub fn bytes_in_range<T: ToOffset>(&self, range: Range<T>) -> rope::Bytes<'_> {
        let start = range.start.to_offset(self);
        let end = range.end.to_offset(self);
        self.visible_text.bytes_in_range(start..end)
    }

    /// Iterates over the raw bytes of the given range, in reverse order.
    pub fn reversed_bytes_in_range<T: ToOffset>(&self, range: Range<T>) -> rope::Bytes<'_> {
        let start = range.start.to_offset(self);
        let end = range.end.to_offset(self);
        self.visible_text.reversed_bytes_in_range(start..end)
    }

    /// Iterates over the text chunks within the given range.
    pub fn text_for_range<T: ToOffset>(&self, range: Range<T>) -> Chunks<'_> {
        let start = range.start.to_offset(self);
        let end = range.end.to_offset(self);
        self.visible_text.chunks_in_range(start..end)
    }

    /// The length of the given row in bytes, excluding the line terminator.
    pub fn line_len(&self, row: u32) -> u32 {
        let row_start_offset = Point::new(row, 0).to_offset(self);
        let row_end_offset = if row >= self.max_point().row {
            // Last row: runs to the end of the buffer.
            self.len()
        } else {
            // Otherwise stop just before the next row's start (i.e. before
            // the newline).
            Point::new(row + 1, 0).to_previous_offset(self)
        };
        (row_end_offset - row_start_offset) as u32
    }
2262
    /// A function to convert character offsets from e.g. user's `go.mod:22:33` input into byte-offset Point columns.
    ///
    /// `row` is clamped to the buffer, and `characters` is interpreted as a
    /// character count within that row; the result's column is the
    /// corresponding byte offset. If the row has fewer characters, the point
    /// past the last scanned character is returned.
    pub fn point_from_external_input(&self, row: u32, characters: u32) -> Point {
        const MAX_BYTES_IN_UTF_8: u32 = 4;

        let row = row.min(self.max_point().row);
        let start = Point::new(row, 0);
        // Over-estimate the byte column (each char is at most 4 bytes in
        // UTF-8), then clip back to a valid position within the row.
        let end = self.clip_point(
            Point::new(
                row,
                characters
                    .saturating_mul(MAX_BYTES_IN_UTF_8)
                    .saturating_add(1),
            ),
            Bias::Right,
        );
        let range = start..end;
        let mut point = range.start;
        let mut remaining_columns = characters;

        // Walk the row's characters, converting the character count into an
        // accumulated byte column.
        for chunk in self.text_for_range(range) {
            for character in chunk.chars() {
                if remaining_columns == 0 {
                    return point;
                }
                remaining_columns -= 1;
                point.column += character.len_utf8() as u32;
            }
        }
        point
    }
2293
    /// Returns the indentation of each line in the given row range,
    /// inclusive of the end row, as `(row, indent)` pairs.
    pub fn line_indents_in_row_range(
        &self,
        row_range: Range<u32>,
    ) -> impl Iterator<Item = (u32, LineIndent)> + '_ {
        let start = Point::new(row_range.start, 0).to_offset(self);
        let end = Point::new(row_range.end, self.line_len(row_range.end)).to_offset(self);

        let mut chunks = self.as_rope().chunks_in_range(start..end);
        let mut row = row_range.start;
        let mut done = false;
        std::iter::from_fn(move || {
            if done {
                None
            } else {
                // Measure the current line's indent, then advance the chunk
                // cursor to the next line; stop once there are no more lines.
                let indent = (row, LineIndent::from_chunks(&mut chunks));
                done = !chunks.next_line();
                row += 1;
                Some(indent)
            }
        })
    }

    /// Returns the line indents in the given row range, exclusive of end row, in reversed order.
    pub fn reversed_line_indents_in_row_range(
        &self,
        row_range: Range<u32>,
    ) -> impl Iterator<Item = (u32, LineIndent)> + '_ {
        let start = Point::new(row_range.start, 0).to_offset(self);

        let end_point;
        let end;
        if row_range.end > row_range.start {
            // Last row to report is `row_range.end - 1` (end-exclusive).
            end_point = Point::new(row_range.end - 1, self.line_len(row_range.end - 1));
            end = end_point.to_offset(self);
        } else {
            // Empty range: degenerate cursor over zero bytes.
            end_point = Point::new(row_range.start, 0);
            end = start;
        };

        let mut chunks = self.as_rope().chunks_in_range(start..end);
        // Move the cursor to the start of the last line if it's not empty.
        chunks.seek(end);
        if end_point.column > 0 {
            chunks.prev_line();
        }

        let mut row = end_point.row;
        let mut done = false;
        std::iter::from_fn(move || {
            if done {
                None
            } else {
                let initial_offset = chunks.offset();
                let indent = (row, LineIndent::from_chunks(&mut chunks));
                // `from_chunks` may have advanced past the indent; rewind to
                // the start of the line we just measured before stepping back.
                if chunks.offset() > initial_offset {
                    chunks.prev_line();
                }
                done = !chunks.prev_line();
                if !done {
                    row -= 1;
                }

                Some(indent)
            }
        })
    }
2360
    /// Returns the indentation of the line at the given row.
    pub fn line_indent_for_row(&self, row: u32) -> LineIndent {
        LineIndent::from_iter(self.chars_at(Point::new(row, 0)))
    }
2364
2365 pub fn is_line_blank(&self, row: u32) -> bool {
2366 self.text_for_range(Point::new(row, 0)..Point::new(row, self.line_len(row)))
2367 .all(|chunk| chunk.matches(|c: char| !c.is_whitespace()).next().is_none())
2368 }
2369
    /// Computes a text summary (in dimension `D`) for the given range.
    pub fn text_summary_for_range<D, O: ToOffset>(&self, range: Range<O>) -> D
    where
        D: TextDimension,
    {
        self.visible_text
            .cursor(range.start.to_offset(self))
            .summary(range.end.to_offset(self))
    }
2378
    /// Resolves each anchor to its position, expressed in dimension `D`.
    ///
    /// NOTE(review): because resolution seeks cursors forward only (see
    /// `summaries_for_anchors_with_payload`), anchors appear to be required
    /// in buffer order — confirm with callers.
    pub fn summaries_for_anchors<'a, D, A>(&'a self, anchors: A) -> impl 'a + Iterator<Item = D>
    where
        D: 'a + TextDimension,
        A: 'a + IntoIterator<Item = Anchor>,
    {
        let anchors = anchors.into_iter();
        self.summaries_for_anchors_with_payload::<D, _, ()>(anchors.map(|a| (a, ())))
            .map(|d| d.0)
    }

    /// Resolves each `(anchor, payload)` pair to `(position, payload)`.
    ///
    /// Positions are accumulated with a single forward pass over the fragment
    /// tree and visible text, so the anchors must arrive in buffer order.
    ///
    /// # Panics
    /// Panics if an anchor's insertion cannot be found in this snapshot.
    pub fn summaries_for_anchors_with_payload<'a, D, A, T>(
        &'a self,
        anchors: A,
    ) -> impl 'a + Iterator<Item = (D, T)>
    where
        D: 'a + TextDimension,
        A: 'a + IntoIterator<Item = (Anchor, T)>,
    {
        let anchors = anchors.into_iter();
        let mut fragment_cursor = self
            .fragments
            .cursor::<Dimensions<Option<&Locator>, usize>>(&None);
        let mut text_cursor = self.visible_text.cursor(0);
        let mut position = D::zero(());

        anchors.map(move |(anchor, payload)| {
            // The sentinel anchors resolve without a fragment lookup.
            if anchor.is_min() {
                return (D::zero(()), payload);
            } else if anchor.is_max() {
                return (D::from_text_summary(&self.visible_text.summary()), payload);
            }

            let Some(insertion) = self.try_find_fragment(&anchor) else {
                panic!(
                    "invalid insertion for buffer {}@{:?} with anchor {:?}",
                    self.remote_id(),
                    self.version,
                    anchor
                );
            };
            // TODO verbose debug because we are seeing is_max return false unexpectedly,
            // remove this once that is understood and fixed
            assert_eq!(
                insertion.timestamp,
                anchor.timestamp(),
                "invalid insertion for buffer {}@{:?}. anchor: {:?}, {:?}, {:?}, {:?}, {:?}. timestamp: {:?}, offset: {:?}, bias: {:?}",
                self.remote_id(),
                self.version,
                anchor.timestamp_replica_id,
                anchor.timestamp_value,
                anchor.offset,
                anchor.bias,
                anchor.buffer_id,
                anchor.timestamp() == clock::Lamport::MAX,
                anchor.offset == u32::MAX,
                anchor.bias == Bias::Right,
            );

            // Seek to the fragment containing this anchor's insertion; only
            // visible fragments contribute an intra-fragment offset.
            fragment_cursor.seek_forward(&Some(&insertion.fragment_id), Bias::Left);
            let fragment = fragment_cursor.item().unwrap();
            let mut fragment_offset = fragment_cursor.start().1;
            if fragment.visible {
                fragment_offset += (anchor.offset - insertion.split_offset) as usize;
            }

            // Accumulate the dimension from the previous anchor's position.
            position.add_assign(&text_cursor.summary(fragment_offset));
            (position, payload)
        })
    }
2448
    /// Resolves a single anchor to its position in dimension `D`.
    pub fn summary_for_anchor<D>(&self, anchor: &Anchor) -> D
    where
        D: TextDimension,
    {
        self.text_summary_for_range(0..self.offset_for_anchor(anchor))
    }

    /// Resolves a single anchor to a byte offset in the visible text.
    ///
    /// # Panics
    /// Panics (via `panic_bad_anchor`) if the anchor belongs to another
    /// buffer, references an unobserved operation, or its insertion cannot
    /// be found.
    pub fn offset_for_anchor(&self, anchor: &Anchor) -> usize {
        if anchor.is_min() {
            0
        } else if anchor.is_max() {
            self.visible_text.len()
        } else {
            debug_assert_eq!(anchor.buffer_id, self.remote_id);
            debug_assert!(
                self.version.observed(anchor.timestamp()),
                "Anchor timestamp {:?} not observed by buffer {:?}",
                anchor.timestamp(),
                self.version
            );
            let item = self.try_find_fragment(anchor);
            let Some(insertion) =
                item.filter(|insertion| insertion.timestamp == anchor.timestamp())
            else {
                self.panic_bad_anchor(anchor);
            };

            // Locate the fragment in the tree to get its starting visible
            // offset, then add the anchor's offset within the insertion.
            let (start, _, item) = self
                .fragments
                .find::<Dimensions<Option<&Locator>, usize>, _>(
                    &None,
                    &Some(&insertion.fragment_id),
                    Bias::Left,
                );
            let fragment = item.unwrap();
            let mut fragment_offset = start.1;
            if fragment.visible {
                fragment_offset += (anchor.offset - insertion.split_offset) as usize;
            }
            fragment_offset
        }
    }
2491
    /// Panics with the most specific diagnosis of why `anchor` is invalid
    /// for this snapshot. Marked `#[cold]` to keep it off the hot path.
    #[cold]
    fn panic_bad_anchor(&self, anchor: &Anchor) -> ! {
        if anchor.buffer_id != self.remote_id {
            panic!(
                "invalid anchor - buffer id does not match: anchor {anchor:?}; buffer id: {}, version: {:?}",
                self.remote_id, self.version
            );
        } else if !self.version.observed(anchor.timestamp()) {
            panic!(
                "invalid anchor - snapshot has not observed lamport: {:?}; version: {:?}",
                anchor, self.version
            );
        } else {
            panic!(
                "invalid anchor {:?}. buffer id: {}, version: {:?}",
                anchor, self.remote_id, self.version
            );
        }
    }

    /// Returns the locator of the fragment the anchor points into,
    /// panicking if the anchor is invalid for this snapshot.
    fn fragment_id_for_anchor(&self, anchor: &Anchor) -> &Locator {
        self.try_fragment_id_for_anchor(anchor)
            .unwrap_or_else(|| self.panic_bad_anchor(anchor))
    }

    /// Like `fragment_id_for_anchor`, but returns `None` instead of
    /// panicking when the anchor cannot be resolved.
    fn try_fragment_id_for_anchor(&self, anchor: &Anchor) -> Option<&Locator> {
        if anchor.is_min() {
            Some(Locator::min_ref())
        } else if anchor.is_max() {
            Some(Locator::max_ref())
        } else {
            let item = self.try_find_fragment(anchor);
            // In release builds the timestamp check is skipped; in debug
            // builds a mismatched insertion is treated as unresolvable.
            item.filter(|insertion| {
                !cfg!(debug_assertions) || insertion.timestamp == anchor.timestamp()
            })
            .map(|insertion| &insertion.fragment_id)
        }
    }
2530
    /// Finds the insertion fragment containing the anchor's
    /// `(timestamp, offset)` position, honoring the anchor's bias.
    fn try_find_fragment(&self, anchor: &Anchor) -> Option<&InsertionFragment> {
        let anchor_key = InsertionFragmentKey {
            timestamp: anchor.timestamp(),
            split_offset: anchor.offset,
        };
        match self.insertions.find_with_prev::<InsertionFragmentKey, _>(
            (),
            &anchor_key,
            anchor.bias,
        ) {
            (_, _, Some((prev, insertion))) => {
                let comparison = sum_tree::KeyedItem::key(insertion).cmp(&anchor_key);
                // Fall back to the previous fragment when the found one
                // starts after the anchor, or when a left-biased anchor sits
                // exactly on a fragment boundary (it belongs to the fragment
                // that ends there, not the one that starts there).
                if comparison == Ordering::Greater
                    || (anchor.bias == Bias::Left
                        && comparison == Ordering::Equal
                        && anchor.offset > 0)
                {
                    prev
                } else {
                    Some(insertion)
                }
            }
            // No fragment at or after the key: the anchor must fall in the
            // last insertion fragment, if any.
            _ => self.insertions.last(),
        }
    }
2556
    /// Returns an anchor range for the given input position range that is anchored to the text in the range.
    pub fn anchor_range_inside<T: ToOffset>(&self, position: Range<T>) -> Range<Anchor> {
        self.anchor_after(position.start)..self.anchor_before(position.end)
    }

    /// Returns an anchor range for the given input position range that is anchored to the text before and after.
    pub fn anchor_range_outside<T: ToOffset>(&self, position: Range<T>) -> Range<Anchor> {
        self.anchor_before(position.start)..self.anchor_after(position.end)
    }

    /// Returns an anchor for the given input position that is anchored to the text before the position.
    pub fn anchor_before<T: ToOffset>(&self, position: T) -> Anchor {
        self.anchor_at(position, Bias::Left)
    }

    /// Returns an anchor for the given input position that is anchored to the text after the position.
    pub fn anchor_after<T: ToOffset>(&self, position: T) -> Anchor {
        self.anchor_at(position, Bias::Right)
    }

    /// Returns an anchor at the given position with the given bias.
    pub fn anchor_at<T: ToOffset>(&self, position: T, bias: Bias) -> Anchor {
        self.anchor_at_offset(position.to_offset(self), bias)
    }
2580
    /// Creates an anchor at a byte offset, using the min/max sentinel anchors
    /// at the buffer's extremes.
    fn anchor_at_offset(&self, mut offset: usize, bias: Bias) -> Anchor {
        if bias == Bias::Left && offset == 0 {
            Anchor::min_for_buffer(self.remote_id)
        } else if bias == Bias::Right
            // In release builds any out-of-bounds offset is tolerated and
            // mapped to the max anchor; debug builds only accept an exact
            // end-of-buffer offset so out-of-bounds bugs surface elsewhere.
            && ((!cfg!(debug_assertions) && offset >= self.len()) || offset == self.len())
        {
            Anchor::max_for_buffer(self.remote_id)
        } else {
            // Snap offsets that fall inside a multi-byte character to the
            // nearest char boundary in the direction of the bias.
            if !self
                .visible_text
                .assert_char_boundary::<{ cfg!(debug_assertions) }>(offset)
            {
                offset = match bias {
                    Bias::Left => self.visible_text.floor_char_boundary(offset),
                    Bias::Right => self.visible_text.ceil_char_boundary(offset),
                };
            }
            let (start, _, item) = self.fragments.find::<usize, _>(&None, &offset, bias);
            let Some(fragment) = item else {
                // We got a bad offset, likely out of bounds
                debug_panic!(
                    "Failed to find fragment at offset {} (len: {})",
                    offset,
                    self.len()
                );
                return Anchor::max_for_buffer(self.remote_id);
            };
            // Express the position relative to the fragment's insertion.
            let overshoot = offset - start;
            Anchor::new(
                fragment.timestamp,
                fragment.insertion_offset + overshoot as u32,
                bias,
                self.remote_id,
            )
        }
    }
2617
    /// Whether this snapshot can resolve the given anchor: it must be a
    /// sentinel, or belong to this buffer with its operation observed.
    pub fn can_resolve(&self, anchor: &Anchor) -> bool {
        anchor.is_min()
            || anchor.is_max()
            || (self.remote_id == anchor.buffer_id && self.version.observed(anchor.timestamp()))
    }

    /// Clips an offset to a valid char boundary in the given direction.
    pub fn clip_offset(&self, offset: usize, bias: Bias) -> usize {
        self.visible_text.clip_offset(offset, bias)
    }

    /// Clips a point to a valid position in the given direction.
    pub fn clip_point(&self, point: Point, bias: Bias) -> Point {
        self.visible_text.clip_point(point, bias)
    }

    /// Clips a UTF-16 offset to a valid position in the given direction.
    pub fn clip_offset_utf16(&self, offset: OffsetUtf16, bias: Bias) -> OffsetUtf16 {
        self.visible_text.clip_offset_utf16(offset, bias)
    }

    /// Clips a UTF-16 point to a valid position in the given direction.
    pub fn clip_point_utf16(&self, point: Unclipped<PointUtf16>, bias: Bias) -> PointUtf16 {
        self.visible_text.clip_point_utf16(point, bias)
    }
2639
    /// Iterates over the edits applied since the given version, covering the
    /// entire buffer.
    pub fn edits_since<'a, D>(
        &'a self,
        since: &'a clock::Global,
    ) -> impl 'a + Iterator<Item = Edit<D>>
    where
        D: TextDimension + Ord,
    {
        self.edits_since_in_range(
            since,
            Anchor::min_for_buffer(self.remote_id)..Anchor::max_for_buffer(self.remote_id),
        )
    }

    /// Like [`Self::edits_since`], but also yields each edit's anchor range.
    pub fn anchored_edits_since<'a, D>(
        &'a self,
        since: &'a clock::Global,
    ) -> impl 'a + Iterator<Item = (Edit<D>, Range<Anchor>)>
    where
        D: TextDimension + Ord,
    {
        self.anchored_edits_since_in_range(
            since,
            Anchor::min_for_buffer(self.remote_id)..Anchor::max_for_buffer(self.remote_id),
        )
    }

    /// Iterates over the edits applied since the given version that intersect
    /// the given anchor range.
    pub fn edits_since_in_range<'a, D>(
        &'a self,
        since: &'a clock::Global,
        range: Range<Anchor>,
    ) -> impl 'a + Iterator<Item = Edit<D>>
    where
        D: TextDimension + Ord,
    {
        self.anchored_edits_since_in_range(since, range)
            .map(|item| item.0)
    }
2677
    /// Iterates over the edits applied since the given version within the
    /// given anchor range, yielding each edit together with its anchor range.
    pub fn anchored_edits_since_in_range<'a, D>(
        &'a self,
        since: &'a clock::Global,
        range: Range<Anchor>,
    ) -> impl 'a + Iterator<Item = (Edit<D>, Range<Anchor>)>
    where
        D: TextDimension + Ord,
    {
        // Fast path: no edits since that version.
        if *since == self.version {
            return None.into_iter().flatten();
        }
        // Visit only subtrees containing operations not observed by `since`.
        let mut cursor = self.fragments.filter(&None, move |summary| {
            !since.observed_all(&summary.max_version)
        });
        cursor.next();
        let fragments_cursor = Some(cursor);
        let start_fragment_id = self.fragment_id_for_anchor(&range.start);
        let (start, _, item) = self
            .fragments
            .find::<Dimensions<Option<&Locator>, FragmentTextSummary>, _>(
                &None,
                &Some(start_fragment_id),
                Bias::Left,
            );
        // Compute where the range starts in both the visible and deleted
        // text, including the anchor's offset within its fragment.
        let mut visible_start = start.1.visible;
        let mut deleted_start = start.1.deleted;
        if let Some(fragment) = item {
            let overshoot = (range.start.offset - fragment.insertion_offset) as usize;
            if fragment.visible {
                visible_start += overshoot;
            } else {
                deleted_start += overshoot;
            }
        }
        let end_fragment_id = self.fragment_id_for_anchor(&range.end);

        Some(Edits {
            visible_cursor: self.visible_text.cursor(visible_start),
            deleted_cursor: self.deleted_text.cursor(deleted_start),
            fragments_cursor,
            undos: &self.undo_map,
            since,
            old_end: D::zero(()),
            new_end: D::zero(()),
            range: (start_fragment_id, range.start.offset)..(end_fragment_id, range.end.offset),
            buffer_id: self.remote_id,
        })
        .into_iter()
        .flatten()
    }
2728
    /// Whether any edit since the given version changed text within the given
    /// anchor range. A fragment counts as edited when its visibility differs
    /// between `since` and now.
    pub fn has_edits_since_in_range(&self, since: &clock::Global, range: Range<Anchor>) -> bool {
        if *since != self.version {
            let start_fragment_id = self.fragment_id_for_anchor(&range.start);
            let end_fragment_id = self.fragment_id_for_anchor(&range.end);
            // Visit only subtrees containing operations unobserved by `since`.
            let mut cursor = self.fragments.filter::<_, usize>(&None, move |summary| {
                !since.observed_all(&summary.max_version)
            });
            cursor.next();
            while let Some(fragment) = cursor.item() {
                if fragment.id > *end_fragment_id {
                    break;
                }
                if fragment.id > *start_fragment_id {
                    let was_visible = fragment.was_visible(since, &self.undo_map);
                    let is_visible = fragment.visible;
                    if was_visible != is_visible {
                        return true;
                    }
                }
                cursor.next();
            }
        }
        false
    }

    /// Whether any edit since the given version changed text anywhere in the
    /// buffer.
    pub fn has_edits_since(&self, since: &clock::Global) -> bool {
        if *since != self.version {
            let mut cursor = self.fragments.filter::<_, usize>(&None, move |summary| {
                !since.observed_all(&summary.max_version)
            });
            cursor.next();
            while let Some(fragment) = cursor.item() {
                let was_visible = fragment.was_visible(since, &self.undo_map);
                let is_visible = fragment.visible;
                if was_visible != is_visible {
                    return true;
                }
                cursor.next();
            }
        }
        false
    }
2771
    /// Maps a range in the current text to the corresponding range at a prior
    /// version of this buffer.
    pub fn range_to_version(&self, range: Range<usize>, version: &clock::Global) -> Range<usize> {
        let mut offsets = self.offsets_to_version([range.start, range.end], version);
        offsets.next().unwrap()..offsets.next().unwrap()
    }

    /// Converts the given sequence of offsets into their corresponding offsets
    /// at a prior version of this buffer.
    ///
    /// NOTE(review): the edit iterator is consumed forward across calls, so
    /// the offsets appear to be required in non-decreasing order — confirm
    /// with callers.
    pub fn offsets_to_version<'a>(
        &'a self,
        offsets: impl 'a + IntoIterator<Item = usize>,
        version: &'a clock::Global,
    ) -> impl 'a + Iterator<Item = usize> {
        let mut edits = self.edits_since(version).peekable();
        // Ends of the last edit known to lie entirely before the current offset.
        let mut last_old_end = 0;
        let mut last_new_end = 0;
        offsets.into_iter().map(move |new_offset| {
            while let Some(edit) = edits.peek() {
                // Edits are in ascending order; stop at the first edit
                // beginning after this offset.
                if edit.new.start > new_offset {
                    break;
                }

                // Edit lies entirely before the offset: record its ends and
                // keep scanning.
                if edit.new.end <= new_offset {
                    last_new_end = edit.new.end;
                    last_old_end = edit.old.end;
                    edits.next();
                    continue;
                }

                // Offset falls inside this edit: project it into the old
                // range, clamped to the old range's end.
                let overshoot = new_offset - edit.new.start;
                return (edit.old.start + overshoot).min(edit.old.end);
            }

            // Offset lies after all preceding edits: shift by the delta
            // accumulated so far.
            last_old_end + new_offset.saturating_sub(last_new_end)
        })
    }
2807
    /// Visually annotates a position or range with the `Debug` representation of a value. The
    /// callsite of this function is used as a key - previous annotations will be removed.
    #[cfg(debug_assertions)]
    #[track_caller]
    pub fn debug<R, V>(&self, ranges: &R, value: V)
    where
        R: debug::ToDebugRanges,
        V: std::fmt::Debug,
    {
        self.debug_with_key(std::panic::Location::caller(), ranges, value);
    }

    /// Visually annotates a position or range with the `Debug` representation of a value. Previous
    /// debug annotations with the same key will be removed. The key is also used to determine the
    /// annotation's color.
    #[cfg(debug_assertions)]
    pub fn debug_with_key<K, R, V>(&self, key: &K, ranges: &R, value: V)
    where
        K: std::hash::Hash + 'static,
        R: debug::ToDebugRanges,
        V: std::fmt::Debug,
    {
        // Anchor the ranges so annotations stay attached to the text across
        // subsequent edits.
        let ranges = ranges
            .to_debug_ranges(self)
            .into_iter()
            .map(|range| self.anchor_after(range.start)..self.anchor_before(range.end))
            .collect();
        debug::GlobalDebugRanges::with_locked(|debug_ranges| {
            debug_ranges.insert(key, ranges, format!("{value:?}").into());
        });
    }
2839}
2840
/// Accumulates fragments and their running summary, deferring construction of
/// the final `SumTree` until all fragments have been collected.
struct FragmentBuilder {
    fragments: Vec<Fragment>,
    summary: FragmentSummary,
}

impl FragmentBuilder {
    /// Seeds the builder with the contents and summary of an existing tree.
    fn new(init: SumTree<Fragment>) -> Self {
        Self {
            summary: init.summary().clone(),
            fragments: init.iter().cloned().collect(),
        }
    }
    /// Appends all fragments from `items`, folding in their summary.
    fn append(&mut self, items: SumTree<Fragment>, cx: &Option<clock::Global>) {
        self.summary.add_summary(items.summary(), cx);
        self.fragments.extend(items.iter().cloned());
    }
    /// Appends a single fragment.
    fn push(&mut self, fragment: Fragment, cx: &Option<clock::Global>) {
        self.append(SumTree::from_item(fragment, cx), cx);
    }
    /// Builds the final tree, constructing it in parallel when large enough
    /// to be worth the overhead.
    fn to_sum_tree(self, cx: &Option<clock::Global>) -> SumTree<Fragment> {
        if self.fragments.len() > 1024 {
            SumTree::from_par_iter(self.fragments, cx)
        } else {
            SumTree::from_iter(self.fragments, cx)
        }
    }
    /// The summary of all fragments accumulated so far.
    fn summary(&self) -> &FragmentSummary {
        &self.summary
    }
}
2871
/// Builds the new visible and deleted ropes for an edited buffer by copying
/// runs of text out of the old ropes, routing each run to the visible or
/// deleted rope according to the corresponding fragment's new visibility.
struct RopeBuilder<'a> {
    old_visible_cursor: rope::Cursor<'a>,
    old_deleted_cursor: rope::Cursor<'a>,
    new_visible: Rope,
    new_deleted: Rope,
}

impl<'a> RopeBuilder<'a> {
    fn new(old_visible_cursor: rope::Cursor<'a>, old_deleted_cursor: rope::Cursor<'a>) -> Self {
        Self {
            old_visible_cursor,
            old_deleted_cursor,
            new_visible: Rope::new(),
            new_deleted: Rope::new(),
        }
    }

    /// Copies an unchanged region: visible text stays visible, deleted text
    /// stays deleted.
    fn append(&mut self, len: FragmentTextSummary) {
        self.push(len.visible, true, true);
        self.push(len.deleted, false, false);
    }

    /// Copies a fragment's text, moving it between the visible and deleted
    /// ropes if its visibility changed.
    fn push_fragment(&mut self, fragment: &Fragment, was_visible: bool) {
        debug_assert!(fragment.len > 0);
        self.push(fragment.len as usize, was_visible, fragment.visible)
    }

    /// Copies `len` bytes from the old rope selected by `was_visible` into
    /// the new rope selected by `is_visible`. Relies on being called in
    /// buffer order, since both cursors only move forward.
    fn push(&mut self, len: usize, was_visible: bool, is_visible: bool) {
        let text = if was_visible {
            self.old_visible_cursor
                .slice(self.old_visible_cursor.offset() + len)
        } else {
            self.old_deleted_cursor
                .slice(self.old_deleted_cursor.offset() + len)
        };
        if is_visible {
            self.new_visible.append(text);
        } else {
            self.new_deleted.append(text);
        }
    }

    /// Appends newly inserted text directly to the visible rope.
    fn push_str(&mut self, text: &str) {
        self.new_visible.push(text);
    }

    /// Consumes the builder, flushing any remaining old text, and returns
    /// `(new_visible, new_deleted)`.
    fn finish(mut self) -> (Rope, Rope) {
        self.new_visible.append(self.old_visible_cursor.suffix());
        self.new_deleted.append(self.old_deleted_cursor.suffix());
        (self.new_visible, self.new_deleted)
    }
}
2924
impl<D: TextDimension + Ord, F: FnMut(&FragmentSummary) -> bool> Iterator for Edits<'_, D, F> {
    type Item = (Edit<D>, Range<Anchor>);

    /// Produces the next coalesced edit by scanning fragments whose
    /// visibility changed since `self.since`. Adjacent changed fragments are
    /// merged into a single `Edit`; a run ends when unchanged text intervenes.
    fn next(&mut self) -> Option<Self::Item> {
        let mut pending_edit: Option<Self::Item> = None;
        let cursor = self.fragments_cursor.as_mut()?;

        while let Some(fragment) = cursor.item() {
            // Skip fragments before the requested range; stop past its end.
            if fragment.id < *self.range.start.0 {
                cursor.next();
                continue;
            } else if fragment.id > *self.range.end.0 {
                break;
            }

            // Account for unchanged visible text between the previous
            // position and this fragment: it advances both coordinate spaces.
            if cursor.start().visible > self.visible_cursor.offset() {
                let summary = self.visible_cursor.summary(cursor.start().visible);
                self.old_end.add_assign(&summary);
                self.new_end.add_assign(&summary);
            }

            // If unchanged text separated this fragment from the pending
            // edit, emit the pending edit before processing this fragment.
            if pending_edit
                .as_ref()
                .is_some_and(|(change, _)| change.new.end < self.new_end)
            {
                break;
            }

            let start_anchor = Anchor::new(
                fragment.timestamp,
                fragment.insertion_offset,
                Bias::Right,
                self.buffer_id,
            );
            let end_anchor = Anchor::new(
                fragment.timestamp,
                fragment.insertion_offset + fragment.len,
                Bias::Left,
                self.buffer_id,
            );

            if !fragment.was_visible(self.since, self.undos) && fragment.visible {
                // Newly inserted text: extends the edit's new range only.
                let mut visible_end = cursor.end().visible;
                // Clamp the final fragment to the requested range's end offset.
                if fragment.id == *self.range.end.0 {
                    visible_end = cmp::min(
                        visible_end,
                        cursor.start().visible
                            + (self.range.end.1 - fragment.insertion_offset) as usize,
                    );
                }

                let fragment_summary = self.visible_cursor.summary(visible_end);
                let mut new_end = self.new_end;
                new_end.add_assign(&fragment_summary);
                if let Some((edit, range)) = pending_edit.as_mut() {
                    // Extend the pending edit across this adjacent insertion.
                    edit.new.end = new_end;
                    range.end = end_anchor;
                } else {
                    pending_edit = Some((
                        Edit {
                            old: self.old_end..self.old_end,
                            new: self.new_end..new_end,
                        },
                        start_anchor..end_anchor,
                    ));
                }

                self.new_end = new_end;
            } else if fragment.was_visible(self.since, self.undos) && !fragment.visible {
                // Newly deleted text: extends the edit's old range only.
                let mut deleted_end = cursor.end().deleted;
                // Clamp the final fragment to the requested range's end offset.
                if fragment.id == *self.range.end.0 {
                    deleted_end = cmp::min(
                        deleted_end,
                        cursor.start().deleted
                            + (self.range.end.1 - fragment.insertion_offset) as usize,
                    );
                }

                if cursor.start().deleted > self.deleted_cursor.offset() {
                    self.deleted_cursor.seek_forward(cursor.start().deleted);
                }
                let fragment_summary = self.deleted_cursor.summary(deleted_end);
                let mut old_end = self.old_end;
                old_end.add_assign(&fragment_summary);
                if let Some((edit, range)) = pending_edit.as_mut() {
                    // Extend the pending edit across this adjacent deletion.
                    edit.old.end = old_end;
                    range.end = end_anchor;
                } else {
                    pending_edit = Some((
                        Edit {
                            old: self.old_end..old_end,
                            new: self.new_end..self.new_end,
                        },
                        start_anchor..end_anchor,
                    ));
                }

                self.old_end = old_end;
            }

            cursor.next();
        }

        pending_edit
    }
}
3031
impl Fragment {
    /// Whether this fragment is currently visible: its insertion has not been
    /// undone, and every deletion applied to it has been undone.
    fn is_visible(&self, undos: &UndoMap) -> bool {
        !undos.is_undone(self.timestamp) && self.deletions.iter().all(|d| undos.is_undone(*d))
    }

    /// Whether this fragment was visible at the given version: its insertion
    /// had been observed and not undone, and each of its deletions was either
    /// not yet observed or already undone at that version.
    fn was_visible(&self, version: &clock::Global, undos: &UndoMap) -> bool {
        (version.observed(self.timestamp) && !undos.was_undone(self.timestamp, version))
            && self
                .deletions
                .iter()
                .all(|d| !version.observed(*d) || undos.was_undone(*d, version))
    }
}
3045
3046impl sum_tree::Item for Fragment {
3047 type Summary = FragmentSummary;
3048
3049 fn summary(&self, _cx: &Option<clock::Global>) -> Self::Summary {
3050 let mut max_version = clock::Global::new();
3051 max_version.observe(self.timestamp);
3052 for deletion in &self.deletions {
3053 max_version.observe(*deletion);
3054 }
3055 max_version.join(&self.max_undos);
3056
3057 let mut min_insertion_version = clock::Global::new();
3058 min_insertion_version.observe(self.timestamp);
3059 let max_insertion_version = min_insertion_version.clone();
3060 if self.visible {
3061 FragmentSummary {
3062 max_id: self.id.clone(),
3063 text: FragmentTextSummary {
3064 visible: self.len as usize,
3065 deleted: 0,
3066 },
3067 max_version,
3068 min_insertion_version,
3069 max_insertion_version,
3070 }
3071 } else {
3072 FragmentSummary {
3073 max_id: self.id.clone(),
3074 text: FragmentTextSummary {
3075 visible: 0,
3076 deleted: self.len as usize,
3077 },
3078 max_version,
3079 min_insertion_version,
3080 max_insertion_version,
3081 }
3082 }
3083 }
3084}
3085
impl sum_tree::Summary for FragmentSummary {
    type Context<'a> = &'a Option<clock::Global>;

    fn zero(_cx: Self::Context<'_>) -> Self {
        Default::default()
    }

    /// Folds a right-hand summary into this one: the max id wins, text
    /// lengths accumulate, and the version vectors are combined.
    fn add_summary(&mut self, other: &Self, _: Self::Context<'_>) {
        self.max_id.assign(&other.max_id);
        self.text.visible += &other.text.visible;
        self.text.deleted += &other.text.deleted;
        self.max_version.join(&other.max_version);
        // Min/meet and max/join keep the insertion-version bounds tight over
        // the combined range.
        self.min_insertion_version
            .meet(&other.min_insertion_version);
        self.max_insertion_version
            .join(&other.max_insertion_version);
    }
}

impl Default for FragmentSummary {
    fn default() -> Self {
        FragmentSummary {
            max_id: Locator::min(),
            text: FragmentTextSummary::default(),
            max_version: clock::Global::new(),
            min_insertion_version: clock::Global::new(),
            max_insertion_version: clock::Global::new(),
        }
    }
}
3116
impl sum_tree::Item for InsertionFragment {
    type Summary = InsertionFragmentKey;

    /// The summary doubles as the item's key: `(timestamp, split_offset)`.
    fn summary(&self, _cx: ()) -> Self::Summary {
        InsertionFragmentKey {
            timestamp: self.timestamp,
            split_offset: self.split_offset,
        }
    }
}

impl sum_tree::KeyedItem for InsertionFragment {
    type Key = InsertionFragmentKey;

    fn key(&self) -> Self::Key {
        sum_tree::Item::summary(self, ())
    }
}

impl InsertionFragment {
    /// Creates an insertion-tree record pointing at the given fragment.
    fn new(fragment: &Fragment) -> Self {
        Self {
            timestamp: fragment.timestamp,
            split_offset: fragment.insertion_offset,
            fragment_id: fragment.id.clone(),
        }
    }

    /// Convenience constructor for an insert edit on the insertions tree.
    fn insert_new(fragment: &Fragment) -> sum_tree::Edit<Self> {
        sum_tree::Edit::Insert(Self::new(fragment))
    }
}
3149
impl sum_tree::ContextLessSummary for InsertionFragmentKey {
    fn zero() -> Self {
        InsertionFragmentKey {
            timestamp: Lamport::MIN,
            split_offset: 0,
        }
    }

    /// Keys are ordered, so combining summaries keeps the rightmost key.
    fn add_summary(&mut self, summary: &Self) {
        *self = *summary;
    }
}
3162
/// A byte offset into the "full" buffer text, i.e. counting deleted text as
/// well as visible text (see the `Dimension` impl that sums
/// `text.visible + text.deleted`).
#[derive(Copy, Clone, Debug, Default, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct FullOffset(pub usize);

impl ops::AddAssign<usize> for FullOffset {
    fn add_assign(&mut self, rhs: usize) {
        self.0 += rhs;
    }
}

impl ops::Add<usize> for FullOffset {
    type Output = Self;

    fn add(self, rhs: usize) -> Self::Output {
        Self(self.0 + rhs)
    }
}

impl ops::Sub for FullOffset {
    type Output = usize;

    /// Distance between two full offsets. Underflows (and panics in builds
    /// with overflow checks) when `rhs` is past `self`.
    fn sub(self, rhs: Self) -> Self::Output {
        self.0 - rhs.0
    }
}
3188
// Dimension: total length of *visible* (non-deleted) text, in bytes.
impl sum_tree::Dimension<'_, FragmentSummary> for usize {
    fn zero(_: &Option<clock::Global>) -> Self {
        Default::default()
    }

    fn add_summary(&mut self, summary: &FragmentSummary, _: &Option<clock::Global>) {
        *self += summary.text.visible;
    }
}

// Dimension: offset into the "full" text, counting deleted text as well as
// visible text.
impl sum_tree::Dimension<'_, FragmentSummary> for FullOffset {
    fn zero(_: &Option<clock::Global>) -> Self {
        Default::default()
    }

    fn add_summary(&mut self, summary: &FragmentSummary, _: &Option<clock::Global>) {
        self.0 += summary.text.visible + summary.text.deleted;
    }
}

// Dimension: the greatest fragment locator seen so far (`None` before any
// summary has been added).
impl<'a> sum_tree::Dimension<'a, FragmentSummary> for Option<&'a Locator> {
    fn zero(_: &Option<clock::Global>) -> Self {
        Default::default()
    }

    fn add_summary(&mut self, summary: &'a FragmentSummary, _: &Option<clock::Global>) {
        *self = Some(&summary.max_id);
    }
}

// Seek target: compares a visible-text byte offset against the cursor's
// accumulated visible length, ignoring deleted text.
impl sum_tree::SeekTarget<'_, FragmentSummary, FragmentTextSummary> for usize {
    fn cmp(
        &self,
        cursor_location: &FragmentTextSummary,
        _: &Option<clock::Global>,
    ) -> cmp::Ordering {
        Ord::cmp(self, &cursor_location.visible)
    }
}
3228
// A `FullOffset` interpreted relative to some buffer version (supplied as the
// sum-tree cursor context). `Invalid` marks positions inside a region whose
// insertions the version has only partially observed, where no single full
// offset is meaningful (see the `Dimension` impl below).
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
enum VersionedFullOffset {
    Offset(FullOffset),
    Invalid,
}
3234
3235impl VersionedFullOffset {
3236 fn full_offset(&self) -> FullOffset {
3237 if let Self::Offset(position) = self {
3238 *position
3239 } else {
3240 panic!("invalid version")
3241 }
3242 }
3243}
3244
3245impl Default for VersionedFullOffset {
3246 fn default() -> Self {
3247 Self::Offset(Default::default())
3248 }
3249}
3250
// Dimension: accumulates a full offset as seen by the version supplied as the
// cursor context, which must be `Some` here (hence the `unwrap`).
//
// A summarized run only contributes its length when the context version has
// observed all of its insertions. If the version has observed only some of
// them, a single offset is not meaningful and the dimension collapses to
// `Invalid`; runs whose insertions are entirely unobserved contribute nothing.
impl<'a> sum_tree::Dimension<'a, FragmentSummary> for VersionedFullOffset {
    fn zero(_cx: &Option<clock::Global>) -> Self {
        Default::default()
    }

    fn add_summary(&mut self, summary: &'a FragmentSummary, cx: &Option<clock::Global>) {
        if let Self::Offset(offset) = self {
            let version = cx.as_ref().unwrap();
            if version.observed_all(&summary.max_insertion_version) {
                *offset += summary.text.visible + summary.text.deleted;
            } else if version.observed_any(&summary.min_insertion_version) {
                *self = Self::Invalid;
            }
        }
    }
}

// Seek target: the target itself is always a concrete offset — `Invalid`
// never appears on the left-hand side, hence the `unreachable!`. An `Invalid`
// cursor position compares greater than any concrete target, so seeking stops
// before entering an invalid region.
impl sum_tree::SeekTarget<'_, FragmentSummary, Self> for VersionedFullOffset {
    fn cmp(&self, cursor_position: &Self, _: &Option<clock::Global>) -> cmp::Ordering {
        match (self, cursor_position) {
            (Self::Offset(a), Self::Offset(b)) => Ord::cmp(a, b),
            (Self::Offset(_), Self::Invalid) => cmp::Ordering::Less,
            (Self::Invalid, _) => unreachable!(),
        }
    }
}
3277
3278impl Operation {
3279 fn replica_id(&self) -> ReplicaId {
3280 operation_queue::Operation::lamport_timestamp(self).replica_id
3281 }
3282
3283 pub fn timestamp(&self) -> clock::Lamport {
3284 match self {
3285 Operation::Edit(edit) => edit.timestamp,
3286 Operation::Undo(undo) => undo.timestamp,
3287 }
3288 }
3289
3290 pub fn as_edit(&self) -> Option<&EditOperation> {
3291 match self {
3292 Operation::Edit(edit) => Some(edit),
3293 _ => None,
3294 }
3295 }
3296
3297 pub fn is_edit(&self) -> bool {
3298 matches!(self, Operation::Edit { .. })
3299 }
3300}
3301
3302impl operation_queue::Operation for Operation {
3303 fn lamport_timestamp(&self) -> clock::Lamport {
3304 match self {
3305 Operation::Edit(edit) => edit.timestamp,
3306 Operation::Undo(undo) => undo.timestamp,
3307 }
3308 }
3309}
3310
/// Conversion of a buffer coordinate (point, anchor, UTF-16 point, raw
/// offset, …) into a byte offset within the buffer's visible text.
pub trait ToOffset {
    fn to_offset(&self, snapshot: &BufferSnapshot) -> usize;
    /// Turns this point into the next offset in the buffer that comes after this, respecting utf8 boundaries.
    ///
    /// NOTE(review): this adds 1 *before* rounding up to a char boundary, so
    /// at the very end of the buffer it relies on `ceil_char_boundary`
    /// clamping an out-of-range offset — confirm in the rope crate.
    fn to_next_offset(&self, snapshot: &BufferSnapshot) -> usize {
        snapshot
            .visible_text
            .ceil_char_boundary(self.to_offset(snapshot) + 1)
    }
    /// Turns this point into the previous offset in the buffer that comes before this, respecting utf8 boundaries.
    fn to_previous_offset(&self, snapshot: &BufferSnapshot) -> usize {
        snapshot
            .visible_text
            .floor_char_boundary(self.to_offset(snapshot).saturating_sub(1))
    }
}

// Delegates to the snapshot's point → offset coordinate conversion.
impl ToOffset for Point {
    #[inline]
    fn to_offset(&self, snapshot: &BufferSnapshot) -> usize {
        snapshot.point_to_offset(*self)
    }
}

// An offset converts to itself after validation: `assert_char_boundary`
// reports whether the offset lies on a char boundary (its const argument
// presumably enables extra checking in debug builds — see the rope crate),
// and offsets that fail the check are rounded down to the nearest boundary.
impl ToOffset for usize {
    #[track_caller]
    fn to_offset(&self, snapshot: &BufferSnapshot) -> usize {
        if !snapshot
            .as_rope()
            .assert_char_boundary::<{ cfg!(debug_assertions) }>(*self)
        {
            snapshot.as_rope().floor_char_boundary(*self)
        } else {
            *self
        }
    }
}

// Resolves the anchor against the snapshot's fragment tree.
impl ToOffset for Anchor {
    #[inline]
    fn to_offset(&self, snapshot: &BufferSnapshot) -> usize {
        snapshot.summary_for_anchor(self)
    }
}

// Blanket impl so borrowed coordinates convert transparently.
impl<T: ToOffset> ToOffset for &T {
    #[inline]
    fn to_offset(&self, content: &BufferSnapshot) -> usize {
        (*self).to_offset(content)
    }
}

// UTF-16 point → byte offset, via the snapshot's coordinate maps.
impl ToOffset for PointUtf16 {
    #[inline]
    fn to_offset(&self, snapshot: &BufferSnapshot) -> usize {
        snapshot.point_utf16_to_offset(*self)
    }
}

// Like the `PointUtf16` impl, but the point may be out of range and is
// clipped by the snapshot.
impl ToOffset for Unclipped<PointUtf16> {
    #[inline]
    fn to_offset(&self, snapshot: &BufferSnapshot) -> usize {
        snapshot.unclipped_point_utf16_to_offset(*self)
    }
}
3375
/// Conversion of a buffer coordinate into a UTF-8 row/column [`Point`].
pub trait ToPoint {
    fn to_point(&self, snapshot: &BufferSnapshot) -> Point;
}

// Resolves the anchor against the snapshot's fragment tree.
impl ToPoint for Anchor {
    #[inline]
    fn to_point(&self, snapshot: &BufferSnapshot) -> Point {
        snapshot.summary_for_anchor(self)
    }
}

// Byte offset → point, via the snapshot's coordinate maps.
impl ToPoint for usize {
    #[inline]
    fn to_point(&self, snapshot: &BufferSnapshot) -> Point {
        snapshot.offset_to_point(*self)
    }
}

// A point is already in the target coordinate space.
impl ToPoint for Point {
    #[inline]
    fn to_point(&self, _: &BufferSnapshot) -> Point {
        *self
    }
}

// Clips a possibly-out-of-range UTF-16 point, then converts it.
impl ToPoint for Unclipped<PointUtf16> {
    #[inline]
    fn to_point(&self, snapshot: &BufferSnapshot) -> Point {
        snapshot.unclipped_point_utf16_to_point(*self)
    }
}
3407
/// Conversion of a buffer coordinate into a UTF-16 row/column [`PointUtf16`].
pub trait ToPointUtf16 {
    fn to_point_utf16(&self, snapshot: &BufferSnapshot) -> PointUtf16;
}

// Resolves the anchor against the snapshot's fragment tree.
impl ToPointUtf16 for Anchor {
    #[inline]
    fn to_point_utf16(&self, snapshot: &BufferSnapshot) -> PointUtf16 {
        snapshot.summary_for_anchor(self)
    }
}

// Byte offset → UTF-16 point, via the snapshot's coordinate maps.
impl ToPointUtf16 for usize {
    #[inline]
    fn to_point_utf16(&self, snapshot: &BufferSnapshot) -> PointUtf16 {
        snapshot.offset_to_point_utf16(*self)
    }
}

// A UTF-16 point is already in the target coordinate space.
impl ToPointUtf16 for PointUtf16 {
    #[inline]
    fn to_point_utf16(&self, _: &BufferSnapshot) -> PointUtf16 {
        *self
    }
}

// UTF-8 point → UTF-16 point, via the snapshot's coordinate maps.
impl ToPointUtf16 for Point {
    #[inline]
    fn to_point_utf16(&self, snapshot: &BufferSnapshot) -> PointUtf16 {
        snapshot.point_to_point_utf16(*self)
    }
}
3439
/// Conversion of a buffer coordinate into a UTF-16 code-unit offset.
pub trait ToOffsetUtf16 {
    fn to_offset_utf16(&self, snapshot: &BufferSnapshot) -> OffsetUtf16;
}

// Resolves the anchor against the snapshot's fragment tree.
impl ToOffsetUtf16 for Anchor {
    #[inline]
    fn to_offset_utf16(&self, snapshot: &BufferSnapshot) -> OffsetUtf16 {
        snapshot.summary_for_anchor(self)
    }
}

// Byte offset → UTF-16 offset, via the snapshot's coordinate maps.
impl ToOffsetUtf16 for usize {
    #[inline]
    fn to_offset_utf16(&self, snapshot: &BufferSnapshot) -> OffsetUtf16 {
        snapshot.offset_to_offset_utf16(*self)
    }
}

// A UTF-16 offset is already in the target coordinate space.
impl ToOffsetUtf16 for OffsetUtf16 {
    #[inline]
    fn to_offset_utf16(&self, _snapshot: &BufferSnapshot) -> OffsetUtf16 {
        *self
    }
}
3464
/// Resolution of an [`Anchor`] into a concrete coordinate type.
/// The inverse direction of the `To*` conversion traits above.
pub trait FromAnchor {
    fn from_anchor(anchor: &Anchor, snapshot: &BufferSnapshot) -> Self;
}

// An anchor resolves to itself; the snapshot is not consulted.
impl FromAnchor for Anchor {
    #[inline]
    fn from_anchor(anchor: &Anchor, _snapshot: &BufferSnapshot) -> Self {
        *anchor
    }
}

// Anchor → UTF-8 point, via the snapshot's fragment tree.
impl FromAnchor for Point {
    #[inline]
    fn from_anchor(anchor: &Anchor, snapshot: &BufferSnapshot) -> Self {
        snapshot.summary_for_anchor(anchor)
    }
}

// Anchor → UTF-16 point, via the snapshot's fragment tree.
impl FromAnchor for PointUtf16 {
    #[inline]
    fn from_anchor(anchor: &Anchor, snapshot: &BufferSnapshot) -> Self {
        snapshot.summary_for_anchor(anchor)
    }
}

// Anchor → byte offset, via the snapshot's fragment tree.
impl FromAnchor for usize {
    #[inline]
    fn from_anchor(anchor: &Anchor, snapshot: &BufferSnapshot) -> Self {
        snapshot.summary_for_anchor(anchor)
    }
}
3496
/// The newline convention used by a buffer: `Unix` ("\n") or
/// `Windows` ("\r\n").
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum LineEnding {
    Unix,
    Windows,
}
3502
3503impl Default for LineEnding {
3504 fn default() -> Self {
3505 #[cfg(unix)]
3506 return Self::Unix;
3507
3508 #[cfg(not(unix))]
3509 return Self::Windows;
3510 }
3511}
3512
impl LineEnding {
    /// The literal byte sequence for this line ending.
    pub fn as_str(&self) -> &'static str {
        match self {
            LineEnding::Unix => "\n",
            LineEnding::Windows => "\r\n",
        }
    }

    /// Short user-facing label (e.g. for a status bar).
    pub fn label(&self) -> &'static str {
        match self {
            LineEnding::Unix => "LF",
            LineEnding::Windows => "CRLF",
        }
    }

    /// Guesses the line ending of `text` from its first newline.
    ///
    /// Only the first ~1000 bytes are scanned (backed off to a char
    /// boundary). Classification looks at the first '\n' and whether it is
    /// preceded by '\r'; a text with no '\n' in the window — including one
    /// using lone-'\r' (classic Mac) endings — falls back to the platform
    /// default.
    pub fn detect(text: &str) -> Self {
        let mut max_ix = cmp::min(text.len(), 1000);
        while !text.is_char_boundary(max_ix) {
            max_ix -= 1;
        }

        if let Some(ix) = text[..max_ix].find(['\n']) {
            if ix > 0 && text.as_bytes()[ix - 1] == b'\r' {
                Self::Windows
            } else {
                Self::Unix
            }
        } else {
            Self::default()
        }
    }

    /// Rewrites "\r\n" and lone "\r" to "\n" in place, allocating only when
    /// a replacement actually occurred (see `LINE_SEPARATORS_REGEX`).
    pub fn normalize(text: &mut String) {
        if let Cow::Owned(replaced) = LINE_SEPARATORS_REGEX.replace_all(text, "\n") {
            *text = replaced;
        }
    }

    /// Like [`Self::normalize`], but for `Arc<str>`: returns the input
    /// unchanged (no allocation) when it contains no "\r".
    pub fn normalize_arc(text: Arc<str>) -> Arc<str> {
        if let Cow::Owned(replaced) = LINE_SEPARATORS_REGEX.replace_all(&text, "\n") {
            replaced.into()
        } else {
            text
        }
    }

    /// Like [`Self::normalize`], but for `Cow<str>`: borrows pass through
    /// untouched when no replacement is needed.
    pub fn normalize_cow(text: Cow<str>) -> Cow<str> {
        if let Cow::Owned(replaced) = LINE_SEPARATORS_REGEX.replace_all(&text, "\n") {
            replaced.into()
        } else {
            text
        }
    }
}
3567
/// Yields the text of `rope` as `&str` pieces in which every line break is
/// replaced by `line_ending` (emitted as separate iterator items between
/// lines).
///
/// Within each rope chunk, lines come from `str::lines`, which splits on
/// '\n' and also strips a trailing '\r' — so existing CRLF breaks are
/// converted too. A separator is interposed between consecutive lines, and a
/// trailing separator is appended only when the chunk ends with '\n'
/// (chunks that end mid-line simply continue into the next chunk).
///
/// NOTE(review): if a "\r\n" pair were split across two rope chunks, the
/// '\r' would pass through verbatim — presumably rope chunking never splits
/// the pair; confirm in the rope crate.
pub fn chunks_with_line_ending(rope: &Rope, line_ending: LineEnding) -> impl Iterator<Item = &str> {
    rope.chunks().flat_map(move |chunk| {
        // Becomes true after the first line of this chunk has been emitted.
        let mut newline = false;
        let end_with_newline = chunk.ends_with('\n').then_some(line_ending.as_str());
        chunk
            .lines()
            .flat_map(move |line| {
                let ending = if newline {
                    Some(line_ending.as_str())
                } else {
                    None
                };
                newline = true;
                ending.into_iter().chain([line])
            })
            .chain(end_with_newline)
    })
}
3586
3587#[cfg(debug_assertions)]
3588pub mod debug {
3589 use super::*;
3590 use parking_lot::Mutex;
3591 use std::any::TypeId;
3592 use std::hash::{Hash, Hasher};
3593
3594 static GLOBAL_DEBUG_RANGES: Mutex<Option<GlobalDebugRanges>> = Mutex::new(None);
3595
3596 pub struct GlobalDebugRanges {
3597 pub ranges: Vec<DebugRange>,
3598 key_to_occurrence_index: HashMap<Key, usize>,
3599 next_occurrence_index: usize,
3600 }
3601
3602 pub struct DebugRange {
3603 key: Key,
3604 pub ranges: Vec<Range<Anchor>>,
3605 pub value: Arc<str>,
3606 pub occurrence_index: usize,
3607 }
3608
3609 #[derive(Debug, Clone, PartialEq, Eq, Hash)]
3610 struct Key {
3611 type_id: TypeId,
3612 hash: u64,
3613 }
3614
3615 impl GlobalDebugRanges {
3616 pub fn with_locked<R>(f: impl FnOnce(&mut Self) -> R) -> R {
3617 let mut state = GLOBAL_DEBUG_RANGES.lock();
3618 if state.is_none() {
3619 *state = Some(GlobalDebugRanges {
3620 ranges: Vec::new(),
3621 key_to_occurrence_index: HashMap::default(),
3622 next_occurrence_index: 0,
3623 });
3624 }
3625 if let Some(global_debug_ranges) = state.as_mut() {
3626 f(global_debug_ranges)
3627 } else {
3628 unreachable!()
3629 }
3630 }
3631
3632 pub fn insert<K: Hash + 'static>(
3633 &mut self,
3634 key: &K,
3635 ranges: Vec<Range<Anchor>>,
3636 value: Arc<str>,
3637 ) {
3638 let occurrence_index = *self
3639 .key_to_occurrence_index
3640 .entry(Key::new(key))
3641 .or_insert_with(|| {
3642 let occurrence_index = self.next_occurrence_index;
3643 self.next_occurrence_index += 1;
3644 occurrence_index
3645 });
3646 let key = Key::new(key);
3647 let existing = self
3648 .ranges
3649 .iter()
3650 .enumerate()
3651 .rfind(|(_, existing)| existing.key == key);
3652 if let Some((existing_ix, _)) = existing {
3653 self.ranges.remove(existing_ix);
3654 }
3655 self.ranges.push(DebugRange {
3656 ranges,
3657 key,
3658 value,
3659 occurrence_index,
3660 });
3661 }
3662
3663 pub fn remove<K: Hash + 'static>(&mut self, key: &K) {
3664 self.remove_impl(&Key::new(key));
3665 }
3666
3667 fn remove_impl(&mut self, key: &Key) {
3668 let existing = self
3669 .ranges
3670 .iter()
3671 .enumerate()
3672 .rfind(|(_, existing)| &existing.key == key);
3673 if let Some((existing_ix, _)) = existing {
3674 self.ranges.remove(existing_ix);
3675 }
3676 }
3677
3678 pub fn remove_all_with_key_type<K: 'static>(&mut self) {
3679 self.ranges
3680 .retain(|item| item.key.type_id != TypeId::of::<K>());
3681 }
3682 }
3683
3684 impl Key {
3685 fn new<K: Hash + 'static>(key: &K) -> Self {
3686 let type_id = TypeId::of::<K>();
3687 let mut hasher = collections::FxHasher::default();
3688 key.hash(&mut hasher);
3689 Key {
3690 type_id,
3691 hash: hasher.finish(),
3692 }
3693 }
3694 }
3695
3696 pub trait ToDebugRanges {
3697 fn to_debug_ranges(&self, snapshot: &BufferSnapshot) -> Vec<Range<usize>>;
3698 }
3699
3700 impl<T: ToOffset> ToDebugRanges for T {
3701 fn to_debug_ranges(&self, snapshot: &BufferSnapshot) -> Vec<Range<usize>> {
3702 [self.to_offset(snapshot)].to_debug_ranges(snapshot)
3703 }
3704 }
3705
3706 impl<T: ToOffset + Clone> ToDebugRanges for Range<T> {
3707 fn to_debug_ranges(&self, snapshot: &BufferSnapshot) -> Vec<Range<usize>> {
3708 [self.clone()].to_debug_ranges(snapshot)
3709 }
3710 }
3711
3712 impl<T: ToOffset> ToDebugRanges for Vec<T> {
3713 fn to_debug_ranges(&self, snapshot: &BufferSnapshot) -> Vec<Range<usize>> {
3714 self.as_slice().to_debug_ranges(snapshot)
3715 }
3716 }
3717
3718 impl<T: ToOffset> ToDebugRanges for Vec<Range<T>> {
3719 fn to_debug_ranges(&self, snapshot: &BufferSnapshot) -> Vec<Range<usize>> {
3720 self.as_slice().to_debug_ranges(snapshot)
3721 }
3722 }
3723
3724 impl<T: ToOffset> ToDebugRanges for [T] {
3725 fn to_debug_ranges(&self, snapshot: &BufferSnapshot) -> Vec<Range<usize>> {
3726 self.iter()
3727 .map(|item| {
3728 let offset = item.to_offset(snapshot);
3729 offset..offset
3730 })
3731 .collect()
3732 }
3733 }
3734
3735 impl<T: ToOffset> ToDebugRanges for [Range<T>] {
3736 fn to_debug_ranges(&self, snapshot: &BufferSnapshot) -> Vec<Range<usize>> {
3737 self.iter()
3738 .map(|range| range.start.to_offset(snapshot)..range.end.to_offset(snapshot))
3739 .collect()
3740 }
3741 }
3742}