1mod anchor;
2mod point;
3pub mod rope;
4mod selection;
5
6pub use anchor::*;
7use parking_lot::Mutex;
8pub use point::*;
9pub use rope::{Chunks, Rope, TextSummary};
10use seahash::SeaHasher;
11pub use selection::*;
12use similar::{ChangeTag, TextDiff};
13use tree_sitter::{InputEdit, Parser, QueryCursor};
14
15use crate::{
16 editor::Bias,
17 language::{Language, Tree},
18 operation_queue::{self, OperationQueue},
19 settings::{StyleId, ThemeMap},
20 sum_tree::{self, FilterCursor, SeekBias, SumTree},
21 time::{self, ReplicaId},
22 worktree::FileHandle,
23};
24use anyhow::{anyhow, Result};
25use gpui::{AppContext, Entity, ModelContext, Task};
26use lazy_static::lazy_static;
27use std::{
28 cell::RefCell,
29 cmp,
30 hash::BuildHasher,
31 iter::{self, Iterator},
32 mem,
33 ops::{Deref, DerefMut, Range},
34 str,
35 sync::Arc,
36 time::{Duration, Instant, SystemTime, UNIX_EPOCH},
37};
38
39const UNDO_GROUP_INTERVAL: Duration = Duration::from_millis(300);
40
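// In tests, hash maps and sets are built with a fixed-seed SeaHasher so that any
// behavior depending on iteration order is reproducible from run to run.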
41#[derive(Clone, Default)]
42struct DeterministicState;
43
44impl BuildHasher for DeterministicState {
45 type Hasher = SeaHasher;
46
47 fn build_hasher(&self) -> Self::Hasher {
48 SeaHasher::new()
49 }
50}
51
52#[cfg(test)]
53type HashMap<K, V> = std::collections::HashMap<K, V, DeterministicState>;
54
55#[cfg(test)]
56type HashSet<T> = std::collections::HashSet<T, DeterministicState>;
57
58#[cfg(not(test))]
59type HashMap<K, V> = std::collections::HashMap<K, V>;
60
61#[cfg(not(test))]
62type HashSet<T> = std::collections::HashSet<T>;
63
64thread_local! {
65 static PARSER: RefCell<Parser> = RefCell::new(Parser::new());
66}
67
68lazy_static! {
69 static ref QUERY_CURSORS: Mutex<Vec<QueryCursor>> = Default::default();
70}
71
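// A pooled tree-sitter query cursor. Cursors are checked out of QUERY_CURSORS when
// created and returned to the pool, with their ranges reset, when dropped.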
72struct QueryCursorHandle(Option<QueryCursor>);
73
74impl QueryCursorHandle {
75 fn new() -> Self {
76 QueryCursorHandle(Some(
77 QUERY_CURSORS
78 .lock()
79 .pop()
80 .unwrap_or_else(|| QueryCursor::new()),
81 ))
82 }
83}
84
85impl Deref for QueryCursorHandle {
86 type Target = QueryCursor;
87
88 fn deref(&self) -> &Self::Target {
89 self.0.as_ref().unwrap()
90 }
91}
92
93impl DerefMut for QueryCursorHandle {
94 fn deref_mut(&mut self) -> &mut Self::Target {
95 self.0.as_mut().unwrap()
96 }
97}
98
99impl Drop for QueryCursorHandle {
100 fn drop(&mut self) {
101 let mut cursor = self.0.take().unwrap();
102 cursor.set_byte_range(0..usize::MAX);
103 cursor.set_point_range(Point::zero().into()..Point::MAX.into());
104 QUERY_CURSORS.lock().push(cursor)
105 }
106}
107
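// A text buffer that supports concurrent edits from multiple replicas. The text is
// stored as a sum tree of `Fragment`s covering both visible and deleted regions,
// together with the logical clocks and version information used to order and merge
// operations arriving from other replicas.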
108pub struct Buffer {
109 fragments: SumTree<Fragment>,
110 visible_text: Rope,
111 deleted_text: Rope,
112 insertion_splits: HashMap<time::Local, SumTree<InsertionSplit>>,
113 pub version: time::Global,
114 saved_version: time::Global,
115 saved_mtime: SystemTime,
116 last_edit: time::Local,
117 undo_map: UndoMap,
118 history: History,
119 file: Option<FileHandle>,
120 language: Option<Arc<Language>>,
121 syntax_tree: Mutex<Option<SyntaxTree>>,
122 is_parsing: bool,
123 selections: HashMap<SelectionSetId, Arc<[Selection]>>,
124 pub selections_last_update: SelectionsVersion,
125 deferred_ops: OperationQueue<Operation>,
126 deferred_replicas: HashSet<ReplicaId>,
127 replica_id: ReplicaId,
128 local_clock: time::Local,
129 lamport_clock: time::Lamport,
130}
131
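// The most recent tree-sitter parse and the buffer version it reflects. `parsed` is
// cleared when edits are spliced into the tree without reparsing.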
132#[derive(Clone)]
133struct SyntaxTree {
134 tree: Tree,
135 parsed: bool,
136 version: time::Global,
137}
138
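// A group of edits made within a single undoable transaction, along with the
// selections to restore when the transaction is undone or redone.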
139#[derive(Clone)]
140struct Transaction {
141 start: time::Global,
142 buffer_was_dirty: bool,
143 edits: Vec<time::Local>,
144 selections_before: Option<(SelectionSetId, Arc<[Selection]>)>,
145 selections_after: Option<(SelectionSetId, Arc<[Selection]>)>,
146 first_edit_at: Instant,
147 last_edit_at: Instant,
148}
149
150#[derive(Clone)]
151pub struct History {
152 // TODO: Turn this into a String or Rope, maybe.
153 pub base_text: Arc<str>,
154 ops: HashMap<time::Local, EditOperation>,
155 undo_stack: Vec<Transaction>,
156 redo_stack: Vec<Transaction>,
157 transaction_depth: usize,
158 group_interval: Duration,
159}
160
161impl History {
162 pub fn new(base_text: Arc<str>) -> Self {
163 Self {
164 base_text,
165 ops: Default::default(),
166 undo_stack: Vec::new(),
167 redo_stack: Vec::new(),
168 transaction_depth: 0,
169 group_interval: UNDO_GROUP_INTERVAL,
170 }
171 }
172
173 fn push(&mut self, op: EditOperation) {
174 self.ops.insert(op.id, op);
175 }
176
177 fn start_transaction(
178 &mut self,
179 start: time::Global,
180 buffer_was_dirty: bool,
181 selections: Option<(SelectionSetId, Arc<[Selection]>)>,
182 now: Instant,
183 ) {
184 self.transaction_depth += 1;
185 if self.transaction_depth == 1 {
186 self.undo_stack.push(Transaction {
187 start,
188 buffer_was_dirty,
189 edits: Vec::new(),
190 selections_before: selections,
191 selections_after: None,
192 first_edit_at: now,
193 last_edit_at: now,
194 });
195 }
196 }
197
198 fn end_transaction(
199 &mut self,
200 selections: Option<(SelectionSetId, Arc<[Selection]>)>,
201 now: Instant,
202 ) -> Option<&Transaction> {
203 assert_ne!(self.transaction_depth, 0);
204 self.transaction_depth -= 1;
205 if self.transaction_depth == 0 {
206 let transaction = self.undo_stack.last_mut().unwrap();
207 transaction.selections_after = selections;
208 transaction.last_edit_at = now;
209 Some(transaction)
210 } else {
211 None
212 }
213 }
214
215 fn group(&mut self) {
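        // Merge adjacent transactions whose edits happened within `group_interval`
        // of each other, walking the undo stack from newest to oldest.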
216 let mut new_len = self.undo_stack.len();
217 let mut transactions = self.undo_stack.iter_mut();
218
219 if let Some(mut transaction) = transactions.next_back() {
            while let Some(prev_transaction) = transactions.next_back() {
221 if transaction.first_edit_at - prev_transaction.last_edit_at <= self.group_interval
222 {
223 prev_transaction.edits.append(&mut transaction.edits);
224 prev_transaction.last_edit_at = transaction.last_edit_at;
225 prev_transaction.selections_after = transaction.selections_after.take();
226 transaction = prev_transaction;
227 new_len -= 1;
228 } else {
229 break;
230 }
231 }
232 }
233
234 self.undo_stack.truncate(new_len);
235 }
236
237 fn push_undo(&mut self, edit_id: time::Local) {
238 assert_ne!(self.transaction_depth, 0);
239 self.undo_stack.last_mut().unwrap().edits.push(edit_id);
240 }
241
242 fn pop_undo(&mut self) -> Option<&Transaction> {
243 assert_eq!(self.transaction_depth, 0);
244 if let Some(transaction) = self.undo_stack.pop() {
245 self.redo_stack.push(transaction);
246 self.redo_stack.last()
247 } else {
248 None
249 }
250 }
251
252 fn pop_redo(&mut self) -> Option<&Transaction> {
253 assert_eq!(self.transaction_depth, 0);
254 if let Some(transaction) = self.redo_stack.pop() {
255 self.undo_stack.push(transaction);
256 self.undo_stack.last()
257 } else {
258 None
259 }
260 }
261}
262
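// Records the undo operations applied to each edit. An edit is considered undone
// when its highest observed undo count is odd.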
263#[derive(Clone, Default, Debug)]
264struct UndoMap(HashMap<time::Local, Vec<UndoOperation>>);
265
266impl UndoMap {
267 fn insert(&mut self, undo: UndoOperation) {
268 self.0.entry(undo.edit_id).or_default().push(undo);
269 }
270
271 fn is_undone(&self, edit_id: time::Local) -> bool {
272 self.undo_count(edit_id) % 2 == 1
273 }
274
275 fn was_undone(&self, edit_id: time::Local, version: &time::Global) -> bool {
276 let undo_count = self
277 .0
278 .get(&edit_id)
279 .unwrap_or(&Vec::new())
280 .iter()
281 .filter(|undo| version.observed(undo.id))
282 .map(|undo| undo.count)
283 .max()
284 .unwrap_or(0);
285 undo_count % 2 == 1
286 }
287
288 fn undo_count(&self, edit_id: time::Local) -> u32 {
289 self.0
290 .get(&edit_id)
291 .unwrap_or(&Vec::new())
292 .iter()
293 .map(|undo| undo.count)
294 .max()
295 .unwrap_or(0)
296 }
297}
298
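// Iterator returned by `Buffer::edits_since`: walks the fragments whose versions
// changed after `since` and yields the resulting `Edit` ranges.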
299struct Edits<'a, F: Fn(&FragmentSummary) -> bool> {
300 deleted_text: &'a Rope,
301 cursor: FilterCursor<'a, F, Fragment, FragmentTextSummary>,
302 undos: &'a UndoMap,
303 since: time::Global,
304 delta: isize,
305}
306
307#[derive(Clone, Debug, Default, Eq, PartialEq)]
308pub struct Edit {
309 pub old_range: Range<usize>,
310 pub new_range: Range<usize>,
311 pub old_lines: Point,
312}
313
314impl Edit {
315 pub fn delta(&self) -> isize {
316 (self.new_range.end - self.new_range.start) as isize
317 - (self.old_range.end - self.old_range.start) as isize
318 }
319
320 pub fn old_extent(&self) -> usize {
321 self.old_range.end - self.old_range.start
322 }
323}
324
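// The result of diffing the buffer's text against newly loaded file contents,
// produced on a background thread by `Buffer::diff` and consumed by
// `set_text_via_diff`.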
325struct Diff {
326 base_version: time::Global,
327 new_text: Arc<str>,
328 changes: Vec<(ChangeTag, usize)>,
329}
330
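// A single insertion of text, identified by the local timestamp of the edit that
// produced it and positioned at an offset within its parent insertion.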
331#[derive(Clone, Eq, PartialEq, Debug)]
332pub struct Insertion {
333 id: time::Local,
334 parent_id: time::Local,
335 offset_in_parent: usize,
336 lamport_timestamp: time::Lamport,
337}
338
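// A contiguous piece of one insertion. Fragments are ordered by `FragmentId` within
// the fragment tree, and become invisible once deleted (unless the deletion is undone).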
339#[derive(Eq, PartialEq, Clone, Debug)]
340struct Fragment {
341 id: FragmentId,
342 insertion: Arc<Insertion>,
343 range_in_insertion: Range<usize>,
344 len: usize,
345 deletions: HashSet<time::Local>,
346 max_undos: time::Global,
347 visible: bool,
348}
349
350#[derive(Eq, PartialEq, Clone, Debug)]
351pub struct FragmentSummary {
352 text: FragmentTextSummary,
353 max_fragment_id: FragmentId,
354 max_version: time::Global,
355 min_insertion_version: time::Global,
356 max_insertion_version: time::Global,
357}
358
359#[derive(Default, Clone, Debug, PartialEq, Eq)]
360struct FragmentTextSummary {
361 visible: usize,
362 deleted: usize,
363}
364
365impl<'a> sum_tree::Dimension<'a, FragmentSummary> for FragmentTextSummary {
366 fn add_summary(&mut self, summary: &'a FragmentSummary, _: &Option<time::Global>) {
367 self.visible += summary.text.visible;
368 self.deleted += summary.text.deleted;
369 }
370}
371
372#[derive(Eq, PartialEq, Clone, Debug)]
373struct InsertionSplit {
374 extent: usize,
375 fragment_id: FragmentId,
376}
377
378#[derive(Eq, PartialEq, Clone, Debug)]
379struct InsertionSplitSummary {
380 extent: usize,
381}
382
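// Operations exchanged with other replicas: edits, undos, and selection-set updates,
// each carrying a Lamport timestamp used to order concurrent operations.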
383#[derive(Clone, Debug, Eq, PartialEq)]
384pub enum Operation {
385 Edit {
386 edit: EditOperation,
387 lamport_timestamp: time::Lamport,
388 },
389 Undo {
390 undo: UndoOperation,
391 lamport_timestamp: time::Lamport,
392 },
393 UpdateSelections {
394 set_id: SelectionSetId,
395 selections: Option<Arc<[Selection]>>,
396 lamport_timestamp: time::Lamport,
397 },
398}
399
400#[derive(Clone, Debug, Eq, PartialEq)]
401pub struct EditOperation {
402 id: time::Local,
403 version: time::Global,
404 ranges: Vec<Range<usize>>,
405 new_text: Option<String>,
406}
407
408#[derive(Copy, Clone, Debug, Eq, PartialEq)]
409pub struct UndoOperation {
410 id: time::Local,
411 edit_id: time::Local,
412 count: u32,
413}
414
415impl Buffer {
416 pub fn new<T: Into<Arc<str>>>(
417 replica_id: ReplicaId,
418 base_text: T,
419 cx: &mut ModelContext<Self>,
420 ) -> Self {
421 Self::build(replica_id, History::new(base_text.into()), None, None, cx)
422 }
423
424 pub fn from_history(
425 replica_id: ReplicaId,
426 history: History,
427 file: Option<FileHandle>,
428 language: Option<Arc<Language>>,
429 cx: &mut ModelContext<Self>,
430 ) -> Self {
431 Self::build(replica_id, history, file, language, cx)
432 }
433
434 fn build(
435 replica_id: ReplicaId,
436 history: History,
437 file: Option<FileHandle>,
438 language: Option<Arc<Language>>,
439 cx: &mut ModelContext<Self>,
440 ) -> Self {
441 let saved_mtime;
442 if let Some(file) = file.as_ref() {
443 saved_mtime = file.mtime();
444 file.observe_from_model(cx, |this, file, cx| {
445 let version = this.version.clone();
446 if this.version == this.saved_version {
447 if file.is_deleted() {
448 cx.emit(Event::Dirtied);
449 } else {
450 cx.spawn(|handle, mut cx| async move {
451 let (current_version, history) = handle.read_with(&cx, |this, cx| {
452 (this.version.clone(), file.load_history(cx.as_ref()))
453 });
454 if let (Ok(history), true) = (history.await, current_version == version)
455 {
456 let diff = handle
457 .read_with(&cx, |this, cx| this.diff(history.base_text, cx))
458 .await;
459 handle.update(&mut cx, |this, cx| {
460 if let Some(_ops) = this.set_text_via_diff(diff, cx) {
461 this.saved_version = this.version.clone();
462 this.saved_mtime = file.mtime();
463 cx.emit(Event::Reloaded);
464 }
465 });
466 }
467 })
468 .detach();
469 }
470 }
471 cx.emit(Event::FileHandleChanged);
472 });
473 } else {
474 saved_mtime = UNIX_EPOCH;
475 }
476
477 let mut visible_text = Rope::new();
478 let mut insertion_splits = HashMap::default();
479 let mut fragments = SumTree::new();
480
481 let base_text = Rope::from(history.base_text.as_ref());
482 let base_insertion = Arc::new(Insertion {
483 id: time::Local::default(),
484 parent_id: time::Local::default(),
485 offset_in_parent: 0,
486 lamport_timestamp: time::Lamport::default(),
487 });
488
489 insertion_splits.insert(
490 base_insertion.id,
491 SumTree::from_item(
492 InsertionSplit {
493 fragment_id: FragmentId::min_value().clone(),
494 extent: 0,
495 },
496 &(),
497 ),
498 );
499 fragments.push(
500 Fragment::new(
501 FragmentId::min_value().clone(),
502 base_insertion.clone(),
503 0..0,
504 ),
505 &None,
506 );
507
508 if base_text.len() > 0 {
509 let base_fragment_id =
510 FragmentId::between(&FragmentId::min_value(), &FragmentId::max_value());
511 let range_in_insertion = 0..base_text.len();
512
513 visible_text = base_text.clone();
514 insertion_splits.get_mut(&base_insertion.id).unwrap().push(
515 InsertionSplit {
516 fragment_id: base_fragment_id.clone(),
517 extent: range_in_insertion.end,
518 },
519 &(),
520 );
521 fragments.push(
522 Fragment::new(base_fragment_id, base_insertion, range_in_insertion.clone()),
523 &None,
524 );
525 }
526
527 let mut result = Self {
528 visible_text,
529 deleted_text: Rope::new(),
530 fragments,
531 insertion_splits,
532 version: time::Global::new(),
533 saved_version: time::Global::new(),
534 last_edit: time::Local::default(),
535 undo_map: Default::default(),
536 history,
537 file,
538 syntax_tree: Mutex::new(None),
539 is_parsing: false,
540 language,
541 saved_mtime,
542 selections: HashMap::default(),
543 selections_last_update: 0,
544 deferred_ops: OperationQueue::new(),
545 deferred_replicas: HashSet::default(),
546 replica_id,
547 local_clock: time::Local::new(replica_id),
548 lamport_clock: time::Lamport::new(replica_id),
549 };
550 result.reparse(cx);
551 result
552 }
553
554 pub fn snapshot(&self) -> Snapshot {
555 Snapshot {
556 text: self.visible_text.clone(),
557 tree: self.syntax_tree(),
558 language: self.language.clone(),
559 query_cursor: QueryCursorHandle::new(),
560 }
561 }
562
563 pub fn file(&self) -> Option<&FileHandle> {
564 self.file.as_ref()
565 }
566
567 pub fn save(
568 &mut self,
569 new_file: Option<FileHandle>,
570 cx: &mut ModelContext<Self>,
571 ) -> Task<Result<()>> {
572 let text = self.visible_text.clone();
573 let version = self.version.clone();
574 let file = self.file.clone();
575
576 cx.spawn(|handle, mut cx| async move {
577 if let Some(file) = new_file.as_ref().or(file.as_ref()) {
578 let result = cx.read(|cx| file.save(text, cx.as_ref())).await;
579 if result.is_ok() {
580 handle.update(&mut cx, |me, cx| me.did_save(version, new_file, cx));
581 }
582 result
583 } else {
584 Ok(())
585 }
586 })
587 }
588
589 fn did_save(
590 &mut self,
591 version: time::Global,
592 file: Option<FileHandle>,
593 cx: &mut ModelContext<Buffer>,
594 ) {
595 if file.is_some() {
596 self.file = file;
597 }
598 if let Some(file) = &self.file {
599 self.saved_mtime = file.mtime();
600 }
601 self.saved_version = version;
602 cx.emit(Event::Saved);
603 }
604
605 pub fn syntax_tree(&self) -> Option<Tree> {
606 if let Some(syntax_tree) = self.syntax_tree.lock().as_mut() {
607 let mut edited = false;
608 let mut delta = 0_isize;
609 for Edit {
610 old_range,
611 new_range,
612 old_lines,
613 } in self.edits_since(syntax_tree.version.clone())
614 {
615 let start_offset = (old_range.start as isize + delta) as usize;
616 let start_point = self.visible_text.to_point(start_offset);
617 let old_bytes = old_range.end - old_range.start;
618 let new_bytes = new_range.end - new_range.start;
619 syntax_tree.tree.edit(&InputEdit {
620 start_byte: start_offset,
621 old_end_byte: start_offset + old_bytes,
622 new_end_byte: start_offset + new_bytes,
623 start_position: start_point.into(),
624 old_end_position: (start_point + old_lines).into(),
625 new_end_position: self.visible_text.to_point(start_offset + new_bytes).into(),
626 });
627 delta += new_bytes as isize - old_bytes as isize;
628 edited = true;
629 }
630 syntax_tree.parsed &= !edited;
631 syntax_tree.version = self.version();
632 Some(syntax_tree.tree.clone())
633 } else {
634 None
635 }
636 }
637
638 pub fn is_parsing(&self) -> bool {
639 self.is_parsing
640 }
641
642 fn should_reparse(&self) -> bool {
643 if let Some(syntax_tree) = self.syntax_tree.lock().as_ref() {
644 !syntax_tree.parsed || syntax_tree.version != self.version
645 } else {
646 self.language.is_some()
647 }
648 }
649
650 fn reparse(&mut self, cx: &mut ModelContext<Self>) {
651 // Avoid spawning a new parsing task if the buffer is already being reparsed
652 // due to an earlier edit.
653 if self.is_parsing {
654 return;
655 }
656
657 if let Some(language) = self.language.clone() {
658 self.is_parsing = true;
659 cx.spawn(|handle, mut cx| async move {
660 while handle.read_with(&cx, |this, _| this.should_reparse()) {
661 // The parse tree is out of date, so grab the syntax tree to synchronously
662 // splice all the edits that have happened since the last parse.
663 let new_tree = handle.update(&mut cx, |this, _| this.syntax_tree());
664 let (new_text, new_version) = handle
665 .read_with(&cx, |this, _| (this.visible_text.clone(), this.version()));
666
667 // Parse the current text in a background thread.
668 let new_tree = cx
669 .background_executor()
670 .spawn({
671 let language = language.clone();
672 async move { Self::parse_text(&new_text, new_tree, &language) }
673 })
674 .await;
675
676 handle.update(&mut cx, |this, cx| {
677 *this.syntax_tree.lock() = Some(SyntaxTree {
678 tree: new_tree,
679 parsed: true,
680 version: new_version,
681 });
682 cx.emit(Event::Reparsed);
683 cx.notify();
684 });
685 }
686 handle.update(&mut cx, |this, _| this.is_parsing = false);
687 })
688 .detach();
689 }
690 }
691
692 fn parse_text(text: &Rope, old_tree: Option<Tree>, language: &Language) -> Tree {
693 PARSER.with(|parser| {
694 let mut parser = parser.borrow_mut();
695 parser
696 .set_language(language.grammar)
697 .expect("incompatible grammar");
698 let mut chunks = text.chunks_in_range(0..text.len());
699 let tree = parser
700 .parse_with(
701 &mut move |offset, _| {
702 chunks.seek(offset);
703 chunks.next().unwrap_or("").as_bytes()
704 },
705 old_tree.as_ref(),
706 )
707 .unwrap();
708 tree
709 })
710 }
711
712 pub fn range_for_syntax_ancestor<T: ToOffset>(&self, range: Range<T>) -> Option<Range<usize>> {
713 if let Some(tree) = self.syntax_tree() {
714 let root = tree.root_node();
715 let range = range.start.to_offset(self)..range.end.to_offset(self);
716 let mut node = root.descendant_for_byte_range(range.start, range.end);
717 while node.map_or(false, |n| n.byte_range() == range) {
718 node = node.unwrap().parent();
719 }
720 node.map(|n| n.byte_range())
721 } else {
722 None
723 }
724 }
725
726 pub fn enclosing_bracket_ranges<T: ToOffset>(
727 &self,
728 range: Range<T>,
729 ) -> Option<(Range<usize>, Range<usize>)> {
730 let (lang, tree) = self.language.as_ref().zip(self.syntax_tree())?;
731 let open_capture_ix = lang.brackets_query.capture_index_for_name("open")?;
732 let close_capture_ix = lang.brackets_query.capture_index_for_name("close")?;
733
734 // Find bracket pairs that *inclusively* contain the given range.
735 let range = range.start.to_offset(self).saturating_sub(1)..range.end.to_offset(self) + 1;
736 let mut cursor = QueryCursorHandle::new();
737 let matches = cursor.set_byte_range(range).matches(
738 &lang.brackets_query,
739 tree.root_node(),
740 TextProvider(&self.visible_text),
741 );
742
743 // Get the ranges of the innermost pair of brackets.
744 matches
745 .filter_map(|mat| {
746 let open = mat.nodes_for_capture_index(open_capture_ix).next()?;
747 let close = mat.nodes_for_capture_index(close_capture_ix).next()?;
748 Some((open.byte_range(), close.byte_range()))
749 })
750 .min_by_key(|(open_range, close_range)| close_range.end - open_range.start)
751 }
752
753 fn diff(&self, new_text: Arc<str>, cx: &AppContext) -> Task<Diff> {
754 // TODO: it would be nice to not allocate here.
755 let old_text = self.text();
756 let base_version = self.version();
757 cx.background_executor().spawn(async move {
758 let changes = TextDiff::from_lines(old_text.as_str(), new_text.as_ref())
759 .iter_all_changes()
760 .map(|c| (c.tag(), c.value().len()))
761 .collect::<Vec<_>>();
762 Diff {
763 base_version,
764 new_text,
765 changes,
766 }
767 })
768 }
769
770 fn set_text_via_diff(
771 &mut self,
772 diff: Diff,
773 cx: &mut ModelContext<Self>,
774 ) -> Option<Vec<Operation>> {
775 if self.version == diff.base_version {
776 self.start_transaction(None).unwrap();
777 let mut operations = Vec::new();
778 let mut offset = 0;
779 for (tag, len) in diff.changes {
780 let range = offset..(offset + len);
781 match tag {
782 ChangeTag::Equal => offset += len,
783 ChangeTag::Delete => {
784 operations.extend_from_slice(&self.edit(Some(range), "", Some(cx)).unwrap())
785 }
786 ChangeTag::Insert => {
787 operations.extend_from_slice(
788 &self
789 .edit(Some(offset..offset), &diff.new_text[range], Some(cx))
790 .unwrap(),
791 );
792 offset += len;
793 }
794 }
795 }
796 self.end_transaction(None, Some(cx)).unwrap();
797 Some(operations)
798 } else {
799 None
800 }
801 }
802
803 pub fn is_dirty(&self) -> bool {
804 self.version > self.saved_version || self.file.as_ref().map_or(false, |f| f.is_deleted())
805 }
806
807 pub fn has_conflict(&self) -> bool {
808 self.version > self.saved_version
809 && self
810 .file
811 .as_ref()
812 .map_or(false, |f| f.mtime() > self.saved_mtime)
813 }
814
815 pub fn version(&self) -> time::Global {
816 self.version.clone()
817 }
818
819 pub fn text_summary(&self) -> TextSummary {
820 self.visible_text.summary()
821 }
822
823 pub fn text_summary_for_range(&self, range: Range<usize>) -> TextSummary {
824 self.visible_text.cursor(range.start).summary(range.end)
825 }
826
827 pub fn len(&self) -> usize {
828 self.fragments.extent::<usize>(&None)
829 }
830
831 pub fn line_len(&self, row: u32) -> u32 {
832 let row_start_offset = Point::new(row, 0).to_offset(self);
833 let row_end_offset = if row >= self.max_point().row {
834 self.len()
835 } else {
836 Point::new(row + 1, 0).to_offset(self) - 1
837 };
838 (row_end_offset - row_start_offset) as u32
839 }
840
841 pub fn max_point(&self) -> Point {
842 self.visible_text.max_point()
843 }
844
845 pub fn row_count(&self) -> u32 {
846 self.max_point().row + 1
847 }
848
849 pub fn text(&self) -> String {
850 self.text_for_range(0..self.len()).collect()
851 }
852
853 pub fn text_for_range<'a, T: ToOffset>(&'a self, range: Range<T>) -> Chunks<'a> {
854 let start = range.start.to_offset(self);
855 let end = range.end.to_offset(self);
856 self.visible_text.chunks_in_range(start..end)
857 }
858
859 pub fn chars(&self) -> impl Iterator<Item = char> + '_ {
860 self.chars_at(0)
861 }
862
863 pub fn chars_at<T: ToOffset>(&self, position: T) -> impl Iterator<Item = char> + '_ {
864 let offset = position.to_offset(self);
865 self.visible_text.chars_at(offset)
866 }
867
868 pub fn selections_changed_since(&self, since: SelectionsVersion) -> bool {
869 self.selections_last_update != since
870 }
871
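    // Returns the edits made since `since`, expressed as corresponding ranges in the
    // old and new text. Only fragments whose versions changed after `since` are visited.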
872 pub fn edits_since<'a>(&'a self, since: time::Global) -> impl 'a + Iterator<Item = Edit> {
873 let since_2 = since.clone();
874 let cursor = self.fragments.filter(
875 move |summary| summary.max_version.changed_since(&since_2),
876 &None,
877 );
878
879 Edits {
880 deleted_text: &self.deleted_text,
881 cursor,
882 undos: &self.undo_map,
883 since,
884 delta: 0,
885 }
886 }
887
888 pub fn deferred_ops_len(&self) -> usize {
889 self.deferred_ops.len()
890 }
891
892 pub fn start_transaction(&mut self, set_id: Option<SelectionSetId>) -> Result<()> {
893 self.start_transaction_at(set_id, Instant::now())
894 }
895
896 fn start_transaction_at(&mut self, set_id: Option<SelectionSetId>, now: Instant) -> Result<()> {
897 let selections = if let Some(set_id) = set_id {
898 let selections = self
899 .selections
900 .get(&set_id)
901 .ok_or_else(|| anyhow!("invalid selection set {:?}", set_id))?;
902 Some((set_id, selections.clone()))
903 } else {
904 None
905 };
906 self.history
907 .start_transaction(self.version.clone(), self.is_dirty(), selections, now);
908 Ok(())
909 }
910
911 pub fn end_transaction(
912 &mut self,
913 set_id: Option<SelectionSetId>,
914 cx: Option<&mut ModelContext<Self>>,
915 ) -> Result<()> {
916 self.end_transaction_at(set_id, Instant::now(), cx)
917 }
918
919 fn end_transaction_at(
920 &mut self,
921 set_id: Option<SelectionSetId>,
922 now: Instant,
923 cx: Option<&mut ModelContext<Self>>,
924 ) -> Result<()> {
925 let selections = if let Some(set_id) = set_id {
926 let selections = self
927 .selections
928 .get(&set_id)
929 .ok_or_else(|| anyhow!("invalid selection set {:?}", set_id))?;
930 Some((set_id, selections.clone()))
931 } else {
932 None
933 };
934
935 if let Some(transaction) = self.history.end_transaction(selections, now) {
936 let since = transaction.start.clone();
937 let was_dirty = transaction.buffer_was_dirty;
938 self.history.group();
939
940 if let Some(cx) = cx {
941 cx.notify();
942
943 if self.edits_since(since).next().is_some() {
944 self.did_edit(was_dirty, cx);
945 self.reparse(cx);
946 }
947 }
948 }
949
950 Ok(())
951 }
952
953 pub fn edit<I, S, T>(
954 &mut self,
955 old_ranges: I,
956 new_text: T,
957 cx: Option<&mut ModelContext<Self>>,
958 ) -> Result<Vec<Operation>>
959 where
960 I: IntoIterator<Item = Range<S>>,
961 S: ToOffset,
962 T: Into<String>,
963 {
964 self.start_transaction_at(None, Instant::now())?;
965
966 let new_text = new_text.into();
967 let old_ranges = old_ranges
968 .into_iter()
969 .map(|range| range.start.to_offset(self)..range.end.to_offset(self))
970 .collect::<Vec<Range<usize>>>();
971
        let new_text = if new_text.is_empty() { None } else { Some(new_text) };
977
978 let has_new_text = new_text.is_some();
979 let ops = self.splice_fragments(
980 old_ranges
981 .into_iter()
982 .filter(|old_range| has_new_text || old_range.end > old_range.start),
983 new_text.into(),
984 );
985
986 for op in &ops {
987 if let Operation::Edit { edit, .. } = op {
988 self.history.push(edit.clone());
989 self.history.push_undo(edit.id);
990 }
991 }
992
993 if let Some(op) = ops.last() {
994 if let Operation::Edit { edit, .. } = op {
995 self.last_edit = edit.id;
996 self.version.observe(edit.id);
997 } else {
998 unreachable!()
999 }
1000 }
1001
1002 self.end_transaction_at(None, Instant::now(), cx)?;
1003
1004 Ok(ops)
1005 }
1006
1007 fn did_edit(&self, was_dirty: bool, cx: &mut ModelContext<Self>) {
1008 cx.emit(Event::Edited);
1009 if !was_dirty {
1010 cx.emit(Event::Dirtied);
1011 }
1012 }
1013
1014 pub fn add_selection_set(
1015 &mut self,
1016 selections: impl Into<Arc<[Selection]>>,
1017 cx: Option<&mut ModelContext<Self>>,
1018 ) -> (SelectionSetId, Operation) {
1019 let selections = selections.into();
1020 let lamport_timestamp = self.lamport_clock.tick();
1021 self.selections
1022 .insert(lamport_timestamp, Arc::clone(&selections));
1023 self.selections_last_update += 1;
1024
1025 if let Some(cx) = cx {
1026 cx.notify();
1027 }
1028
1029 (
1030 lamport_timestamp,
1031 Operation::UpdateSelections {
1032 set_id: lamport_timestamp,
1033 selections: Some(selections),
1034 lamport_timestamp,
1035 },
1036 )
1037 }
1038
1039 pub fn update_selection_set(
1040 &mut self,
1041 set_id: SelectionSetId,
1042 selections: impl Into<Arc<[Selection]>>,
1043 cx: Option<&mut ModelContext<Self>>,
1044 ) -> Result<Operation> {
1045 let selections = selections.into();
1046 self.selections.insert(set_id, selections.clone());
1047
1048 let lamport_timestamp = self.lamport_clock.tick();
1049 self.selections_last_update += 1;
1050
1051 if let Some(cx) = cx {
1052 cx.notify();
1053 }
1054
1055 Ok(Operation::UpdateSelections {
1056 set_id,
1057 selections: Some(selections),
1058 lamport_timestamp,
1059 })
1060 }
1061
1062 pub fn remove_selection_set(
1063 &mut self,
1064 set_id: SelectionSetId,
1065 cx: Option<&mut ModelContext<Self>>,
1066 ) -> Result<Operation> {
1067 self.selections
1068 .remove(&set_id)
1069 .ok_or_else(|| anyhow!("invalid selection set id {:?}", set_id))?;
1070 let lamport_timestamp = self.lamport_clock.tick();
1071 self.selections_last_update += 1;
1072
1073 if let Some(cx) = cx {
1074 cx.notify();
1075 }
1076
1077 Ok(Operation::UpdateSelections {
1078 set_id,
1079 selections: None,
1080 lamport_timestamp,
1081 })
1082 }
1083
1084 pub fn selections(&self, set_id: SelectionSetId) -> Result<&[Selection]> {
1085 self.selections
1086 .get(&set_id)
1087 .map(|s| s.as_ref())
1088 .ok_or_else(|| anyhow!("invalid selection set id {:?}", set_id))
1089 }
1090
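    // Applies operations received from other replicas, deferring any operation whose
    // causal dependencies have not yet been observed locally.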
1091 pub fn apply_ops<I: IntoIterator<Item = Operation>>(
1092 &mut self,
1093 ops: I,
1094 cx: Option<&mut ModelContext<Self>>,
1095 ) -> Result<()> {
1096 let was_dirty = self.is_dirty();
1097 let old_version = self.version.clone();
1098
1099 let mut deferred_ops = Vec::new();
1100 for op in ops {
1101 if self.can_apply_op(&op) {
1102 self.apply_op(op)?;
1103 } else {
1104 self.deferred_replicas.insert(op.replica_id());
1105 deferred_ops.push(op);
1106 }
1107 }
1108 self.deferred_ops.insert(deferred_ops);
1109 self.flush_deferred_ops()?;
1110
1111 if let Some(cx) = cx {
1112 cx.notify();
1113 if self.edits_since(old_version).next().is_some() {
1114 self.did_edit(was_dirty, cx);
1115 self.reparse(cx);
1116 }
1117 }
1118
1119 Ok(())
1120 }
1121
1122 fn apply_op(&mut self, op: Operation) -> Result<()> {
1123 match op {
1124 Operation::Edit {
1125 edit,
1126 lamport_timestamp,
1127 ..
1128 } => {
1129 if !self.version.observed(edit.id) {
1130 self.apply_edit(
1131 edit.version,
1132 edit.ranges,
1133 edit.new_text.as_deref(),
1134 edit.id,
1135 lamport_timestamp,
1136 )?;
1137 self.version.observe(edit.id);
1138 self.history.push(edit);
1139 }
1140 }
1141 Operation::Undo {
1142 undo,
1143 lamport_timestamp,
1144 } => {
1145 if !self.version.observed(undo.id) {
1146 self.apply_undo(undo)?;
1147 self.version.observe(undo.id);
1148 self.lamport_clock.observe(lamport_timestamp);
1149 }
1150 }
1151 Operation::UpdateSelections {
1152 set_id,
1153 selections,
1154 lamport_timestamp,
1155 } => {
1156 if let Some(selections) = selections {
1157 self.selections.insert(set_id, selections);
1158 } else {
1159 self.selections.remove(&set_id);
1160 }
1161 self.lamport_clock.observe(lamport_timestamp);
1162 self.selections_last_update += 1;
1163 }
1164 }
1165 Ok(())
1166 }
1167
1168 fn apply_edit(
1169 &mut self,
1170 version: time::Global,
1171 ranges: Vec<Range<usize>>,
1172 mut new_text: Option<&str>,
1173 local_timestamp: time::Local,
1174 lamport_timestamp: time::Lamport,
1175 ) -> Result<()> {
1176 let mut old_visible_text = mem::take(&mut self.visible_text);
1177 let mut old_deleted_text = mem::take(&mut self.deleted_text);
1178 let mut old_fragments = mem::take(&mut self.fragments);
1179 let mut old_fragments = old_fragments.cursor::<VersionedOffset, VersionedOffset>();
1180
1181 let version = Some(version);
1182
1183 let mut new_fragments = SumTree::new();
1184 let mut new_ropes =
1185 RopeBuilder::new(old_visible_text.cursor(0), old_deleted_text.cursor(0));
1186 let mut pending_fragment = None;
1187 let mut pending_fragment_start_offset = 0;
1188
1189 for range in ranges {
1190 let preceding_fragments = old_fragments.slice(
1191 &VersionedOffset::Offset(range.start),
1192 SeekBias::Right,
1193 &version,
1194 );
1195 new_fragments.push_tree(preceding_fragments, &None);
1196 new_ropes.push_tree(new_fragments.summary().text);
1197
1198 let mut fragment_start_offset = old_fragments.start().offset();
1199 let mut fragment_end_offset = old_fragments.end(&version).offset();
1200 let mut fragment = if let Some(fragment) = old_fragments.item() {
1201 fragment.clone()
1202 } else {
1203 todo!()
1204 };
1205
1206 if fragment_start_offset < range.start {
1207 let prefix_fragment = Fragment {
1208 len: range.start - fragment_start_offset,
1209 visible: fragment.visible,
1210 deletions: fragment.deletions.clone(),
1211 max_undos: fragment.max_undos.clone(),
1212
1213 // TODO - remove
1214 id: fragment.id.clone(),
1215 insertion: fragment.insertion.clone(),
1216 range_in_insertion: Default::default(),
1217 };
1218
                fragment.len -= prefix_fragment.len;
                new_ropes.push_fragment(&prefix_fragment, prefix_fragment.visible);
                new_fragments.push(prefix_fragment, &None);
1222 }
1223
1224 let suffix_fragment = if fragment_end_offset > range.end {
1225 fragment.visible = false;
1226
1227 //
1228
1229 Some(Fragment {});
1230 } else {
1231 None
1232 };
1233 }
1234
1235 let (visible_text, deleted_text) = new_ropes.finish();
1236 new_fragments.push_tree(old_fragments.suffix(&None), &None);
1237
1238 self.fragments = new_fragments;
1239 self.visible_text = visible_text;
1240 self.deleted_text = deleted_text;
1241 self.local_clock.observe(local_timestamp);
1242 self.lamport_clock.observe(lamport_timestamp);
1243 Ok(())
1244 }
1245
1246 // let mut new_fragments = fragments_cursor.slice(
1247 // &VersionedOffset::Offset(range.start),
1248 // SeekBias::Left,
1249 // &version_cx,
1250 // );
1251 // new_ropes.push_tree(new_fragments.summary().text);
1252
1253 // if range.start == fragments_cursor.end(&version_cx).offset() {
1254 // let fragment = fragments_cursor.item().unwrap().clone();
1255 // new_ropes.push_fragment(&fragment, fragment.visible);
1256 // new_fragments.push(fragment, &None);
1257 // fragments_cursor.next(&None);
1258 // }
1259
1260 // while let Some(fragment) = fragments_cursor.item() {
1261 // let fragment_start_offset = fragments_cursor.start().offset();
1262 // let fragment_end_offset = fragments_cursor.end(&version_cx).offset();
1263
1264 // if new_text.is_none() && fragment_start_offset > range.end {
1265 // break;
1266 // }
1267
1268 // if fragment_start_offset
1269
1270 // if cursor_start_offset < range.start || cursor_end_offset > range.end {
1271 // let split_start = if start_fragment_id == fragment.id {
1272 // start_offset
1273 // } else {
1274 // fragment.range_in_insertion.start
1275 // };
1276 // let split_end = if end_fragment_id == fragment.id {
1277 // end_offset
1278 // } else {
1279 // fragment.range_in_insertion.end
1280 // };
1281 // let (before_range, within_range, after_range) = self.split_fragment(
1282 // fragments_cursor.prev_item().as_ref().unwrap(),
1283 // &fragment,
1284 // split_start..split_end,
1285 // );
1286 // let insertion = if let Some(new_text) = new_text {
1287 // let prev_fragment = fragments_cursor.prev_item();
1288 // Some(self.build_fragment_to_insert(
1289 // before_range.as_ref().or(prev_fragment).unwrap(),
1290 // within_range.as_ref().or(after_range.as_ref()),
1291 // new_text,
1292 // local_timestamp,
1293 // lamport_timestamp,
1294 // ))
1295 // } else {
1296 // None
1297 // };
1298 // if let Some(fragment) = before_range {
1299 // new_ropes.push_fragment(&fragment, fragment.visible);
1300 // new_fragments.push(fragment, &None);
1301 // }
1302 // if let Some(fragment) = insertion {
1303 // new_ropes.push_str(new_text.take().unwrap());
1304 // new_fragments.push(fragment, &None);
1305 // }
1306 // if let Some(mut fragment) = within_range {
1307 // let fragment_was_visible = fragment.visible;
1308 // if fragment.was_visible(&version_in_range, &self.undo_map) {
1309 // fragment.deletions.insert(local_timestamp);
1310 // if fragment.visible {
1311 // fragment.visible = false;
1312 // }
1313 // }
1314 // new_ropes.push_fragment(&fragment, fragment_was_visible);
1315 // new_fragments.push(fragment, &None);
1316 // }
1317 // if let Some(fragment) = after_range {
1318 // new_ropes.push_fragment(&fragment, fragment.visible);
1319 // new_fragments.push(fragment, &None);
1320 // }
1321 // } else {
1322 // if new_text.is_some() && lamport_timestamp > fragment.insertion.lamport_timestamp {
1323 // let new_text = new_text.take().unwrap();
1324 // let fragment = self.build_fragment_to_insert(
1325 // fragments_cursor.prev_item().as_ref().unwrap(),
1326 // Some(&fragment),
1327 // new_text,
1328 // local_timestamp,
1329 // lamport_timestamp,
1330 // );
1331 // new_ropes.push_str(new_text);
1332 // new_fragments.push(fragment, &None);
1333 // }
1334
1335 // let fragment_was_visible = fragment.visible;
1336 // if fragment.id < end_fragment_id
1337 // && fragment.was_visible(&version_in_range, &self.undo_map)
1338 // {
1339 // fragment.deletions.insert(local_timestamp);
1340 // if fragment.visible {
1341 // fragment.visible = false;
1342 // }
1343 // }
1344
1345 // new_ropes.push_fragment(&fragment, fragment_was_visible);
1346 // new_fragments.push(fragment, &None);
1347 // }
1348 // fragments_cursor.next(&None);
1349 // }
1350
1351 // if let Some(new_text) = new_text {
1352 // let fragment = self.build_fragment_to_insert(
1353 // fragments_cursor.prev_item().as_ref().unwrap(),
1354 // None,
1355 // new_text,
1356 // local_timestamp,
1357 // lamport_timestamp,
1358 // );
1359 // new_ropes.push_str(new_text);
1360 // new_fragments.push(fragment, &None);
1361 // }
1362
1363 // let (visible_text, deleted_text) = new_ropes.finish();
1364 // new_fragments.push_tree(fragments_cursor.suffix(&None), &None);
1365
1366 // self.fragments = new_fragments;
1367 // self.visible_text = visible_text;
1368 // self.deleted_text = deleted_text;
1369 // self.local_clock.observe(local_timestamp);
1370 // self.lamport_clock.observe(lamport_timestamp);
1371 // Ok(())
1372 // }
1373
1374 pub fn undo(&mut self, mut cx: Option<&mut ModelContext<Self>>) -> Vec<Operation> {
1375 let was_dirty = self.is_dirty();
1376 let old_version = self.version.clone();
1377
1378 let mut ops = Vec::new();
1379 if let Some(transaction) = self.history.pop_undo() {
1380 let selections = transaction.selections_before.clone();
1381 for edit_id in transaction.edits.clone() {
1382 ops.push(self.undo_or_redo(edit_id).unwrap());
1383 }
1384
1385 if let Some((set_id, selections)) = selections {
1386 let _ = self.update_selection_set(set_id, selections, cx.as_deref_mut());
1387 }
1388 }
1389
1390 if let Some(cx) = cx {
1391 cx.notify();
1392 if self.edits_since(old_version).next().is_some() {
1393 self.did_edit(was_dirty, cx);
1394 self.reparse(cx);
1395 }
1396 }
1397
1398 ops
1399 }
1400
1401 pub fn redo(&mut self, mut cx: Option<&mut ModelContext<Self>>) -> Vec<Operation> {
1402 let was_dirty = self.is_dirty();
1403 let old_version = self.version.clone();
1404
1405 let mut ops = Vec::new();
1406 if let Some(transaction) = self.history.pop_redo() {
1407 let selections = transaction.selections_after.clone();
1408 for edit_id in transaction.edits.clone() {
1409 ops.push(self.undo_or_redo(edit_id).unwrap());
1410 }
1411
1412 if let Some((set_id, selections)) = selections {
1413 let _ = self.update_selection_set(set_id, selections, cx.as_deref_mut());
1414 }
1415 }
1416
1417 if let Some(cx) = cx {
1418 cx.notify();
1419 if self.edits_since(old_version).next().is_some() {
1420 self.did_edit(was_dirty, cx);
1421 self.reparse(cx);
1422 }
1423 }
1424
1425 ops
1426 }
1427
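    // Builds and applies an undo operation for a single edit. Undo and redo share this
    // path: each call increments the edit's undo count, and an odd count marks the edit
    // as undone.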
1428 fn undo_or_redo(&mut self, edit_id: time::Local) -> Result<Operation> {
1429 let undo = UndoOperation {
1430 id: self.local_clock.tick(),
1431 edit_id,
1432 count: self.undo_map.undo_count(edit_id) + 1,
1433 };
1434 self.apply_undo(undo)?;
1435 self.version.observe(undo.id);
1436
1437 Ok(Operation::Undo {
1438 undo,
1439 lamport_timestamp: self.lamport_clock.tick(),
1440 })
1441 }
1442
1443 fn apply_undo(&mut self, undo: UndoOperation) -> Result<()> {
1444 let mut new_fragments;
1445 let mut old_visible_text = Rope::new();
1446 let mut old_deleted_text = Rope::new();
1447 mem::swap(&mut old_visible_text, &mut self.visible_text);
1448 mem::swap(&mut old_deleted_text, &mut self.deleted_text);
1449 let mut new_ropes =
1450 RopeBuilder::new(old_visible_text.cursor(0), old_deleted_text.cursor(0));
1451
1452 self.undo_map.insert(undo);
1453 let edit = &self.history.ops[&undo.edit_id];
1454 let start_fragment_id = self.resolve_fragment_id(edit.start_id, edit.start_offset)?;
1455 let end_fragment_id = self.resolve_fragment_id(edit.end_id, edit.end_offset)?;
1456
1457 let mut fragments_cursor = self.fragments.cursor::<FragmentIdRef, ()>();
1458
1459 if edit.start_id == edit.end_id && edit.start_offset == edit.end_offset {
1460 let splits = &self.insertion_splits[&undo.edit_id];
1461 let mut insertion_splits = splits.cursor::<(), ()>().map(|s| &s.fragment_id).peekable();
1462
1463 let first_split_id = insertion_splits.next().unwrap();
1464 new_fragments =
1465 fragments_cursor.slice(&FragmentIdRef::new(first_split_id), SeekBias::Left, &None);
1466 new_ropes.push_tree(new_fragments.summary().text);
1467
1468 loop {
1469 let mut fragment = fragments_cursor.item().unwrap().clone();
1470 let was_visible = fragment.visible;
1471 fragment.visible = fragment.is_visible(&self.undo_map);
1472 fragment.max_undos.observe(undo.id);
1473
1474 new_ropes.push_fragment(&fragment, was_visible);
1475 new_fragments.push(fragment.clone(), &None);
1476
1477 fragments_cursor.next(&None);
1478 if let Some(split_id) = insertion_splits.next() {
1479 let slice = fragments_cursor.slice(
1480 &FragmentIdRef::new(split_id),
1481 SeekBias::Left,
1482 &None,
1483 );
1484 new_ropes.push_tree(slice.summary().text);
1485 new_fragments.push_tree(slice, &None);
1486 } else {
1487 break;
1488 }
1489 }
1490 } else {
1491 new_fragments = fragments_cursor.slice(
1492 &FragmentIdRef::new(&start_fragment_id),
1493 SeekBias::Left,
1494 &None,
1495 );
1496 new_ropes.push_tree(new_fragments.summary().text);
1497
1498 while let Some(fragment) = fragments_cursor.item() {
1499 if fragment.id > end_fragment_id {
1500 break;
1501 } else {
1502 let mut fragment = fragment.clone();
1503 let fragment_was_visible = fragment.visible;
1504 if edit.version_in_range.observed(fragment.insertion.id)
1505 || fragment.insertion.id == undo.edit_id
1506 {
1507 fragment.visible = fragment.is_visible(&self.undo_map);
1508 fragment.max_undos.observe(undo.id);
1509 }
1510
1511 new_ropes.push_fragment(&fragment, fragment_was_visible);
1512 new_fragments.push(fragment, &None);
1513 fragments_cursor.next(&None);
1514 }
1515 }
1516 }
1517
1518 new_fragments.push_tree(fragments_cursor.suffix(&None), &None);
1519 let (visible_text, deleted_text) = new_ropes.finish();
1520 drop(fragments_cursor);
1521
1522 self.fragments = new_fragments;
1523 self.visible_text = visible_text;
1524 self.deleted_text = deleted_text;
1525
1526 Ok(())
1527 }
1528
1529 fn flush_deferred_ops(&mut self) -> Result<()> {
1530 self.deferred_replicas.clear();
1531 let mut deferred_ops = Vec::new();
1532 for op in self.deferred_ops.drain().cursor().cloned() {
1533 if self.can_apply_op(&op) {
1534 self.apply_op(op)?;
1535 } else {
1536 self.deferred_replicas.insert(op.replica_id());
1537 deferred_ops.push(op);
1538 }
1539 }
1540 self.deferred_ops.insert(deferred_ops);
1541 Ok(())
1542 }
1543
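    // An operation can be applied only if every version it depends on has already been
    // observed; otherwise it is deferred, along with all subsequent operations from the
    // same replica.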
1544 fn can_apply_op(&self, op: &Operation) -> bool {
1545 if self.deferred_replicas.contains(&op.replica_id()) {
1546 false
1547 } else {
1548 match op {
1549 Operation::Edit { edit, .. } => {
1550 self.version.observed(edit.start_id)
1551 && self.version.observed(edit.end_id)
1552 && edit.version_in_range <= self.version
1553 }
1554 Operation::Undo { undo, .. } => self.version.observed(undo.edit_id),
1555 Operation::UpdateSelections { selections, .. } => {
1556 if let Some(selections) = selections {
1557 selections.iter().all(|selection| {
1558 let contains_start = match &selection.start {
1559 Anchor::Middle { version, .. } => self.version >= *version,
1560 _ => true,
1561 };
1562 let contains_end = match &selection.end {
1563 Anchor::Middle { version, .. } => self.version >= *version,
1564 _ => true,
1565 };
1566 contains_start && contains_end
1567 })
1568 } else {
1569 true
1570 }
1571 }
1572 }
1573 }
1574 }
1575
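    // Resolves an (insertion id, offset) pair to the id of the fragment that currently
    // contains that offset, using the insertion's split tree.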
1576 fn resolve_fragment_id(&self, edit_id: time::Local, offset: usize) -> Result<FragmentId> {
1577 let split_tree = self
1578 .insertion_splits
1579 .get(&edit_id)
1580 .ok_or_else(|| anyhow!("invalid operation"))?;
1581 let mut cursor = split_tree.cursor::<usize, ()>();
1582 cursor.seek(&offset, SeekBias::Left, &());
1583 Ok(cursor
1584 .item()
1585 .ok_or_else(|| anyhow!("invalid operation"))?
1586 .fragment_id
1587 .clone())
1588 }
1589
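    // Applies a local edit: splits the fragments overlapping each of `old_ranges`,
    // marks the removed portions as deleted, and inserts new fragments for `new_text`
    // when it is present. Returns the operations to send to other replicas.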
1590 fn splice_fragments<I>(&mut self, mut old_ranges: I, new_text: Option<String>) -> Vec<Operation>
1591 where
1592 I: Iterator<Item = Range<usize>>,
1593 {
1594 let mut cur_range = old_ranges.next();
1595 if cur_range.is_none() {
1596 return Vec::new();
1597 }
1598
1599 let version = &self.version;
1600 let mut ops = Vec::with_capacity(old_ranges.size_hint().0);
1601
1602 let mut old_fragments = SumTree::new();
1603 let mut old_visible_text = Rope::new();
1604 let mut old_deleted_text = Rope::new();
1605 mem::swap(&mut old_visible_text, &mut self.visible_text);
1606 mem::swap(&mut old_deleted_text, &mut self.deleted_text);
1607 mem::swap(&mut old_fragments, &mut self.fragments);
1608
1609 let mut fragments_cursor = old_fragments.cursor::<usize, usize>();
1610 let mut new_fragments =
1611 fragments_cursor.slice(&cur_range.as_ref().unwrap().start, SeekBias::Right, &None);
1612
1613 let mut new_ropes =
1614 RopeBuilder::new(old_visible_text.cursor(0), old_deleted_text.cursor(0));
1615 new_ropes.push_tree(new_fragments.summary().text);
1616
1617 let mut start_id = None;
1618 let mut start_offset = None;
1619 let mut end_id = None;
1620 let mut end_offset = None;
1621 let mut version_in_range = time::Global::new();
1622
1623 let mut local_timestamp = self.local_clock.tick();
1624 let mut lamport_timestamp = self.lamport_clock.tick();
1625
1626 while cur_range.is_some() && fragments_cursor.item().is_some() {
1627 let mut fragment = fragments_cursor.item().unwrap().clone();
1628 let fragment_summary = fragments_cursor.item_summary().unwrap();
1629 let mut fragment_start = *fragments_cursor.start();
1630 let mut fragment_end = fragment_start + fragment.visible_len();
1631 let fragment_was_visible = fragment.visible;
1632
1633 let old_split_tree = self
1634 .insertion_splits
1635 .remove(&fragment.insertion.id)
1636 .unwrap();
1637 let mut splits_cursor = old_split_tree.cursor::<usize, ()>();
1638 let mut new_split_tree =
1639 splits_cursor.slice(&fragment.range_in_insertion.start, SeekBias::Right, &());
1640
1641 // Find all splices that start or end within the current fragment. Then, split the
1642 // fragment and reassemble it in both trees accounting for the deleted and the newly
1643 // inserted text.
1644 while cur_range.as_ref().map_or(false, |r| r.start < fragment_end) {
1645 let range = cur_range.clone().unwrap();
1646 if range.start > fragment_start {
1647 let mut prefix = fragment.clone();
1648 prefix.range_in_insertion.end =
1649 prefix.range_in_insertion.start + (range.start - fragment_start);
1650 prefix.id =
1651 FragmentId::between(&new_fragments.last().unwrap().id, &fragment.id);
1652 fragment.range_in_insertion.start = prefix.range_in_insertion.end;
1653
1654 new_ropes.push_fragment(&prefix, prefix.visible);
1655 new_fragments.push(prefix.clone(), &None);
1656 new_split_tree.push(
1657 InsertionSplit {
1658 extent: prefix.range_in_insertion.end - prefix.range_in_insertion.start,
1659 fragment_id: prefix.id,
1660 },
1661 &(),
1662 );
1663 fragment_start = range.start;
1664 }
1665
1666 if range.end == fragment_start {
1667 end_id = Some(new_fragments.last().unwrap().insertion.id);
1668 end_offset = Some(new_fragments.last().unwrap().range_in_insertion.end);
1669 } else if range.end == fragment_end {
1670 end_id = Some(fragment.insertion.id);
1671 end_offset = Some(fragment.range_in_insertion.end);
1672 }
1673
1674 if range.start == fragment_start {
1675 start_id = Some(new_fragments.last().unwrap().insertion.id);
1676 start_offset = Some(new_fragments.last().unwrap().range_in_insertion.end);
1677
1678 if let Some(new_text) = new_text.clone() {
1679 let new_fragment = self.build_fragment_to_insert(
1680 &new_fragments.last().unwrap(),
1681 Some(&fragment),
1682 &new_text,
1683 local_timestamp,
1684 lamport_timestamp,
1685 );
1686
1687 new_ropes.push_str(&new_text);
1688 new_fragments.push(new_fragment, &None);
1689 }
1690 }
1691
1692 if range.end < fragment_end {
1693 if range.end > fragment_start {
1694 let mut prefix = fragment.clone();
1695 prefix.range_in_insertion.end =
1696 prefix.range_in_insertion.start + (range.end - fragment_start);
1697 prefix.id =
1698 FragmentId::between(&new_fragments.last().unwrap().id, &fragment.id);
1699 version_in_range.join(&fragment_summary.max_version);
1700 if prefix.visible {
1701 prefix.deletions.insert(local_timestamp);
1702 prefix.visible = false;
1703 }
1704 fragment.range_in_insertion.start = prefix.range_in_insertion.end;
1705 new_ropes.push_fragment(&prefix, fragment_was_visible);
1706 new_fragments.push(prefix.clone(), &None);
1707 new_split_tree.push(
1708 InsertionSplit {
1709 extent: prefix.range_in_insertion.end
1710 - prefix.range_in_insertion.start,
1711 fragment_id: prefix.id,
1712 },
1713 &(),
1714 );
1715 fragment_start = range.end;
1716 end_id = Some(fragment.insertion.id);
1717 end_offset = Some(fragment.range_in_insertion.start);
1718 }
1719 } else {
1720 version_in_range.join(&fragment_summary.max_version);
1721 if fragment.visible {
1722 fragment.deletions.insert(local_timestamp);
1723 fragment.visible = false;
1724 }
1725 }
1726
1727 // If the splice ends inside this fragment, we can advance to the next splice and
1728 // check if it also intersects the current fragment. Otherwise we break out of the
1729 // loop and find the first fragment that the splice does not contain fully.
1730 if range.end <= fragment_end {
1731 ops.push(Operation::Edit {
1732 edit: EditOperation {
1733 id: local_timestamp,
1734 version,
1735 range,
1736 new_text: new_text.clone(),
1737 },
1738 lamport_timestamp,
1739 });
1740
1741 start_id = None;
1742 start_offset = None;
1743 end_id = None;
1744 end_offset = None;
1745 version_in_range = time::Global::new();
1746 cur_range = old_ranges.next();
1747 if cur_range.is_some() {
1748 local_timestamp = self.local_clock.tick();
1749 lamport_timestamp = self.lamport_clock.tick();
1750 }
1751 } else {
1752 break;
1753 }
1754 }
1755 new_split_tree.push(
1756 InsertionSplit {
1757 extent: fragment.range_in_insertion.end - fragment.range_in_insertion.start,
1758 fragment_id: fragment.id.clone(),
1759 },
1760 &(),
1761 );
1762 splits_cursor.next(&());
1763 new_split_tree.push_tree(
1764 splits_cursor.slice(&old_split_tree.extent::<usize>(&()), SeekBias::Right, &()),
1765 &(),
1766 );
1767 self.insertion_splits
1768 .insert(fragment.insertion.id, new_split_tree);
1769
1770 new_ropes.push_fragment(&fragment, fragment_was_visible);
1771 new_fragments.push(fragment, &None);
1772
1773 // Scan forward until we find a fragment that is not fully contained by the current splice.
1774 fragments_cursor.next(&None);
1775 if let Some(range) = cur_range.clone() {
1776 while let Some(fragment) = fragments_cursor.item() {
1777 let fragment_summary = fragments_cursor.item_summary().unwrap();
1778 let fragment_was_visible = fragment.visible;
1779 fragment_start = *fragments_cursor.start();
1780 fragment_end = fragment_start + fragment.visible_len();
1781 if range.start < fragment_start && range.end >= fragment_end {
1782 let mut new_fragment = fragment.clone();
1783 version_in_range.join(&fragment_summary.max_version);
1784 if new_fragment.visible {
1785 new_fragment.deletions.insert(local_timestamp);
1786 new_fragment.visible = false;
1787 }
1788
1789 new_ropes.push_fragment(&new_fragment, fragment_was_visible);
1790 new_fragments.push(new_fragment, &None);
1791 fragments_cursor.next(&None);
1792
1793 if range.end == fragment_end {
1794 end_id = Some(fragment.insertion.id);
1795 end_offset = Some(fragment.range_in_insertion.end);
1796 ops.push(Operation::Edit {
1797 edit: EditOperation {
1798 id: local_timestamp,
1799 start_id: start_id.unwrap(),
1800 start_offset: start_offset.unwrap(),
1801 end_id: end_id.unwrap(),
1802 end_offset: end_offset.unwrap(),
1803 version_in_range,
1804 new_text: new_text.clone(),
1805 },
1806 lamport_timestamp,
1807 });
1808
1809 start_id = None;
1810 start_offset = None;
1811 end_id = None;
1812 end_offset = None;
1813 version_in_range = time::Global::new();
1814
1815 cur_range = old_ranges.next();
1816 if cur_range.is_some() {
1817 local_timestamp = self.local_clock.tick();
1818 lamport_timestamp = self.lamport_clock.tick();
1819 }
1820 break;
1821 }
1822 } else {
1823 break;
1824 }
1825 }
1826
1827 // If the splice we are currently evaluating starts after the end of the fragment
1828 // that the cursor is parked at, we should seek to the next splice's start range
1829 // and push all the fragments in between into the new tree.
1830 if cur_range.as_ref().map_or(false, |r| r.start > fragment_end) {
1831 let slice = fragments_cursor.slice(
1832 &cur_range.as_ref().unwrap().start,
1833 SeekBias::Right,
1834 &None,
1835 );
1836 new_ropes.push_tree(slice.summary().text);
1837 new_fragments.push_tree(slice, &None);
1838 }
1839 }
1840 }
1841
1842 // Handle range that is at the end of the buffer if it exists. There should never be
1843 // multiple because ranges must be disjoint.
1844 if cur_range.is_some() {
1845 debug_assert_eq!(old_ranges.next(), None);
1846 let last_fragment = new_fragments.last().unwrap();
1847 ops.push(Operation::Edit {
1848 edit: EditOperation {
1849 id: local_timestamp,
1850 start_id: last_fragment.insertion.id,
1851 start_offset: last_fragment.range_in_insertion.end,
1852 end_id: last_fragment.insertion.id,
1853 end_offset: last_fragment.range_in_insertion.end,
1854 version_in_range: time::Global::new(),
1855 // TODO: avoid cloning the String.
1856 new_text: new_text.clone(),
1857 },
1858 lamport_timestamp,
1859 });
1860
1861 if let Some(new_text) = new_text {
1862 let new_fragment = self.build_fragment_to_insert(
1863 &last_fragment,
1864 None,
1865 &new_text,
1866 local_timestamp,
1867 lamport_timestamp,
1868 );
1869
1870 new_ropes.push_str(&new_text);
1871 new_fragments.push(new_fragment, &None);
1872 }
1873 }
1874
1875 new_fragments.push_tree(fragments_cursor.suffix(&None), &None);
1876 let (visible_text, deleted_text) = new_ropes.finish();
1877
1878 self.fragments = new_fragments;
1879 self.visible_text = visible_text;
1880 self.deleted_text = deleted_text;
1881 ops
1882 }
1883
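    // Splits `fragment` around `range`, returning the pieces falling before, within,
    // and after the range (any of which may be absent) and updating the insertion's
    // split tree to match.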
1884 fn split_fragment(
1885 &mut self,
1886 prev_fragment: &Fragment,
1887 fragment: &Fragment,
1888 range: Range<usize>,
1889 ) -> (Option<Fragment>, Option<Fragment>, Option<Fragment>) {
1890 debug_assert!(range.start >= fragment.range_in_insertion.start);
1891 debug_assert!(range.start <= fragment.range_in_insertion.end);
1892 debug_assert!(range.end <= fragment.range_in_insertion.end);
1893 debug_assert!(range.end >= fragment.range_in_insertion.start);
1894
1895 if range.end == fragment.range_in_insertion.start {
1896 (None, None, Some(fragment.clone()))
1897 } else if range.start == fragment.range_in_insertion.end {
1898 (Some(fragment.clone()), None, None)
1899 } else if range.start == fragment.range_in_insertion.start
1900 && range.end == fragment.range_in_insertion.end
1901 {
1902 (None, Some(fragment.clone()), None)
1903 } else {
1904 let mut prefix = fragment.clone();
1905
1906 let after_range = if range.end < fragment.range_in_insertion.end {
1907 let mut suffix = prefix.clone();
1908 suffix.range_in_insertion.start = range.end;
1909 prefix.range_in_insertion.end = range.end;
1910 prefix.id = FragmentId::between(&prev_fragment.id, &suffix.id);
1911 Some(suffix)
1912 } else {
1913 None
1914 };
1915
1916 let within_range = if range.start != range.end {
1917 let mut suffix = prefix.clone();
1918 suffix.range_in_insertion.start = range.start;
1919 prefix.range_in_insertion.end = range.start;
1920 prefix.id = FragmentId::between(&prev_fragment.id, &suffix.id);
1921 Some(suffix)
1922 } else {
1923 None
1924 };
1925
1926 let before_range = if range.start > fragment.range_in_insertion.start {
1927 Some(prefix)
1928 } else {
1929 None
1930 };
1931
1932 let old_split_tree = self
1933 .insertion_splits
1934 .remove(&fragment.insertion.id)
1935 .unwrap();
1936 let mut cursor = old_split_tree.cursor::<usize, ()>();
1937 let mut new_split_tree =
1938 cursor.slice(&fragment.range_in_insertion.start, SeekBias::Right, &());
1939
1940 if let Some(ref fragment) = before_range {
1941 new_split_tree.push(
1942 InsertionSplit {
1943 extent: range.start - fragment.range_in_insertion.start,
1944 fragment_id: fragment.id.clone(),
1945 },
1946 &(),
1947 );
1948 }
1949
1950 if let Some(ref fragment) = within_range {
1951 new_split_tree.push(
1952 InsertionSplit {
1953 extent: range.end - range.start,
1954 fragment_id: fragment.id.clone(),
1955 },
1956 &(),
1957 );
1958 }
1959
1960 if let Some(ref fragment) = after_range {
1961 new_split_tree.push(
1962 InsertionSplit {
1963 extent: fragment.range_in_insertion.end - range.end,
1964 fragment_id: fragment.id.clone(),
1965 },
1966 &(),
1967 );
1968 }
1969
1970 cursor.next(&());
1971 new_split_tree.push_tree(
1972 cursor.slice(&old_split_tree.extent::<usize>(&()), SeekBias::Right, &()),
1973 &(),
1974 );
1975
1976 self.insertion_splits
1977 .insert(fragment.insertion.id, new_split_tree);
1978
1979 (before_range, within_range, after_range)
1980 }
1981 }
1982
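    // Builds a fragment for newly inserted text, assigning it an id that sorts
    // between `prev_fragment` and `next_fragment` (or the maximum id when there
    // is no successor) and recording a single insertion split covering the
    // entire text.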
1983 fn build_fragment_to_insert(
1984 &mut self,
1985 prev_fragment: &Fragment,
1986 next_fragment: Option<&Fragment>,
1987 text: &str,
1988 insertion_id: time::Local,
1989 lamport_timestamp: time::Lamport,
1990 ) -> Fragment {
1991 let new_fragment_id = FragmentId::between(
1992 &prev_fragment.id,
1993 next_fragment
1994 .map(|f| &f.id)
1995 .unwrap_or(&FragmentId::max_value()),
1996 );
1997
1998 let range_in_insertion = 0..text.len();
1999 let mut split_tree = SumTree::new();
2000 split_tree.push(
2001 InsertionSplit {
2002 extent: range_in_insertion.len(),
2003 fragment_id: new_fragment_id.clone(),
2004 },
2005 &(),
2006 );
2007 self.insertion_splits.insert(insertion_id, split_tree);
2008
2009 Fragment::new(
2010 new_fragment_id,
2011 Arc::new(Insertion {
2012 id: insertion_id,
2013 parent_id: prev_fragment.insertion.id,
2014 offset_in_parent: prev_fragment.range_in_insertion.end,
2015 lamport_timestamp,
2016 }),
2017 range_in_insertion,
2018 )
2019 }
2020
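    /// Returns an anchor for `position` that is biased to the left: it stays put
    /// when text is inserted exactly at its position, whereas a right-biased
    /// anchor (see `anchor_after`) moves past the insertion. An illustrative
    /// sketch, mirroring `test_anchors_at_start_and_end` below, starting from an
    /// empty buffer:
    ///
    /// ```ignore
    /// let before = buffer.anchor_before(0);
    /// let after = buffer.anchor_after(0);
    /// buffer.edit(vec![0..0], "abc", None).unwrap();
    /// assert_eq!(before.to_offset(&buffer), 0);
    /// assert_eq!(after.to_offset(&buffer), 3);
    /// ```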
2021 pub fn anchor_before<T: ToOffset>(&self, position: T) -> Anchor {
2022 self.anchor_at(position, AnchorBias::Left)
2023 }
2024
2025 pub fn anchor_after<T: ToOffset>(&self, position: T) -> Anchor {
2026 self.anchor_at(position, AnchorBias::Right)
2027 }
2028
2029 pub fn anchor_at<T: ToOffset>(&self, position: T, bias: AnchorBias) -> Anchor {
2030 let offset = position.to_offset(self);
2031 let max_offset = self.len();
2032 assert!(offset <= max_offset, "offset is out of range");
2033
2034 if offset == 0 && bias == AnchorBias::Left {
2035 Anchor::Start
2036 } else if offset == max_offset && bias == AnchorBias::Right {
2037 Anchor::End
2038 } else {
2039 let mut cursor = self.fragments.cursor::<usize, FragmentTextSummary>();
2040 cursor.seek(&offset, bias.to_seek_bias(), &None);
2041 Anchor::Middle {
2042 offset: offset + cursor.start().deleted,
2043 bias,
2044 version: self.version(),
2045 }
2046 }
2047 }
2048
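    // Resolves an anchor to a summary of the text preceding it: the containing
    // fragment is located using offsets as of the anchor's version, then the
    // position is translated into the current visible coordinate space.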
2049 fn summary_for_anchor(&self, anchor: &Anchor) -> TextSummary {
2050 match anchor {
2051 Anchor::Start => TextSummary::default(),
2052 Anchor::End => self.text_summary(),
2053 Anchor::Middle {
2054 offset,
2055 bias,
2056 version,
2057 } => {
2058 let mut cursor = self
2059 .fragments
2060 .cursor::<VersionedOffset, (VersionedOffset, usize)>();
2061 cursor.seek(
2062 &VersionedOffset::Offset(*offset),
2063 bias.to_seek_bias(),
2064 &Some(version.clone()),
2065 );
2066 let fragment = cursor.item().unwrap();
2067 let overshoot = if fragment.visible {
2068 offset - cursor.start().0.offset()
2069 } else {
2070 0
2071 };
2072
2073 self.text_summary_for_range(0..cursor.start().1 + overshoot)
2074 }
2075 }
2076 }
2077
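    // Like `summary_for_anchor`, but returns the anchor's position in "full"
    // coordinates, which count deleted text as well as visible text.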
2078 fn full_offset_for_anchor(&self, anchor: &Anchor) -> usize {
2079 match anchor {
2080 Anchor::Start => 0,
2081 Anchor::End => self.fragments.extent::<FullOffset>(&None).0,
2082 Anchor::Middle {
2083 offset,
2084 bias,
2085 version,
2086 } => {
2087 let mut cursor = self
2088 .fragments
2089 .cursor::<VersionedOffset, (VersionedOffset, FullOffset)>();
2090 cursor.seek(
2091 &VersionedOffset::Offset(*offset),
2092 bias.to_seek_bias(),
2093 &Some(version.clone()),
2094 );
2095 let full_offset = cursor.start().1;
2096 full_offset.0 + offset - cursor.start().0.offset()
2097 }
2098 }
2099 }
2100
2101 pub fn point_for_offset(&self, offset: usize) -> Result<Point> {
2102 if offset <= self.len() {
2103 Ok(self.text_summary_for_range(0..offset).lines)
2104 } else {
2105 Err(anyhow!("offset out of bounds"))
2106 }
2107 }
2108
2109 pub fn clip_point(&self, point: Point, bias: Bias) -> Point {
2110 self.visible_text.clip_point(point, bias)
2111 }
2112
2113 pub fn clip_offset(&self, offset: usize, bias: Bias) -> usize {
2114 self.visible_text.clip_offset(offset, bias)
2115 }
2116}
2117
2118impl Clone for Buffer {
2119 fn clone(&self) -> Self {
2120 Self {
2121 fragments: self.fragments.clone(),
2122 visible_text: self.visible_text.clone(),
2123 deleted_text: self.deleted_text.clone(),
2124 insertion_splits: self.insertion_splits.clone(),
2125 version: self.version.clone(),
2126 saved_version: self.saved_version.clone(),
2127 saved_mtime: self.saved_mtime,
2128 last_edit: self.last_edit.clone(),
2129 undo_map: self.undo_map.clone(),
2130 history: self.history.clone(),
2131 selections: self.selections.clone(),
2132 selections_last_update: self.selections_last_update.clone(),
2133 deferred_ops: self.deferred_ops.clone(),
2134 file: self.file.clone(),
2135 language: self.language.clone(),
2136 syntax_tree: Mutex::new(self.syntax_tree.lock().clone()),
2137 is_parsing: false,
2138 deferred_replicas: self.deferred_replicas.clone(),
2139 replica_id: self.replica_id,
2140 local_clock: self.local_clock.clone(),
2141 lamport_clock: self.lamport_clock.clone(),
2142 }
2143 }
2144}
2145
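// An immutable copy of the buffer's visible text, along with its most recent
// syntax tree and language, which can be read (e.g. for syntax highlighting)
// without holding on to the buffer itself.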
2146pub struct Snapshot {
2147 text: Rope,
2148 tree: Option<Tree>,
2149 language: Option<Arc<Language>>,
2150 query_cursor: QueryCursorHandle,
2151}
2152
2153impl Snapshot {
2154 pub fn len(&self) -> usize {
2155 self.text.len()
2156 }
2157
2158 pub fn text(&self) -> Rope {
2159 self.text.clone()
2160 }
2161
2162 pub fn text_for_range(&self, range: Range<usize>) -> Chunks {
2163 self.text.chunks_in_range(range)
2164 }
2165
2166 pub fn highlighted_text_for_range(&mut self, range: Range<usize>) -> HighlightedChunks {
2167 let chunks = self.text.chunks_in_range(range.clone());
2168 if let Some((language, tree)) = self.language.as_ref().zip(self.tree.as_ref()) {
2169 let captures = self.query_cursor.set_byte_range(range.clone()).captures(
2170 &language.highlight_query,
2171 tree.root_node(),
2172 TextProvider(&self.text),
2173 );
2174
2175 HighlightedChunks {
2176 range,
2177 chunks,
2178 highlights: Some(Highlights {
2179 captures,
2180 next_capture: None,
2181 stack: Default::default(),
2182 theme_mapping: language.theme_mapping(),
2183 }),
2184 }
2185 } else {
2186 HighlightedChunks {
2187 range,
2188 chunks,
2189 highlights: None,
2190 }
2191 }
2192 }
2193
2194 pub fn clip_offset(&self, offset: usize, bias: Bias) -> usize {
2195 self.text.clip_offset(offset, bias)
2196 }
2197
2198 pub fn clip_point(&self, point: Point, bias: Bias) -> Point {
2199 self.text.clip_point(point, bias)
2200 }
2201
2202 pub fn to_offset(&self, point: Point) -> usize {
2203 self.text.to_offset(point)
2204 }
2205
2206 pub fn to_point(&self, offset: usize) -> Point {
2207 self.text.to_point(offset)
2208 }
2209}
2210
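// Rebuilds the visible and deleted ropes during an edit by copying unchanged
// spans from cursors over the old ropes and appending newly inserted text.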
2211struct RopeBuilder<'a> {
2212 old_visible_cursor: rope::Cursor<'a>,
2213 old_deleted_cursor: rope::Cursor<'a>,
2214 new_visible: Rope,
2215 new_deleted: Rope,
2216}
2217
2218impl<'a> RopeBuilder<'a> {
2219 fn new(old_visible_cursor: rope::Cursor<'a>, old_deleted_cursor: rope::Cursor<'a>) -> Self {
2220 Self {
2221 old_visible_cursor,
2222 old_deleted_cursor,
2223 new_visible: Rope::new(),
2224 new_deleted: Rope::new(),
2225 }
2226 }
2227
2228 fn push_tree(&mut self, len: FragmentTextSummary) {
2229 self.push(len.visible, true, true);
2230 self.push(len.deleted, false, false);
2231 }
2232
2233 fn push_fragment(&mut self, fragment: &Fragment, was_visible: bool) {
2234 self.push(fragment.len(), was_visible, fragment.visible)
2235 }
2236
2237 fn push(&mut self, len: usize, was_visible: bool, is_visible: bool) {
2238 let text = if was_visible {
2239 self.old_visible_cursor
2240 .slice(self.old_visible_cursor.offset() + len)
2241 } else {
2242 self.old_deleted_cursor
2243 .slice(self.old_deleted_cursor.offset() + len)
2244 };
2245 if is_visible {
2246 self.new_visible.append(text);
2247 } else {
2248 self.new_deleted.append(text);
2249 }
2250 }
2251
2252 fn push_str(&mut self, text: &str) {
2253 self.new_visible.push(text);
2254 }
2255
2256 fn finish(mut self) -> (Rope, Rope) {
2257 self.new_visible.append(self.old_visible_cursor.suffix());
2258 self.new_deleted.append(self.old_deleted_cursor.suffix());
2259 (self.new_visible, self.new_deleted)
2260 }
2261}
2262
2263#[derive(Clone, Debug, Eq, PartialEq)]
2264pub enum Event {
2265 Edited,
2266 Dirtied,
2267 Saved,
2268 FileHandleChanged,
2269 Reloaded,
2270 Reparsed,
2271}
2272
2273impl Entity for Buffer {
2274 type Event = Event;
2275}
2276
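// Yields the edits made since a given version by walking the fragment tree:
// consecutive fragments that became visible are coalesced into insertions and
// consecutive fragments that became hidden into deletions, with `delta`
// tracking the running difference between old and new offsets.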
2277impl<'a, F: Fn(&FragmentSummary) -> bool> Iterator for Edits<'a, F> {
2278 type Item = Edit;
2279
2280 fn next(&mut self) -> Option<Self::Item> {
2281 let mut change: Option<Edit> = None;
2282
2283 while let Some(fragment) = self.cursor.item() {
2284 let new_offset = self.cursor.start().visible;
2285 let old_offset = (new_offset as isize - self.delta) as usize;
2286
2287 if !fragment.was_visible(&self.since, &self.undos) && fragment.visible {
2288 if let Some(ref mut change) = change {
2289 if change.new_range.end == new_offset {
2290 change.new_range.end += fragment.len();
2291 self.delta += fragment.len() as isize;
2292 } else {
2293 break;
2294 }
2295 } else {
2296 change = Some(Edit {
2297 old_range: old_offset..old_offset,
2298 new_range: new_offset..new_offset + fragment.len(),
2299 old_lines: Point::zero(),
2300 });
2301 self.delta += fragment.len() as isize;
2302 }
2303 } else if fragment.was_visible(&self.since, &self.undos) && !fragment.visible {
2304 let deleted_start = self.cursor.start().deleted;
2305 let old_lines = self.deleted_text.to_point(deleted_start + fragment.len())
2306 - self.deleted_text.to_point(deleted_start);
2307 if let Some(ref mut change) = change {
2308 if change.new_range.end == new_offset {
2309 change.old_range.end += fragment.len();
2310 change.old_lines += &old_lines;
2311 self.delta -= fragment.len() as isize;
2312 } else {
2313 break;
2314 }
2315 } else {
2316 change = Some(Edit {
2317 old_range: old_offset..old_offset + fragment.len(),
2318 new_range: new_offset..new_offset,
2319 old_lines,
2320 });
2321 self.delta -= fragment.len() as isize;
2322 }
2323 }
2324
2325 self.cursor.next(&None);
2326 }
2327
2328 change
2329 }
2330}
2331
2332struct ByteChunks<'a>(rope::Chunks<'a>);
2333
2334impl<'a> Iterator for ByteChunks<'a> {
2335 type Item = &'a [u8];
2336
2337 fn next(&mut self) -> Option<Self::Item> {
2338 self.0.next().map(str::as_bytes)
2339 }
2340}
2341
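// Adapts a `Rope` so tree-sitter can read a node's text as byte chunks while
// iterating query captures.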
2342struct TextProvider<'a>(&'a Rope);
2343
2344impl<'a> tree_sitter::TextProvider<'a> for TextProvider<'a> {
2345 type I = ByteChunks<'a>;
2346
2347 fn text(&mut self, node: tree_sitter::Node) -> Self::I {
2348 ByteChunks(self.0.chunks_in_range(node.byte_range()))
2349 }
2350}
2351
2352struct Highlights<'a> {
2353 captures: tree_sitter::QueryCaptures<'a, 'a, TextProvider<'a>>,
2354 next_capture: Option<(tree_sitter::QueryMatch<'a, 'a>, usize)>,
2355 stack: Vec<(usize, StyleId)>,
2356 theme_mapping: ThemeMap,
2357}
2358
2359pub struct HighlightedChunks<'a> {
2360 range: Range<usize>,
2361 chunks: Chunks<'a>,
2362 highlights: Option<Highlights<'a>>,
2363}
2364
2365impl<'a> HighlightedChunks<'a> {
2366 pub fn seek(&mut self, offset: usize) {
2367 self.range.start = offset;
2368 self.chunks.seek(self.range.start);
2369 if let Some(highlights) = self.highlights.as_mut() {
2370 highlights
2371 .stack
2372 .retain(|(end_offset, _)| *end_offset > offset);
2373 if let Some((mat, capture_ix)) = &highlights.next_capture {
2374 let capture = mat.captures[*capture_ix as usize];
2375 if offset >= capture.node.start_byte() {
2376 let next_capture_end = capture.node.end_byte();
2377 if offset < next_capture_end {
2378 highlights.stack.push((
2379 next_capture_end,
2380 highlights.theme_mapping.get(capture.index),
2381 ));
2382 }
2383 highlights.next_capture.take();
2384 }
2385 }
2386 highlights.captures.set_byte_range(self.range.clone());
2387 }
2388 }
2389
2390 pub fn offset(&self) -> usize {
2391 self.range.start
2392 }
2393}
2394
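// Yields successive chunks of text paired with a style id. Chunk boundaries are
// introduced wherever a syntax capture starts or ends; the innermost enclosing
// capture (the top of `stack`) determines the style, and text outside any
// capture gets the default style.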
2395impl<'a> Iterator for HighlightedChunks<'a> {
2396 type Item = (&'a str, StyleId);
2397
2398 fn next(&mut self) -> Option<Self::Item> {
2399 let mut next_capture_start = usize::MAX;
2400
2401 if let Some(highlights) = self.highlights.as_mut() {
2402 while let Some((parent_capture_end, _)) = highlights.stack.last() {
2403 if *parent_capture_end <= self.range.start {
2404 highlights.stack.pop();
2405 } else {
2406 break;
2407 }
2408 }
2409
2410 if highlights.next_capture.is_none() {
2411 highlights.next_capture = highlights.captures.next();
2412 }
2413
2414 while let Some((mat, capture_ix)) = highlights.next_capture.as_ref() {
2415 let capture = mat.captures[*capture_ix as usize];
2416 if self.range.start < capture.node.start_byte() {
2417 next_capture_start = capture.node.start_byte();
2418 break;
2419 } else {
2420 let style_id = highlights.theme_mapping.get(capture.index);
2421 highlights.stack.push((capture.node.end_byte(), style_id));
2422 highlights.next_capture = highlights.captures.next();
2423 }
2424 }
2425 }
2426
2427 if let Some(chunk) = self.chunks.peek() {
2428 let chunk_start = self.range.start;
2429 let mut chunk_end = (self.chunks.offset() + chunk.len()).min(next_capture_start);
2430 let mut style_id = StyleId::default();
2431 if let Some((parent_capture_end, parent_style_id)) =
2432 self.highlights.as_ref().and_then(|h| h.stack.last())
2433 {
2434 chunk_end = chunk_end.min(*parent_capture_end);
2435 style_id = *parent_style_id;
2436 }
2437
2438 let slice =
2439 &chunk[chunk_start - self.chunks.offset()..chunk_end - self.chunks.offset()];
2440 self.range.start = chunk_end;
2441 if self.range.start == self.chunks.offset() + chunk.len() {
2442 self.chunks.next().unwrap();
2443 }
2444
2445 Some((slice, style_id))
2446 } else {
2447 None
2448 }
2449 }
2450}
2451
2452#[derive(Ord, PartialOrd, Eq, PartialEq, Clone, Debug)]
2453struct FragmentId(Arc<[u16]>);
2454
2455lazy_static! {
2456 static ref FRAGMENT_ID_EMPTY: FragmentId = FragmentId(Arc::from([]));
2457    static ref FRAGMENT_ID_MIN_VALUE: FragmentId = FragmentId(Arc::from([0u16]));
2458 static ref FRAGMENT_ID_MAX_VALUE: FragmentId = FragmentId(Arc::from([u16::max_value()]));
2459}
2460
2461impl Default for FragmentId {
2462 fn default() -> Self {
2463 FRAGMENT_ID_EMPTY.clone()
2464 }
2465}
2466
2467impl FragmentId {
2468 fn min_value() -> &'static Self {
2469 &FRAGMENT_ID_MIN_VALUE
2470 }
2471
2472 fn max_value() -> &'static Self {
2473 &FRAGMENT_ID_MAX_VALUE
2474 }
2475
2476 fn between(left: &Self, right: &Self) -> Self {
2477 Self::between_with_max(left, right, u16::max_value())
2478 }
2479
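    // Produces an id that sorts between `left` and `right`. Ids compare
    // lexicographically as sequences of u16 digits, with missing digits in
    // `left` treated as 0 and missing digits in `right` as `max_value`. The new
    // digit exceeds the left neighbor's by at most 8, which keeps ids short when
    // edits repeatedly land between the same neighbors. For example, between
    // [0] and [4] with a max of 4, this yields [2].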
2480 fn between_with_max(left: &Self, right: &Self, max_value: u16) -> Self {
2481 let mut new_entries = Vec::new();
2482
2483 let left_entries = left.0.iter().cloned().chain(iter::repeat(0));
2484 let right_entries = right.0.iter().cloned().chain(iter::repeat(max_value));
2485 for (l, r) in left_entries.zip(right_entries) {
2486 let interval = r - l;
2487 if interval > 1 {
2488 new_entries.push(l + cmp::max(1, cmp::min(8, interval / 2)));
2489 break;
2490 } else {
2491 new_entries.push(l);
2492 }
2493 }
2494
2495 FragmentId(Arc::from(new_entries))
2496 }
2497}
2498
2499#[derive(Ord, PartialOrd, Eq, PartialEq, Clone, Debug, Default)]
2500struct FragmentIdRef<'a>(Option<&'a FragmentId>);
2501
2502impl<'a> FragmentIdRef<'a> {
2503 fn new(id: &'a FragmentId) -> Self {
2504 Self(Some(id))
2505 }
2506}
2507
2508impl<'a> sum_tree::Dimension<'a, FragmentSummary> for FragmentIdRef<'a> {
2509 fn add_summary(&mut self, summary: &'a FragmentSummary, _: &Option<time::Global>) {
2510 self.0 = Some(&summary.max_fragment_id)
2511 }
2512}
2513
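// A fragment is visible if its insertion has not been undone and every deletion
// that touched it has been undone; `was_visible` answers the same question as
// of an earlier version.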
2514impl Fragment {
2515 fn new(id: FragmentId, insertion: Arc<Insertion>, range_in_insertion: Range<usize>) -> Self {
2516 Self {
2517 id,
2518 insertion,
2519 range_in_insertion,
2520 deletions: Default::default(),
2521 max_undos: Default::default(),
2522 visible: true,
2523 }
2524 }
2525
2526 fn is_visible(&self, undos: &UndoMap) -> bool {
2527 !undos.is_undone(self.insertion.id) && self.deletions.iter().all(|d| undos.is_undone(*d))
2528 }
2529
2530 fn was_visible(&self, version: &time::Global, undos: &UndoMap) -> bool {
2531 (version.observed(self.insertion.id) && !undos.was_undone(self.insertion.id, version))
2532 && self
2533 .deletions
2534 .iter()
2535 .all(|d| !version.observed(*d) || undos.was_undone(*d, version))
2536 }
2537
2538 fn len(&self) -> usize {
2539 self.range_in_insertion.len()
2540 }
2541
2542 fn visible_len(&self) -> usize {
2543 if self.visible {
2544 self.range_in_insertion.len()
2545 } else {
2546 0
2547 }
2548 }
2549}
2550
2551impl sum_tree::Item for Fragment {
2552 type Summary = FragmentSummary;
2553
2554 fn summary(&self) -> Self::Summary {
2555 let mut max_version = time::Global::new();
2556 max_version.observe(self.insertion.id);
2557 for deletion in &self.deletions {
2558 max_version.observe(*deletion);
2559 }
2560 max_version.join(&self.max_undos);
2561
2562 let mut min_insertion_version = time::Global::new();
2563 min_insertion_version.observe(self.insertion.id);
2564 let max_insertion_version = min_insertion_version.clone();
2565 if self.visible {
2566 FragmentSummary {
2567 text: FragmentTextSummary {
2568 visible: self.len(),
2569 deleted: 0,
2570 },
2571 max_fragment_id: self.id.clone(),
2572 max_version,
2573 min_insertion_version,
2574 max_insertion_version,
2575 }
2576 } else {
2577 FragmentSummary {
2578 text: FragmentTextSummary {
2579 visible: 0,
2580 deleted: self.len(),
2581 },
2582 max_fragment_id: self.id.clone(),
2583 max_version,
2584 min_insertion_version,
2585 max_insertion_version,
2586 }
2587 }
2588 }
2589}
2590
2591impl sum_tree::Summary for FragmentSummary {
2592 type Context = Option<time::Global>;
2593
2594 fn add_summary(&mut self, other: &Self, _: &Self::Context) {
2595 self.text.visible += &other.text.visible;
2596 self.text.deleted += &other.text.deleted;
2597 debug_assert!(self.max_fragment_id <= other.max_fragment_id);
2598 self.max_fragment_id = other.max_fragment_id.clone();
2599 self.max_version.join(&other.max_version);
2600 self.min_insertion_version
2601 .meet(&other.min_insertion_version);
2602 self.max_insertion_version
2603 .join(&other.max_insertion_version);
2604 }
2605}
2606
2607impl Default for FragmentSummary {
2608 fn default() -> Self {
2609 FragmentSummary {
2610 text: FragmentTextSummary::default(),
2611 max_fragment_id: FragmentId::min_value().clone(),
2612 max_version: time::Global::new(),
2613 min_insertion_version: time::Global::new(),
2614 max_insertion_version: time::Global::new(),
2615 }
2616 }
2617}
2618
2619impl<'a> sum_tree::Dimension<'a, FragmentSummary> for usize {
2620 fn add_summary(&mut self, summary: &FragmentSummary, _: &Option<time::Global>) {
2621 *self += summary.text.visible;
2622 }
2623}
2624
2625impl sum_tree::Item for InsertionSplit {
2626 type Summary = InsertionSplitSummary;
2627
2628 fn summary(&self) -> Self::Summary {
2629 InsertionSplitSummary {
2630 extent: self.extent,
2631 }
2632 }
2633}
2634
2635impl sum_tree::Summary for InsertionSplitSummary {
2636 type Context = ();
2637
2638 fn add_summary(&mut self, other: &Self, _: &()) {
2639 self.extent += other.extent;
2640 }
2641}
2642
2643impl Default for InsertionSplitSummary {
2644 fn default() -> Self {
2645 InsertionSplitSummary { extent: 0 }
2646 }
2647}
2648
2649impl<'a> sum_tree::Dimension<'a, InsertionSplitSummary> for usize {
2650 fn add_summary(&mut self, summary: &InsertionSplitSummary, _: &()) {
2651 *self += summary.extent;
2652 }
2653}
2654
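// An offset accumulated only over fragments whose insertions are contained in a
// particular version, supplied as the seek context; `InvalidVersion` marks a
// position that cannot be expressed in that version's coordinate space.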
2655#[derive(Copy, Clone, Debug, Eq, PartialEq)]
2656enum VersionedOffset {
2657 Offset(usize),
2658 InvalidVersion,
2659}
2660
2661impl VersionedOffset {
2662 fn offset(&self) -> usize {
2663 if let Self::Offset(offset) = self {
2664 *offset
2665 } else {
2666 panic!("invalid version")
2667 }
2668 }
2669}
2670
2671impl Default for VersionedOffset {
2672 fn default() -> Self {
2673 Self::Offset(0)
2674 }
2675}
2676
2677impl<'a> sum_tree::Dimension<'a, FragmentSummary> for VersionedOffset {
2678 fn add_summary(&mut self, summary: &'a FragmentSummary, cx: &Option<time::Global>) {
2679 if let Self::Offset(offset) = self {
2680 let version = cx.as_ref().unwrap();
2681 if *version >= summary.max_insertion_version {
2682 *offset += summary.text.visible + summary.text.deleted;
2683 } else if !summary
2684 .min_insertion_version
2685 .iter()
2686 .all(|t| !version.observed(*t))
2687 {
2688 *self = Self::InvalidVersion;
2689 }
2690 } else {
2691 unreachable!();
2692 }
2693 }
2694}
2695
2696impl<'a> sum_tree::SeekDimension<'a, FragmentSummary> for VersionedOffset {
2697 fn cmp(&self, other: &Self, _: &Option<time::Global>) -> cmp::Ordering {
2698 match (self, other) {
2699 (Self::Offset(a), Self::Offset(b)) => Ord::cmp(a, b),
2700 (Self::Offset(_), Self::InvalidVersion) => cmp::Ordering::Less,
2701 (Self::InvalidVersion, _) => unreachable!(),
2702 }
2703 }
2704}
2705
2706impl<'a> sum_tree::Dimension<'a, FragmentSummary> for (VersionedOffset, usize) {
2707 fn add_summary(&mut self, summary: &'a FragmentSummary, cx: &Option<time::Global>) {
2708 self.0.add_summary(summary, cx);
2709 self.1 += summary.text.visible;
2710 }
2711}
2712
2713impl<'a> sum_tree::Dimension<'a, FragmentSummary> for (VersionedOffset, FullOffset) {
2714 fn add_summary(&mut self, summary: &'a FragmentSummary, cx: &Option<time::Global>) {
2715 self.0.add_summary(summary, cx);
2716 self.1 .0 += summary.text.visible + summary.text.deleted;
2717 }
2718}
2719
2720#[derive(Clone, Copy, Debug, Default)]
2721struct FullOffset(usize);
2722
2723impl<'a> sum_tree::Dimension<'a, FragmentSummary> for FullOffset {
2724 fn add_summary(&mut self, summary: &'a FragmentSummary, _: &Option<time::Global>) {
2725 self.0 += summary.text.visible + summary.text.deleted;
2726 }
2727}
2728
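// Every operation carries the Lamport timestamp of the replica that generated
// it, which identifies the originating replica and orders deferred operations.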
2729impl Operation {
2730 fn replica_id(&self) -> ReplicaId {
2731 self.lamport_timestamp().replica_id
2732 }
2733
2734 fn lamport_timestamp(&self) -> time::Lamport {
2735 match self {
2736 Operation::Edit {
2737 lamport_timestamp, ..
2738 } => *lamport_timestamp,
2739 Operation::Undo {
2740 lamport_timestamp, ..
2741 } => *lamport_timestamp,
2742 Operation::UpdateSelections {
2743 lamport_timestamp, ..
2744 } => *lamport_timestamp,
2745 }
2746 }
2747
2748 pub fn is_edit(&self) -> bool {
2749 match self {
2750 Operation::Edit { .. } => true,
2751 _ => false,
2752 }
2753 }
2754}
2755
2756impl operation_queue::Operation for Operation {
2757 fn timestamp(&self) -> time::Lamport {
2758 self.lamport_timestamp()
2759 }
2760}
2761
2762pub trait ToOffset {
2763 fn to_offset(&self, buffer: &Buffer) -> usize;
2764}
2765
2766impl ToOffset for Point {
2767 fn to_offset(&self, buffer: &Buffer) -> usize {
2768 buffer.visible_text.to_offset(*self)
2769 }
2770}
2771
2772impl ToOffset for usize {
2773 fn to_offset(&self, _: &Buffer) -> usize {
2774 *self
2775 }
2776}
2777
2778impl ToOffset for Anchor {
2779 fn to_offset(&self, buffer: &Buffer) -> usize {
2780 buffer.summary_for_anchor(self).bytes
2781 }
2782}
2783
2784impl<'a> ToOffset for &'a Anchor {
2785 fn to_offset(&self, buffer: &Buffer) -> usize {
2786 buffer.summary_for_anchor(self).bytes
2787 }
2788}
2789
2790pub trait ToPoint {
2791 fn to_point(&self, buffer: &Buffer) -> Point;
2792}
2793
2794impl ToPoint for Anchor {
2795 fn to_point(&self, buffer: &Buffer) -> Point {
2796 buffer.summary_for_anchor(self).lines
2797 }
2798}
2799
2800impl ToPoint for usize {
2801 fn to_point(&self, buffer: &Buffer) -> Point {
2802 buffer.visible_text.to_point(*self)
2803 }
2804}
2805
2806#[cfg(test)]
2807mod tests {
2808 use super::*;
2809 use crate::{
2810 test::{build_app_state, temp_tree},
2811 util::RandomCharIter,
2812 worktree::{Worktree, WorktreeHandle},
2813 };
2814 use gpui::{App, ModelHandle};
2815 use rand::prelude::*;
2816 use serde_json::json;
2817 use std::{
2818 cell::RefCell,
2819 cmp::Ordering,
2820 env, fs,
2821 rc::Rc,
2822 sync::atomic::{self, AtomicUsize},
2823 };
2824
2825 #[gpui::test]
2826 fn test_edit(cx: &mut gpui::MutableAppContext) {
2827 cx.add_model(|cx| {
2828 let mut buffer = Buffer::new(0, "abc", cx);
2829 assert_eq!(buffer.text(), "abc");
2830 buffer.edit(vec![3..3], "def", None).unwrap();
2831 assert_eq!(buffer.text(), "abcdef");
2832 buffer.edit(vec![0..0], "ghi", None).unwrap();
2833 assert_eq!(buffer.text(), "ghiabcdef");
2834 buffer.edit(vec![5..5], "jkl", None).unwrap();
2835 assert_eq!(buffer.text(), "ghiabjklcdef");
2836 buffer.edit(vec![6..7], "", None).unwrap();
2837 assert_eq!(buffer.text(), "ghiabjlcdef");
2838 buffer.edit(vec![4..9], "mno", None).unwrap();
2839 assert_eq!(buffer.text(), "ghiamnoef");
2840 buffer
2841 });
2842 }
2843
2844 #[gpui::test]
2845 fn test_edit_events(cx: &mut gpui::MutableAppContext) {
2846 let mut now = Instant::now();
2847 let buffer_1_events = Rc::new(RefCell::new(Vec::new()));
2848 let buffer_2_events = Rc::new(RefCell::new(Vec::new()));
2849
2850 let buffer1 = cx.add_model(|cx| Buffer::new(0, "abcdef", cx));
2851 let buffer2 = cx.add_model(|cx| Buffer::new(1, "abcdef", cx));
2852 let mut buffer_ops = Vec::new();
2853 buffer1.update(cx, |buffer, cx| {
2854 let buffer_1_events = buffer_1_events.clone();
2855 cx.subscribe(&buffer1, move |_, event, _| {
2856 buffer_1_events.borrow_mut().push(event.clone())
2857 });
2858 let buffer_2_events = buffer_2_events.clone();
2859 cx.subscribe(&buffer2, move |_, event, _| {
2860 buffer_2_events.borrow_mut().push(event.clone())
2861 });
2862
2863 // An edit emits an edited event, followed by a dirtied event,
2864 // since the buffer was previously in a clean state.
2865 let ops = buffer.edit(Some(2..4), "XYZ", Some(cx)).unwrap();
2866 buffer_ops.extend_from_slice(&ops);
2867
2868 // An empty transaction does not emit any events.
2869 buffer.start_transaction(None).unwrap();
2870 buffer.end_transaction(None, Some(cx)).unwrap();
2871
2872 // A transaction containing two edits emits one edited event.
2873 now += Duration::from_secs(1);
2874 buffer.start_transaction_at(None, now).unwrap();
2875 let ops = buffer.edit(Some(5..5), "u", Some(cx)).unwrap();
2876 buffer_ops.extend_from_slice(&ops);
2877 let ops = buffer.edit(Some(6..6), "w", Some(cx)).unwrap();
2878 buffer_ops.extend_from_slice(&ops);
2879 buffer.end_transaction_at(None, now, Some(cx)).unwrap();
2880
2881 // Undoing a transaction emits one edited event.
2882 let ops = buffer.undo(Some(cx));
2883 buffer_ops.extend_from_slice(&ops);
2884 });
2885
2886 // Incorporating a set of remote ops emits a single edited event,
2887 // followed by a dirtied event.
2888 buffer2.update(cx, |buffer, cx| {
2889 buffer.apply_ops(buffer_ops, Some(cx)).unwrap();
2890 });
2891
2892 let buffer_1_events = buffer_1_events.borrow();
2893 assert_eq!(
2894 *buffer_1_events,
2895 vec![Event::Edited, Event::Dirtied, Event::Edited, Event::Edited]
2896 );
2897
2898 let buffer_2_events = buffer_2_events.borrow();
2899 assert_eq!(*buffer_2_events, vec![Event::Edited, Event::Dirtied]);
2900 }
2901
2902 #[gpui::test]
2903 fn test_random_edits(cx: &mut gpui::MutableAppContext) {
2904 for seed in 0..100 {
2905 println!("{:?}", seed);
2906 let mut rng = &mut StdRng::seed_from_u64(seed);
2907
2908 let reference_string_len = rng.gen_range(0..3);
2909 let mut reference_string = RandomCharIter::new(&mut rng)
2910 .take(reference_string_len)
2911 .collect::<String>();
2912 cx.add_model(|cx| {
2913 let mut buffer = Buffer::new(0, reference_string.as_str(), cx);
2914 let mut buffer_versions = Vec::new();
2915 for _i in 0..10 {
2916 let (old_ranges, new_text, _) = buffer.randomly_mutate(rng, None);
2917 for old_range in old_ranges.iter().rev() {
2918 reference_string.replace_range(old_range.clone(), &new_text);
2919 }
2920 assert_eq!(buffer.text(), reference_string);
2921
2922 if rng.gen_bool(0.25) {
2923 buffer.randomly_undo_redo(rng);
2924 reference_string = buffer.text();
2925 }
2926
2927 let range = buffer.random_byte_range(0, rng);
2928 assert_eq!(
2929 buffer.text_summary_for_range(range.clone()),
2930 TextSummary::from(&reference_string[range])
2931 );
2932
2933 if rng.gen_bool(0.3) {
2934 buffer_versions.push(buffer.clone());
2935 }
2936 }
2937
2938 for mut old_buffer in buffer_versions {
2939 let mut delta = 0_isize;
2940 for Edit {
2941 old_range,
2942 new_range,
2943 ..
2944 } in buffer.edits_since(old_buffer.version.clone())
2945 {
2946 let old_len = old_range.end - old_range.start;
2947 let new_len = new_range.end - new_range.start;
2948 let old_start = (old_range.start as isize + delta) as usize;
2949 let new_text: String = buffer.text_for_range(new_range).collect();
2950 old_buffer
2951 .edit(Some(old_start..old_start + old_len), new_text, None)
2952 .unwrap();
2953
2954 delta += new_len as isize - old_len as isize;
2955 }
2956 assert_eq!(old_buffer.text(), buffer.text());
2957 }
2958
2959 buffer
2960 });
2961 }
2962 }
2963
2964 #[gpui::test]
2965 fn test_line_len(cx: &mut gpui::MutableAppContext) {
2966 cx.add_model(|cx| {
2967 let mut buffer = Buffer::new(0, "", cx);
2968 buffer.edit(vec![0..0], "abcd\nefg\nhij", None).unwrap();
2969 buffer.edit(vec![12..12], "kl\nmno", None).unwrap();
2970 buffer.edit(vec![18..18], "\npqrs\n", None).unwrap();
2971 buffer.edit(vec![18..21], "\nPQ", None).unwrap();
2972
2973 assert_eq!(buffer.line_len(0), 4);
2974 assert_eq!(buffer.line_len(1), 3);
2975 assert_eq!(buffer.line_len(2), 5);
2976 assert_eq!(buffer.line_len(3), 3);
2977 assert_eq!(buffer.line_len(4), 4);
2978 assert_eq!(buffer.line_len(5), 0);
2979 buffer
2980 });
2981 }
2982
2983 #[gpui::test]
2984 fn test_text_summary_for_range(cx: &mut gpui::MutableAppContext) {
2985 cx.add_model(|cx| {
2986 let buffer = Buffer::new(0, "ab\nefg\nhklm\nnopqrs\ntuvwxyz", cx);
2987 assert_eq!(
2988 buffer.text_summary_for_range(1..3),
2989 TextSummary {
2990 bytes: 2,
2991 lines: Point::new(1, 0),
2992 first_line_chars: 1,
2993 last_line_chars: 0,
2994 longest_row: 0,
2995 longest_row_chars: 1,
2996 }
2997 );
2998 assert_eq!(
2999 buffer.text_summary_for_range(1..12),
3000 TextSummary {
3001 bytes: 11,
3002 lines: Point::new(3, 0),
3003 first_line_chars: 1,
3004 last_line_chars: 0,
3005 longest_row: 2,
3006 longest_row_chars: 4,
3007 }
3008 );
3009 assert_eq!(
3010 buffer.text_summary_for_range(0..20),
3011 TextSummary {
3012 bytes: 20,
3013 lines: Point::new(4, 1),
3014 first_line_chars: 2,
3015 last_line_chars: 1,
3016 longest_row: 3,
3017 longest_row_chars: 6,
3018 }
3019 );
3020 assert_eq!(
3021 buffer.text_summary_for_range(0..22),
3022 TextSummary {
3023 bytes: 22,
3024 lines: Point::new(4, 3),
3025 first_line_chars: 2,
3026 last_line_chars: 3,
3027 longest_row: 3,
3028 longest_row_chars: 6,
3029 }
3030 );
3031 assert_eq!(
3032 buffer.text_summary_for_range(7..22),
3033 TextSummary {
3034 bytes: 15,
3035 lines: Point::new(2, 3),
3036 first_line_chars: 4,
3037 last_line_chars: 3,
3038 longest_row: 1,
3039 longest_row_chars: 6,
3040 }
3041 );
3042 buffer
3043 });
3044 }
3045
3046 #[gpui::test]
3047 fn test_chars_at(cx: &mut gpui::MutableAppContext) {
3048 cx.add_model(|cx| {
3049 let mut buffer = Buffer::new(0, "", cx);
3050 buffer.edit(vec![0..0], "abcd\nefgh\nij", None).unwrap();
3051 buffer.edit(vec![12..12], "kl\nmno", None).unwrap();
3052 buffer.edit(vec![18..18], "\npqrs", None).unwrap();
3053 buffer.edit(vec![18..21], "\nPQ", None).unwrap();
3054
3055 let chars = buffer.chars_at(Point::new(0, 0));
3056 assert_eq!(chars.collect::<String>(), "abcd\nefgh\nijkl\nmno\nPQrs");
3057
3058 let chars = buffer.chars_at(Point::new(1, 0));
3059 assert_eq!(chars.collect::<String>(), "efgh\nijkl\nmno\nPQrs");
3060
3061 let chars = buffer.chars_at(Point::new(2, 0));
3062 assert_eq!(chars.collect::<String>(), "ijkl\nmno\nPQrs");
3063
3064 let chars = buffer.chars_at(Point::new(3, 0));
3065 assert_eq!(chars.collect::<String>(), "mno\nPQrs");
3066
3067 let chars = buffer.chars_at(Point::new(4, 0));
3068 assert_eq!(chars.collect::<String>(), "PQrs");
3069
3070 // Regression test:
3071 let mut buffer = Buffer::new(0, "", cx);
3072 buffer.edit(vec![0..0], "[workspace]\nmembers = [\n \"xray_core\",\n \"xray_server\",\n \"xray_cli\",\n \"xray_wasm\",\n]\n", None).unwrap();
3073 buffer.edit(vec![60..60], "\n", None).unwrap();
3074
3075 let chars = buffer.chars_at(Point::new(6, 0));
3076 assert_eq!(chars.collect::<String>(), " \"xray_wasm\",\n]\n");
3077
3078 buffer
3079 });
3080 }
3081
3082 #[test]
3083 fn test_fragment_ids() {
3084 for seed in 0..10 {
3085 let rng = &mut StdRng::seed_from_u64(seed);
3086
3087 let mut ids = vec![FragmentId(Arc::from([0])), FragmentId(Arc::from([4]))];
3088 for _i in 0..100 {
3089 let index = rng.gen_range(1..ids.len());
3090
3091 let left = ids[index - 1].clone();
3092 let right = ids[index].clone();
3093 ids.insert(index, FragmentId::between_with_max(&left, &right, 4));
3094
3095 let mut sorted_ids = ids.clone();
3096 sorted_ids.sort();
3097 assert_eq!(ids, sorted_ids);
3098 }
3099 }
3100 }
3101
3102 #[gpui::test]
3103 fn test_anchors(cx: &mut gpui::MutableAppContext) {
3104 cx.add_model(|cx| {
3105 let mut buffer = Buffer::new(0, "", cx);
3106 buffer.edit(vec![0..0], "abc", None).unwrap();
3107 let left_anchor = buffer.anchor_before(2);
3108 let right_anchor = buffer.anchor_after(2);
3109
3110 buffer.edit(vec![1..1], "def\n", None).unwrap();
3111 assert_eq!(buffer.text(), "adef\nbc");
3112 assert_eq!(left_anchor.to_offset(&buffer), 6);
3113 assert_eq!(right_anchor.to_offset(&buffer), 6);
3114 assert_eq!(left_anchor.to_point(&buffer), Point { row: 1, column: 1 });
3115 assert_eq!(right_anchor.to_point(&buffer), Point { row: 1, column: 1 });
3116
3117 buffer.edit(vec![2..3], "", None).unwrap();
3118 assert_eq!(buffer.text(), "adf\nbc");
3119 assert_eq!(left_anchor.to_offset(&buffer), 5);
3120 assert_eq!(right_anchor.to_offset(&buffer), 5);
3121 assert_eq!(left_anchor.to_point(&buffer), Point { row: 1, column: 1 });
3122 assert_eq!(right_anchor.to_point(&buffer), Point { row: 1, column: 1 });
3123
3124 buffer.edit(vec![5..5], "ghi\n", None).unwrap();
3125 assert_eq!(buffer.text(), "adf\nbghi\nc");
3126 assert_eq!(left_anchor.to_offset(&buffer), 5);
3127 assert_eq!(right_anchor.to_offset(&buffer), 9);
3128 assert_eq!(left_anchor.to_point(&buffer), Point { row: 1, column: 1 });
3129 assert_eq!(right_anchor.to_point(&buffer), Point { row: 2, column: 0 });
3130
3131 buffer.edit(vec![7..9], "", None).unwrap();
3132 assert_eq!(buffer.text(), "adf\nbghc");
3133 assert_eq!(left_anchor.to_offset(&buffer), 5);
3134 assert_eq!(right_anchor.to_offset(&buffer), 7);
3135            assert_eq!(left_anchor.to_point(&buffer), Point { row: 1, column: 1 });
3136 assert_eq!(right_anchor.to_point(&buffer), Point { row: 1, column: 3 });
3137
3138 // Ensure anchoring to a point is equivalent to anchoring to an offset.
3139 assert_eq!(
3140 buffer.anchor_before(Point { row: 0, column: 0 }),
3141 buffer.anchor_before(0)
3142 );
3143 assert_eq!(
3144 buffer.anchor_before(Point { row: 0, column: 1 }),
3145 buffer.anchor_before(1)
3146 );
3147 assert_eq!(
3148 buffer.anchor_before(Point { row: 0, column: 2 }),
3149 buffer.anchor_before(2)
3150 );
3151 assert_eq!(
3152 buffer.anchor_before(Point { row: 0, column: 3 }),
3153 buffer.anchor_before(3)
3154 );
3155 assert_eq!(
3156 buffer.anchor_before(Point { row: 1, column: 0 }),
3157 buffer.anchor_before(4)
3158 );
3159 assert_eq!(
3160 buffer.anchor_before(Point { row: 1, column: 1 }),
3161 buffer.anchor_before(5)
3162 );
3163 assert_eq!(
3164 buffer.anchor_before(Point { row: 1, column: 2 }),
3165 buffer.anchor_before(6)
3166 );
3167 assert_eq!(
3168 buffer.anchor_before(Point { row: 1, column: 3 }),
3169 buffer.anchor_before(7)
3170 );
3171 assert_eq!(
3172 buffer.anchor_before(Point { row: 1, column: 4 }),
3173 buffer.anchor_before(8)
3174 );
3175
3176 // Comparison between anchors.
3177 let anchor_at_offset_0 = buffer.anchor_before(0);
3178 let anchor_at_offset_1 = buffer.anchor_before(1);
3179 let anchor_at_offset_2 = buffer.anchor_before(2);
3180
3181 assert_eq!(
3182 anchor_at_offset_0
3183 .cmp(&anchor_at_offset_0, &buffer)
3184 .unwrap(),
3185 Ordering::Equal
3186 );
3187 assert_eq!(
3188 anchor_at_offset_1
3189 .cmp(&anchor_at_offset_1, &buffer)
3190 .unwrap(),
3191 Ordering::Equal
3192 );
3193 assert_eq!(
3194 anchor_at_offset_2
3195 .cmp(&anchor_at_offset_2, &buffer)
3196 .unwrap(),
3197 Ordering::Equal
3198 );
3199
3200 assert_eq!(
3201 anchor_at_offset_0
3202 .cmp(&anchor_at_offset_1, &buffer)
3203 .unwrap(),
3204 Ordering::Less
3205 );
3206 assert_eq!(
3207 anchor_at_offset_1
3208 .cmp(&anchor_at_offset_2, &buffer)
3209 .unwrap(),
3210 Ordering::Less
3211 );
3212 assert_eq!(
3213 anchor_at_offset_0
3214 .cmp(&anchor_at_offset_2, &buffer)
3215 .unwrap(),
3216 Ordering::Less
3217 );
3218
3219 assert_eq!(
3220 anchor_at_offset_1
3221 .cmp(&anchor_at_offset_0, &buffer)
3222 .unwrap(),
3223 Ordering::Greater
3224 );
3225 assert_eq!(
3226 anchor_at_offset_2
3227 .cmp(&anchor_at_offset_1, &buffer)
3228 .unwrap(),
3229 Ordering::Greater
3230 );
3231 assert_eq!(
3232 anchor_at_offset_2
3233 .cmp(&anchor_at_offset_0, &buffer)
3234 .unwrap(),
3235 Ordering::Greater
3236 );
3237 buffer
3238 });
3239 }
3240
3241 #[gpui::test]
3242 fn test_anchors_at_start_and_end(cx: &mut gpui::MutableAppContext) {
3243 cx.add_model(|cx| {
3244 let mut buffer = Buffer::new(0, "", cx);
3245 let before_start_anchor = buffer.anchor_before(0);
3246 let after_end_anchor = buffer.anchor_after(0);
3247
3248 buffer.edit(vec![0..0], "abc", None).unwrap();
3249 assert_eq!(buffer.text(), "abc");
3250 assert_eq!(before_start_anchor.to_offset(&buffer), 0);
3251 assert_eq!(after_end_anchor.to_offset(&buffer), 3);
3252
3253 let after_start_anchor = buffer.anchor_after(0);
3254 let before_end_anchor = buffer.anchor_before(3);
3255
3256 buffer.edit(vec![3..3], "def", None).unwrap();
3257 buffer.edit(vec![0..0], "ghi", None).unwrap();
3258 assert_eq!(buffer.text(), "ghiabcdef");
3259 assert_eq!(before_start_anchor.to_offset(&buffer), 0);
3260 assert_eq!(after_start_anchor.to_offset(&buffer), 3);
3261 assert_eq!(before_end_anchor.to_offset(&buffer), 6);
3262 assert_eq!(after_end_anchor.to_offset(&buffer), 9);
3263 buffer
3264 });
3265 }
3266
3267 #[test]
3268 fn test_is_dirty() {
3269 App::test_async((), |mut cx| async move {
3270 let dir = temp_tree(json!({
3271 "file1": "",
3272 "file2": "",
3273 "file3": "",
3274 }));
3275 let tree = cx.add_model(|cx| Worktree::new(dir.path(), cx));
3276 tree.flush_fs_events(&cx).await;
3277 cx.read(|cx| tree.read(cx).scan_complete()).await;
3278
3279 let file1 = cx.update(|cx| tree.file("file1", cx)).await;
3280 let buffer1 = cx.add_model(|cx| {
3281 Buffer::from_history(0, History::new("abc".into()), Some(file1), None, cx)
3282 });
3283 let events = Rc::new(RefCell::new(Vec::new()));
3284
3285 // initially, the buffer isn't dirty.
3286 buffer1.update(&mut cx, |buffer, cx| {
3287 cx.subscribe(&buffer1, {
3288 let events = events.clone();
3289 move |_, event, _| events.borrow_mut().push(event.clone())
3290 });
3291
3292 assert!(!buffer.is_dirty());
3293 assert!(events.borrow().is_empty());
3294
3295 buffer.edit(vec![1..2], "", Some(cx)).unwrap();
3296 });
3297
3298 // after the first edit, the buffer is dirty, and emits a dirtied event.
3299 buffer1.update(&mut cx, |buffer, cx| {
3300 assert!(buffer.text() == "ac");
3301 assert!(buffer.is_dirty());
3302 assert_eq!(*events.borrow(), &[Event::Edited, Event::Dirtied]);
3303 events.borrow_mut().clear();
3304
3305 buffer.did_save(buffer.version(), None, cx);
3306 });
3307
3308 // after saving, the buffer is not dirty, and emits a saved event.
3309 buffer1.update(&mut cx, |buffer, cx| {
3310 assert!(!buffer.is_dirty());
3311 assert_eq!(*events.borrow(), &[Event::Saved]);
3312 events.borrow_mut().clear();
3313
3314 buffer.edit(vec![1..1], "B", Some(cx)).unwrap();
3315 buffer.edit(vec![2..2], "D", Some(cx)).unwrap();
3316 });
3317
3318 // after editing again, the buffer is dirty, and emits another dirty event.
3319 buffer1.update(&mut cx, |buffer, cx| {
3320 assert!(buffer.text() == "aBDc");
3321 assert!(buffer.is_dirty());
3322 assert_eq!(
3323 *events.borrow(),
3324 &[Event::Edited, Event::Dirtied, Event::Edited],
3325 );
3326 events.borrow_mut().clear();
3327
3328                // TODO - currently, after restoring the buffer to its
3329                // previously-saved state, the buffer is still considered dirty.
3330 buffer.edit(vec![1..3], "", Some(cx)).unwrap();
3331 assert!(buffer.text() == "ac");
3332 assert!(buffer.is_dirty());
3333 });
3334
3335 assert_eq!(*events.borrow(), &[Event::Edited]);
3336
3337 // When a file is deleted, the buffer is considered dirty.
3338 let events = Rc::new(RefCell::new(Vec::new()));
3339 let file2 = cx.update(|cx| tree.file("file2", cx)).await;
3340 let buffer2 = cx.add_model(|cx: &mut ModelContext<Buffer>| {
3341 cx.subscribe(&cx.handle(), {
3342 let events = events.clone();
3343 move |_, event, _| events.borrow_mut().push(event.clone())
3344 });
3345
3346 Buffer::from_history(0, History::new("abc".into()), Some(file2), None, cx)
3347 });
3348
3349 fs::remove_file(dir.path().join("file2")).unwrap();
3350 buffer2.condition(&cx, |b, _| b.is_dirty()).await;
3351 assert_eq!(
3352 *events.borrow(),
3353 &[Event::Dirtied, Event::FileHandleChanged]
3354 );
3355
3356 // When a file is already dirty when deleted, we don't emit a Dirtied event.
3357 let events = Rc::new(RefCell::new(Vec::new()));
3358 let file3 = cx.update(|cx| tree.file("file3", cx)).await;
3359 let buffer3 = cx.add_model(|cx: &mut ModelContext<Buffer>| {
3360 cx.subscribe(&cx.handle(), {
3361 let events = events.clone();
3362 move |_, event, _| events.borrow_mut().push(event.clone())
3363 });
3364
3365 Buffer::from_history(0, History::new("abc".into()), Some(file3), None, cx)
3366 });
3367
3368 tree.flush_fs_events(&cx).await;
3369 buffer3.update(&mut cx, |buffer, cx| {
3370 buffer.edit(Some(0..0), "x", Some(cx)).unwrap();
3371 });
3372 events.borrow_mut().clear();
3373 fs::remove_file(dir.path().join("file3")).unwrap();
3374 buffer3
3375 .condition(&cx, |_, _| !events.borrow().is_empty())
3376 .await;
3377 assert_eq!(*events.borrow(), &[Event::FileHandleChanged]);
3378 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
3379 });
3380 }
3381
3382 #[gpui::test]
3383 async fn test_file_changes_on_disk(mut cx: gpui::TestAppContext) {
3384 let initial_contents = "aaa\nbbbbb\nc\n";
3385 let dir = temp_tree(json!({ "the-file": initial_contents }));
3386 let tree = cx.add_model(|cx| Worktree::new(dir.path(), cx));
3387 cx.read(|cx| tree.read(cx).scan_complete()).await;
3388
3389 let abs_path = dir.path().join("the-file");
3390 let file = cx.update(|cx| tree.file("the-file", cx)).await;
3391 let buffer = cx.add_model(|cx| {
3392 Buffer::from_history(
3393 0,
3394 History::new(initial_contents.into()),
3395 Some(file),
3396 None,
3397 cx,
3398 )
3399 });
3400
3401 // Add a cursor at the start of each row.
3402 let (selection_set_id, _) = buffer.update(&mut cx, |buffer, cx| {
3403 assert!(!buffer.is_dirty());
3404 buffer.add_selection_set(
3405 (0..3)
3406 .map(|row| {
3407 let anchor = buffer.anchor_at(Point::new(row, 0), AnchorBias::Right);
3408 Selection {
3409 id: row as usize,
3410 start: anchor.clone(),
3411 end: anchor,
3412 reversed: false,
3413 goal: SelectionGoal::None,
3414 }
3415 })
3416 .collect::<Vec<_>>(),
3417 Some(cx),
3418 )
3419 });
3420
3421 // Change the file on disk, adding two new lines of text, and removing
3422 // one line.
3423 buffer.read_with(&cx, |buffer, _| {
3424 assert!(!buffer.is_dirty());
3425 assert!(!buffer.has_conflict());
3426 });
3427 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
3428 fs::write(&abs_path, new_contents).unwrap();
3429
3430 // Because the buffer was not modified, it is reloaded from disk. Its
3431 // contents are edited according to the diff between the old and new
3432 // file contents.
3433 buffer
3434 .condition(&cx, |buffer, _| buffer.text() != initial_contents)
3435 .await;
3436
3437 buffer.update(&mut cx, |buffer, _| {
3438 assert_eq!(buffer.text(), new_contents);
3439 assert!(!buffer.is_dirty());
3440 assert!(!buffer.has_conflict());
3441
3442 let selections = buffer.selections(selection_set_id).unwrap();
3443 let cursor_positions = selections
3444 .iter()
3445 .map(|selection| {
3446 assert_eq!(selection.start, selection.end);
3447 selection.start.to_point(&buffer)
3448 })
3449 .collect::<Vec<_>>();
3450 assert_eq!(
3451 cursor_positions,
3452 &[Point::new(1, 0), Point::new(3, 0), Point::new(4, 0),]
3453 );
3454 });
3455
3456 // Modify the buffer
3457 buffer.update(&mut cx, |buffer, cx| {
3458 buffer.edit(vec![0..0], " ", Some(cx)).unwrap();
3459 assert!(buffer.is_dirty());
3460 });
3461
3462 // Change the file on disk again, adding blank lines to the beginning.
3463 fs::write(&abs_path, "\n\n\nAAAA\naaa\nBB\nbbbbb\n").unwrap();
3464
3465        // Because the buffer is modified, it doesn't reload from disk, but is
3466        // marked as having a conflict.
3467 buffer
3468 .condition(&cx, |buffer, _| buffer.has_conflict())
3469 .await;
3470 }
3471
3472 #[gpui::test]
3473 async fn test_set_text_via_diff(mut cx: gpui::TestAppContext) {
3474 let text = "a\nbb\nccc\ndddd\neeeee\nffffff\n";
3475 let buffer = cx.add_model(|cx| Buffer::new(0, text, cx));
3476
3477 let text = "a\nccc\ndddd\nffffff\n";
3478 let diff = buffer.read_with(&cx, |b, cx| b.diff(text.into(), cx)).await;
3479 buffer.update(&mut cx, |b, cx| b.set_text_via_diff(diff, cx));
3480 cx.read(|cx| assert_eq!(buffer.read(cx).text(), text));
3481
3482 let text = "a\n1\n\nccc\ndd2dd\nffffff\n";
3483 let diff = buffer.read_with(&cx, |b, cx| b.diff(text.into(), cx)).await;
3484 buffer.update(&mut cx, |b, cx| b.set_text_via_diff(diff, cx));
3485 cx.read(|cx| assert_eq!(buffer.read(cx).text(), text));
3486 }
3487
3488 #[gpui::test]
3489 fn test_undo_redo(cx: &mut gpui::MutableAppContext) {
3490 cx.add_model(|cx| {
3491 let mut buffer = Buffer::new(0, "1234", cx);
3492
3493 let edit1 = buffer.edit(vec![1..1], "abx", None).unwrap();
3494 let edit2 = buffer.edit(vec![3..4], "yzef", None).unwrap();
3495 let edit3 = buffer.edit(vec![3..5], "cd", None).unwrap();
3496 assert_eq!(buffer.text(), "1abcdef234");
3497
3498 buffer.undo_or_redo(edit1[0].edit_id().unwrap()).unwrap();
3499 assert_eq!(buffer.text(), "1cdef234");
3500 buffer.undo_or_redo(edit1[0].edit_id().unwrap()).unwrap();
3501 assert_eq!(buffer.text(), "1abcdef234");
3502
3503 buffer.undo_or_redo(edit2[0].edit_id().unwrap()).unwrap();
3504 assert_eq!(buffer.text(), "1abcdx234");
3505 buffer.undo_or_redo(edit3[0].edit_id().unwrap()).unwrap();
3506 assert_eq!(buffer.text(), "1abx234");
3507 buffer.undo_or_redo(edit2[0].edit_id().unwrap()).unwrap();
3508 assert_eq!(buffer.text(), "1abyzef234");
3509 buffer.undo_or_redo(edit3[0].edit_id().unwrap()).unwrap();
3510 assert_eq!(buffer.text(), "1abcdef234");
3511
3512 buffer.undo_or_redo(edit3[0].edit_id().unwrap()).unwrap();
3513 assert_eq!(buffer.text(), "1abyzef234");
3514 buffer.undo_or_redo(edit1[0].edit_id().unwrap()).unwrap();
3515 assert_eq!(buffer.text(), "1yzef234");
3516 buffer.undo_or_redo(edit2[0].edit_id().unwrap()).unwrap();
3517 assert_eq!(buffer.text(), "1234");
3518
3519 buffer
3520 });
3521 }
3522
3523 #[gpui::test]
3524 fn test_history(cx: &mut gpui::MutableAppContext) {
3525 cx.add_model(|cx| {
3526 let mut now = Instant::now();
3527 let mut buffer = Buffer::new(0, "123456", cx);
3528
3529 let (set_id, _) =
3530 buffer.add_selection_set(buffer.selections_from_ranges(vec![4..4]).unwrap(), None);
3531 buffer.start_transaction_at(Some(set_id), now).unwrap();
3532 buffer.edit(vec![2..4], "cd", None).unwrap();
3533 buffer.end_transaction_at(Some(set_id), now, None).unwrap();
3534 assert_eq!(buffer.text(), "12cd56");
3535 assert_eq!(buffer.selection_ranges(set_id).unwrap(), vec![4..4]);
3536
3537 buffer.start_transaction_at(Some(set_id), now).unwrap();
3538 buffer
3539 .update_selection_set(
3540 set_id,
3541 buffer.selections_from_ranges(vec![1..3]).unwrap(),
3542 None,
3543 )
3544 .unwrap();
3545 buffer.edit(vec![4..5], "e", None).unwrap();
3546 buffer.end_transaction_at(Some(set_id), now, None).unwrap();
3547 assert_eq!(buffer.text(), "12cde6");
3548 assert_eq!(buffer.selection_ranges(set_id).unwrap(), vec![1..3]);
3549
3550 now += UNDO_GROUP_INTERVAL + Duration::from_millis(1);
3551 buffer.start_transaction_at(Some(set_id), now).unwrap();
3552 buffer
3553 .update_selection_set(
3554 set_id,
3555 buffer.selections_from_ranges(vec![2..2]).unwrap(),
3556 None,
3557 )
3558 .unwrap();
3559 buffer.edit(vec![0..1], "a", None).unwrap();
3560 buffer.edit(vec![1..1], "b", None).unwrap();
3561 buffer.end_transaction_at(Some(set_id), now, None).unwrap();
3562 assert_eq!(buffer.text(), "ab2cde6");
3563 assert_eq!(buffer.selection_ranges(set_id).unwrap(), vec![3..3]);
3564
3565 // Last transaction happened past the group interval, undo it on its
3566 // own.
3567 buffer.undo(None);
3568 assert_eq!(buffer.text(), "12cde6");
3569 assert_eq!(buffer.selection_ranges(set_id).unwrap(), vec![1..3]);
3570
3571 // First two transactions happened within the group interval, undo them
3572 // together.
3573 buffer.undo(None);
3574 assert_eq!(buffer.text(), "123456");
3575 assert_eq!(buffer.selection_ranges(set_id).unwrap(), vec![4..4]);
3576
3577 // Redo the first two transactions together.
3578 buffer.redo(None);
3579 assert_eq!(buffer.text(), "12cde6");
3580 assert_eq!(buffer.selection_ranges(set_id).unwrap(), vec![1..3]);
3581
3582 // Redo the last transaction on its own.
3583 buffer.redo(None);
3584 assert_eq!(buffer.text(), "ab2cde6");
3585 assert_eq!(buffer.selection_ranges(set_id).unwrap(), vec![3..3]);
3586
3587 buffer
3588 });
3589 }
3590
3591 #[gpui::test]
3592 fn test_random_concurrent_edits(cx: &mut gpui::MutableAppContext) {
3593 use crate::test::Network;
3594
3595 let peers = env::var("PEERS")
3596 .map(|i| i.parse().expect("invalid `PEERS` variable"))
3597 .unwrap_or(5);
3598 let iterations = env::var("ITERATIONS")
3599 .map(|i| i.parse().expect("invalid `ITERATIONS` variable"))
3600 .unwrap_or(100);
3601 let operations = env::var("OPERATIONS")
3602 .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
3603 .unwrap_or(10);
3604 let seed_range = if let Ok(seed) = env::var("SEED") {
3605 let seed = seed.parse().expect("invalid `SEED` variable");
3606 seed..seed + 1
3607 } else {
3608 0..iterations
3609 };
3610
3611 for seed in seed_range {
3612 dbg!(seed);
3613 let mut rng = &mut StdRng::seed_from_u64(seed);
3614
3615 let base_text_len = rng.gen_range(0..10);
3616 let base_text = RandomCharIter::new(&mut rng)
3617 .take(base_text_len)
3618 .collect::<String>();
3619 let mut replica_ids = Vec::new();
3620 let mut buffers = Vec::new();
3621 let mut network = Network::new();
3622 for i in 0..peers {
3623 let buffer = cx.add_model(|cx| Buffer::new(i as ReplicaId, base_text.as_str(), cx));
3624 buffers.push(buffer);
3625 replica_ids.push(i as u16);
3626 network.add_peer(i as u16);
3627 }
3628
3629 let mut mutation_count = operations;
3630 loop {
3631 let replica_index = rng.gen_range(0..peers);
3632 let replica_id = replica_ids[replica_index];
3633 buffers[replica_index].update(cx, |buffer, _| match rng.gen_range(0..=100) {
3634 0..=50 if mutation_count != 0 => {
3635 let (_, _, ops) = buffer.randomly_mutate(&mut rng, None);
3636 network.broadcast(replica_id, ops, &mut rng);
3637 mutation_count -= 1;
3638 }
3639 51..=70 if mutation_count != 0 => {
3640 let ops = buffer.randomly_undo_redo(&mut rng);
3641 network.broadcast(replica_id, ops, &mut rng);
3642 mutation_count -= 1;
3643 }
3644 71..=100 if network.has_unreceived(replica_id) => {
3645 buffer
3646 .apply_ops(network.receive(replica_id, &mut rng), None)
3647 .unwrap();
3648 }
3649 _ => {}
3650 });
3651
3652 if mutation_count == 0 && network.is_idle() {
3653 break;
3654 }
3655 }
3656
3657 let first_buffer = buffers[0].read(cx);
3658 for buffer in &buffers[1..] {
3659 let buffer = buffer.read(cx);
3660 assert_eq!(buffer.text(), first_buffer.text());
3661 assert_eq!(
3662 buffer.all_selections().collect::<HashMap<_, _>>(),
3663 first_buffer.all_selections().collect::<HashMap<_, _>>()
3664 );
3665 assert_eq!(
3666 buffer.all_selection_ranges().collect::<HashMap<_, _>>(),
3667 first_buffer
3668 .all_selection_ranges()
3669 .collect::<HashMap<_, _>>()
3670 );
3671 }
3672 }
3673 }
3674
3675 #[gpui::test]
3676 async fn test_reparse(mut cx: gpui::TestAppContext) {
3677 let app_state = cx.read(build_app_state);
3678 let rust_lang = app_state.language_registry.select_language("test.rs");
3679 assert!(rust_lang.is_some());
3680
3681 let buffer = cx.add_model(|cx| {
3682 let text = "fn a() {}".into();
3683 let buffer = Buffer::from_history(0, History::new(text), None, rust_lang.cloned(), cx);
3684 assert!(buffer.is_parsing());
3685 assert!(buffer.syntax_tree().is_none());
3686 buffer
3687 });
3688
3689 // Wait for the initial text to parse
3690 buffer
3691 .condition(&cx, |buffer, _| !buffer.is_parsing())
3692 .await;
3693 assert_eq!(
3694 get_tree_sexp(&buffer, &cx),
3695 concat!(
3696 "(source_file (function_item name: (identifier) ",
3697 "parameters: (parameters) ",
3698 "body: (block)))"
3699 )
3700 );
3701
3702 // Perform some edits (add parameter and variable reference)
3703 // Parsing doesn't begin until the transaction is complete
3704 buffer.update(&mut cx, |buf, cx| {
3705 buf.start_transaction(None).unwrap();
3706
3707 let offset = buf.text().find(")").unwrap();
3708 buf.edit(vec![offset..offset], "b: C", Some(cx)).unwrap();
3709 assert!(!buf.is_parsing());
3710
3711 let offset = buf.text().find("}").unwrap();
3712 buf.edit(vec![offset..offset], " d; ", Some(cx)).unwrap();
3713 assert!(!buf.is_parsing());
3714
3715 buf.end_transaction(None, Some(cx)).unwrap();
3716 assert_eq!(buf.text(), "fn a(b: C) { d; }");
3717 assert!(buf.is_parsing());
3718 });
3719 buffer
3720 .condition(&cx, |buffer, _| !buffer.is_parsing())
3721 .await;
3722 assert_eq!(
3723 get_tree_sexp(&buffer, &cx),
3724 concat!(
3725 "(source_file (function_item name: (identifier) ",
3726 "parameters: (parameters (parameter pattern: (identifier) type: (type_identifier))) ",
3727 "body: (block (identifier))))"
3728 )
3729 );
3730
3731 // Perform a series of edits without waiting for the current parse to complete:
3732 // * turn identifier into a field expression
3733 // * turn field expression into a method call
3734 // * add a turbofish to the method call
3735 buffer.update(&mut cx, |buf, cx| {
3736 let offset = buf.text().find(";").unwrap();
3737 buf.edit(vec![offset..offset], ".e", Some(cx)).unwrap();
3738 assert_eq!(buf.text(), "fn a(b: C) { d.e; }");
3739 assert!(buf.is_parsing());
3740 });
3741 buffer.update(&mut cx, |buf, cx| {
3742 let offset = buf.text().find(";").unwrap();
3743 buf.edit(vec![offset..offset], "(f)", Some(cx)).unwrap();
3744 assert_eq!(buf.text(), "fn a(b: C) { d.e(f); }");
3745 assert!(buf.is_parsing());
3746 });
3747 buffer.update(&mut cx, |buf, cx| {
3748 let offset = buf.text().find("(f)").unwrap();
3749 buf.edit(vec![offset..offset], "::<G>", Some(cx)).unwrap();
3750 assert_eq!(buf.text(), "fn a(b: C) { d.e::<G>(f); }");
3751 assert!(buf.is_parsing());
3752 });
3753 buffer
3754 .condition(&cx, |buffer, _| !buffer.is_parsing())
3755 .await;
3756 assert_eq!(
3757 get_tree_sexp(&buffer, &cx),
3758 concat!(
3759 "(source_file (function_item name: (identifier) ",
3760 "parameters: (parameters (parameter pattern: (identifier) type: (type_identifier))) ",
3761 "body: (block (call_expression ",
3762 "function: (generic_function ",
3763 "function: (field_expression value: (identifier) field: (field_identifier)) ",
3764 "type_arguments: (type_arguments (type_identifier))) ",
3765 "arguments: (arguments (identifier))))))",
3766 )
3767 );
3768
3769 buffer.update(&mut cx, |buf, cx| {
3770 buf.undo(Some(cx));
3771 assert_eq!(buf.text(), "fn a() {}");
3772 assert!(buf.is_parsing());
3773 });
3774 buffer
3775 .condition(&cx, |buffer, _| !buffer.is_parsing())
3776 .await;
3777 assert_eq!(
3778 get_tree_sexp(&buffer, &cx),
3779 concat!(
3780 "(source_file (function_item name: (identifier) ",
3781 "parameters: (parameters) ",
3782 "body: (block)))"
3783 )
3784 );
3785
3786 buffer.update(&mut cx, |buf, cx| {
3787 buf.redo(Some(cx));
3788 assert_eq!(buf.text(), "fn a(b: C) { d.e::<G>(f); }");
3789 assert!(buf.is_parsing());
3790 });
3791 buffer
3792 .condition(&cx, |buffer, _| !buffer.is_parsing())
3793 .await;
3794 assert_eq!(
3795 get_tree_sexp(&buffer, &cx),
3796 concat!(
3797 "(source_file (function_item name: (identifier) ",
3798 "parameters: (parameters (parameter pattern: (identifier) type: (type_identifier))) ",
3799 "body: (block (call_expression ",
3800 "function: (generic_function ",
3801 "function: (field_expression value: (identifier) field: (field_identifier)) ",
3802 "type_arguments: (type_arguments (type_identifier))) ",
3803 "arguments: (arguments (identifier))))))",
3804 )
3805 );
3806
        fn get_tree_sexp(buffer: &ModelHandle<Buffer>, cx: &gpui::TestAppContext) -> String {
            buffer.read_with(cx, |buffer, _| {
                buffer.syntax_tree().unwrap().root_node().to_sexp()
            })
        }
    }

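    // Querying the brackets that enclose a range reports the innermost bracket pair
    // surrounding it, as point ranges for the opening and closing brackets.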
    #[gpui::test]
    async fn test_enclosing_bracket_ranges(mut cx: gpui::TestAppContext) {
        use unindent::Unindent as _;

        let app_state = cx.read(build_app_state);
        let rust_lang = app_state.language_registry.select_language("test.rs");
        assert!(rust_lang.is_some());

        let buffer = cx.add_model(|cx| {
            let text = "
                mod x {
                    mod y {

                    }
                }
            "
            .unindent()
            .into();
            Buffer::from_history(0, History::new(text), None, rust_lang.cloned(), cx)
        });
        buffer
            .condition(&cx, |buffer, _| !buffer.is_parsing())
            .await;
        buffer.read_with(&cx, |buf, _| {
            assert_eq!(
                buf.enclosing_bracket_point_ranges(Point::new(1, 6)..Point::new(1, 6)),
                Some((
                    Point::new(0, 6)..Point::new(0, 7),
                    Point::new(4, 0)..Point::new(4, 1)
                ))
            );
            assert_eq!(
                buf.enclosing_bracket_point_ranges(Point::new(1, 10)..Point::new(1, 10)),
                Some((
                    Point::new(1, 10)..Point::new(1, 11),
                    Point::new(3, 4)..Point::new(3, 5)
                ))
            );
            assert_eq!(
                buf.enclosing_bracket_point_ranges(Point::new(3, 5)..Point::new(3, 5)),
                Some((
                    Point::new(1, 10)..Point::new(1, 11),
                    Point::new(3, 4)..Point::new(3, 5)
                ))
            );
        });
    }

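    // Test-only helpers for driving a buffer with random edits, selection-set changes,
    // and undo/redo operations.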
    impl Buffer {
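        // Picks a random byte range within the buffer, clipped to valid positions,
        // whose start is at or after `start_offset`.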
        fn random_byte_range(&mut self, start_offset: usize, rng: &mut impl Rng) -> Range<usize> {
            let end = self.clip_offset(rng.gen_range(start_offset..=self.len()), Bias::Right);
            let start = self.clip_offset(rng.gen_range(start_offset..=end), Bias::Left);
            start..end
        }

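        // Performs a single random edit: picks up to `old_range_count` disjoint ranges
        // and replaces them all with the same randomly generated text, returning the
        // ranges, the new text, and the resulting operations.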
        pub fn randomly_edit<T>(
            &mut self,
            rng: &mut T,
            old_range_count: usize,
            cx: Option<&mut ModelContext<Self>>,
        ) -> (Vec<Range<usize>>, String, Vec<Operation>)
        where
            T: Rng,
        {
            let mut old_ranges: Vec<Range<usize>> = Vec::new();
            for _ in 0..old_range_count {
                let last_end = old_ranges.last().map_or(0, |last_range| last_range.end + 1);
                if last_end > self.len() {
                    break;
                }
                old_ranges.push(self.random_byte_range(last_end, rng));
            }
            let new_text_len = rng.gen_range(0..10);
            let new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();

            let operations = self
                .edit(old_ranges.iter().cloned(), new_text.as_str(), cx)
                .unwrap();

            (old_ranges, new_text, operations)
        }

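        // Applies a random edit and then randomly adds, removes, or updates a selection
        // set owned by this replica, returning the edited ranges, the new text, and all
        // generated operations.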
        pub fn randomly_mutate<T>(
            &mut self,
            rng: &mut T,
            mut cx: Option<&mut ModelContext<Self>>,
        ) -> (Vec<Range<usize>>, String, Vec<Operation>)
        where
            T: Rng,
        {
            // Randomly edit the buffer's text.
            let (old_ranges, new_text, mut operations) =
                self.randomly_edit(rng, 5, cx.as_deref_mut());
            log::info!("Mutating buffer at {:?}: {:?}", old_ranges, new_text);

            // Randomly add, remove or mutate selection sets.
            let replica_selection_sets = &self
                .all_selections()
                .map(|(set_id, _)| *set_id)
                .filter(|set_id| self.replica_id == set_id.replica_id)
                .collect::<Vec<_>>();
            let set_id = replica_selection_sets.choose(rng);
            if set_id.is_some() && rng.gen_bool(1.0 / 6.0) {
                let op = self.remove_selection_set(*set_id.unwrap(), None).unwrap();
                operations.push(op);
            } else {
                let mut ranges = Vec::new();
                for _ in 0..5 {
                    ranges.push(self.random_byte_range(0, rng));
                }
                let new_selections = self.selections_from_ranges(ranges).unwrap();

                let op = if set_id.is_none() || rng.gen_bool(1.0 / 5.0) {
                    self.add_selection_set(new_selections, None).1
                } else {
                    self.update_selection_set(*set_id.unwrap(), new_selections, None)
                        .unwrap()
                };
                operations.push(op);
            }

            (old_ranges, new_text, operations)
        }

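        // Undoes or redoes between one and four randomly chosen edits from the buffer's
        // history, returning the resulting operations.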
        pub fn randomly_undo_redo(&mut self, rng: &mut impl Rng) -> Vec<Operation> {
            let mut ops = Vec::new();
            for _ in 0..rng.gen_range(1..5) {
                if let Some(edit_id) = self.history.ops.keys().choose(rng).copied() {
                    ops.push(self.undo_or_redo(edit_id).unwrap());
                }
            }
            ops
        }

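        // Builds selections from the given offset ranges, sorted by start position.
        // Ranges whose start exceeds their end become reversed selections.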
        fn selections_from_ranges<I>(&self, ranges: I) -> Result<Vec<Selection>>
        where
            I: IntoIterator<Item = Range<usize>>,
        {
            static NEXT_SELECTION_ID: AtomicUsize = AtomicUsize::new(0);

            let mut ranges = ranges.into_iter().collect::<Vec<_>>();
            ranges.sort_unstable_by_key(|range| range.start);

            let mut selections = Vec::with_capacity(ranges.len());
            for range in ranges {
                if range.start > range.end {
                    selections.push(Selection {
                        id: NEXT_SELECTION_ID.fetch_add(1, atomic::Ordering::SeqCst),
                        start: self.anchor_before(range.end),
                        end: self.anchor_before(range.start),
                        reversed: true,
                        goal: SelectionGoal::None,
                    });
                } else {
                    selections.push(Selection {
                        id: NEXT_SELECTION_ID.fetch_add(1, atomic::Ordering::SeqCst),
                        start: self.anchor_after(range.start),
                        end: self.anchor_before(range.end),
                        reversed: false,
                        goal: SelectionGoal::None,
                    });
                }
            }
            Ok(selections)
        }

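        // Resolves the selections in the given set to offset ranges, expressing
        // reversed selections as end..start.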
        pub fn selection_ranges<'a>(&'a self, set_id: SelectionSetId) -> Result<Vec<Range<usize>>> {
            Ok(self
                .selections(set_id)?
                .iter()
                .map(move |selection| {
                    let start = selection.start.to_offset(self);
                    let end = selection.end.to_offset(self);
                    if selection.reversed {
                        end..start
                    } else {
                        start..end
                    }
                })
                .collect())
        }

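        // Iterates over every selection set in the buffer, regardless of replica.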
        pub fn all_selections(&self) -> impl Iterator<Item = (&SelectionSetId, &[Selection])> {
            self.selections
                .iter()
                .map(|(set_id, selections)| (set_id, selections.as_ref()))
        }

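        // Like `all_selections`, but resolves each set's selections to offset ranges.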
        pub fn all_selection_ranges<'a>(
            &'a self,
        ) -> impl 'a + Iterator<Item = (SelectionSetId, Vec<Range<usize>>)> {
            self.selections
                .keys()
                .map(move |set_id| (*set_id, self.selection_ranges(*set_id).unwrap()))
        }

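        // Like `enclosing_bracket_ranges`, but converts the resulting offset ranges
        // into point (row, column) ranges.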
        pub fn enclosing_bracket_point_ranges<T: ToOffset>(
            &self,
            range: Range<T>,
        ) -> Option<(Range<Point>, Range<Point>)> {
            self.enclosing_bracket_ranges(range).map(|(start, end)| {
                let point_start = start.start.to_point(self)..start.end.to_point(self);
                let point_end = end.start.to_point(self)..end.end.to_point(self);
                (point_start, point_end)
            })
        }
    }

    impl Operation {
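        // Returns the local timestamp of the edit this operation refers to, if any.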
        fn edit_id(&self) -> Option<time::Local> {
            match self {
                Operation::Edit { edit, .. } => Some(edit.id),
                Operation::Undo { undo, .. } => Some(undo.edit_id),
                Operation::UpdateSelections { .. } => None,
            }
        }
    }
}