1pub use crate::{
2 diagnostic_set::DiagnosticSet,
3 highlight_map::{HighlightId, HighlightMap},
4 proto, BracketPair, Grammar, Language, LanguageConfig, LanguageRegistry, PLAIN_TEXT,
5};
6use crate::{
7 diagnostic_set::{DiagnosticEntry, DiagnosticGroup},
8 outline::OutlineItem,
9 syntax_map::{
10 SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures, SyntaxSnapshot, ToTreeSitterPoint,
11 },
12 CodeLabel, Outline,
13};
14use anyhow::{anyhow, Result};
15use clock::ReplicaId;
16use futures::FutureExt as _;
17use gpui::{fonts::HighlightStyle, AppContext, Entity, ModelContext, MutableAppContext, Task};
18use parking_lot::Mutex;
19use settings::Settings;
20use similar::{ChangeTag, TextDiff};
21use smol::future::yield_now;
22use std::{
23 any::Any,
24 cmp::{self, Ordering},
25 collections::BTreeMap,
26 ffi::OsStr,
27 future::Future,
28 iter::{self, Iterator, Peekable},
29 mem,
30 ops::{Deref, Range},
31 path::{Path, PathBuf},
32 str,
33 sync::Arc,
34 time::{Duration, Instant, SystemTime, UNIX_EPOCH},
35 vec,
36};
37use sum_tree::TreeMap;
38use text::operation_queue::OperationQueue;
39pub use text::{Buffer as TextBuffer, BufferSnapshot as TextBufferSnapshot, Operation as _, *};
40use theme::SyntaxTheme;
41use util::TryFutureExt as _;
42
43#[cfg(any(test, feature = "test-support"))]
44pub use {tree_sitter_rust, tree_sitter_typescript};
45
46pub use lsp::DiagnosticSeverity;
47
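/// An in-memory text buffer along with its editor state: the text itself plus its
/// syntax map, diagnostics, remote selections, pending autoindent requests, and the
/// metadata of the file it was loaded from, if any.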
48pub struct Buffer {
49 text: TextBuffer,
50 file: Option<Arc<dyn File>>,
51 saved_version: clock::Global,
52 saved_version_fingerprint: String,
53 saved_mtime: SystemTime,
54 transaction_depth: usize,
55 was_dirty_before_starting_transaction: Option<bool>,
56 language: Option<Arc<Language>>,
57 autoindent_requests: Vec<Arc<AutoindentRequest>>,
58 pending_autoindent: Option<Task<()>>,
59 sync_parse_timeout: Duration,
60 syntax_map: Mutex<SyntaxMap>,
61 parsing_in_background: bool,
62 parse_count: usize,
63 diagnostics: DiagnosticSet,
64 remote_selections: TreeMap<ReplicaId, SelectionSet>,
65 selections_update_count: usize,
66 diagnostics_update_count: usize,
67 diagnostics_timestamp: clock::Lamport,
68 file_update_count: usize,
69 completion_triggers: Vec<String>,
70 completion_triggers_timestamp: clock::Lamport,
71 deferred_ops: OperationQueue<Operation>,
72}
73
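/// An immutable snapshot of a `Buffer`'s state at a point in time, including its
/// text, syntax tree, diagnostics, and remote selections.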
74pub struct BufferSnapshot {
75 text: text::BufferSnapshot,
76 pub(crate) syntax: SyntaxSnapshot,
77 file: Option<Arc<dyn File>>,
78 diagnostics: DiagnosticSet,
79 diagnostics_update_count: usize,
80 file_update_count: usize,
81 remote_selections: TreeMap<ReplicaId, SelectionSet>,
82 selections_update_count: usize,
83 language: Option<Arc<Language>>,
84 parse_count: usize,
85}
86
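/// The indentation of a line: a number of columns of a single kind of whitespace
/// character (spaces or tabs).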
87#[derive(Clone, Copy, Debug, PartialEq, Eq)]
88pub struct IndentSize {
89 pub len: u32,
90 pub kind: IndentKind,
91}
92
93#[derive(Clone, Copy, Debug, PartialEq, Eq)]
94pub enum IndentKind {
95 Space,
96 Tab,
97}
98
99#[derive(Clone, Debug)]
100struct SelectionSet {
101 line_mode: bool,
102 selections: Arc<[Selection<Anchor>]>,
103 lamport_timestamp: clock::Lamport,
104}
105
106#[derive(Clone, Debug, PartialEq, Eq)]
107pub struct GroupId {
108 source: Arc<str>,
109 id: usize,
110}
111
112#[derive(Clone, Debug, PartialEq, Eq)]
113pub struct Diagnostic {
114 pub code: Option<String>,
115 pub severity: DiagnosticSeverity,
116 pub message: String,
117 pub group_id: usize,
118 pub is_valid: bool,
119 pub is_primary: bool,
120 pub is_disk_based: bool,
121 pub is_unnecessary: bool,
122}
123
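/// A completion provided by a language server: the range of text it replaces, the
/// text to insert, a display label, and the underlying LSP completion item.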
124#[derive(Clone, Debug)]
125pub struct Completion {
126 pub old_range: Range<Anchor>,
127 pub new_text: String,
128 pub label: CodeLabel,
129 pub lsp_completion: lsp::CompletionItem,
130}
131
132#[derive(Clone, Debug)]
133pub struct CodeAction {
134 pub range: Range<Anchor>,
135 pub lsp_action: lsp::CodeAction,
136}
137
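/// An operation used to synchronize this buffer with its replicas: either a text
/// operation or an update to the buffer's diagnostics, selections, or completion
/// triggers.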
138#[derive(Clone, Debug, PartialEq, Eq)]
139pub enum Operation {
140 Buffer(text::Operation),
141 UpdateDiagnostics {
142 diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
143 lamport_timestamp: clock::Lamport,
144 },
145 UpdateSelections {
146 selections: Arc<[Selection<Anchor>]>,
147 lamport_timestamp: clock::Lamport,
148 line_mode: bool,
149 },
150 UpdateCompletionTriggers {
151 triggers: Vec<String>,
152 lamport_timestamp: clock::Lamport,
153 },
154}
155
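/// Events emitted by a `Buffer` to notify its observers of changes to its state.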
156#[derive(Clone, Debug, PartialEq, Eq)]
157pub enum Event {
158 Operation(Operation),
159 Edited,
160 DirtyChanged,
161 Saved,
162 FileHandleChanged,
163 Reloaded,
164 Reparsed,
165 DiagnosticsUpdated,
166 Closed,
167}
168
169pub trait File: Send + Sync {
170 fn as_local(&self) -> Option<&dyn LocalFile>;
171
172 fn is_local(&self) -> bool {
173 self.as_local().is_some()
174 }
175
176 fn mtime(&self) -> SystemTime;
177
178 /// Returns the path of this file relative to the worktree's root directory.
179 fn path(&self) -> &Arc<Path>;
180
181 /// Returns the path of this file relative to the worktree's parent directory (this means it
182 /// includes the name of the worktree's root folder).
183 fn full_path(&self, cx: &AppContext) -> PathBuf;
184
185 /// Returns the last component of this handle's absolute path. If this handle refers to the root
186 /// of its worktree, then this method will return the name of the worktree itself.
187 fn file_name<'a>(&'a self, cx: &'a AppContext) -> &'a OsStr;
188
189 fn is_deleted(&self) -> bool;
190
191 fn save(
192 &self,
193 buffer_id: u64,
194 text: Rope,
195 version: clock::Global,
196 line_ending: LineEnding,
197 cx: &mut MutableAppContext,
198 ) -> Task<Result<(clock::Global, String, SystemTime)>>;
199
200 fn as_any(&self) -> &dyn Any;
201
202 fn to_proto(&self) -> rpc::proto::File;
203}
204
205pub trait LocalFile: File {
206 /// Returns the absolute path of this file.
207 fn abs_path(&self, cx: &AppContext) -> PathBuf;
208
209 fn load(&self, cx: &AppContext) -> Task<Result<String>>;
210
211 fn buffer_reloaded(
212 &self,
213 buffer_id: u64,
214 version: &clock::Global,
215 fingerprint: String,
216 line_ending: LineEnding,
217 mtime: SystemTime,
218 cx: &mut MutableAppContext,
219 );
220}
221
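/// Controls how auto-indentation is applied to text inserted via `Buffer::edit`.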
222#[derive(Clone, Debug)]
223pub enum AutoindentMode {
224 /// Indent each line of inserted text.
225 EachLine,
226 /// Apply the same indentation adjustment to all of the lines
227 /// in a given insertion.
228 Block {
229 /// The original indentation level of the first line of each
230 /// insertion, if it has been copied.
231 original_indent_columns: Vec<u32>,
232 },
233}
234
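/// A pending request to auto-indent a batch of edits, captured together with a
/// snapshot of the buffer taken before the edits were applied.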
235#[derive(Clone)]
236struct AutoindentRequest {
237 before_edit: BufferSnapshot,
238 entries: Vec<AutoindentRequestEntry>,
239 indent_size: IndentSize,
240 is_block_mode: bool,
241}
242
243#[derive(Clone)]
244struct AutoindentRequestEntry {
245 /// A range of the buffer whose indentation should be adjusted.
246 range: Range<Anchor>,
247 /// Whether or not these lines should be considered brand new, for the
248 /// purpose of auto-indent. When text is not new, its indentation will
249 /// only be adjusted if the suggested indentation level has *changed*
250 /// since the edit was made.
251 first_line_is_new: bool,
252 original_indent_column: Option<u32>,
253}
254
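/// A suggested indentation for a single line: the row whose indentation it should
/// be based on, and whether the line should be indented more than, less than, or
/// the same as that row.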
255#[derive(Debug)]
256struct IndentSuggestion {
257 basis_row: u32,
258 delta: Ordering,
259}
260
261struct BufferChunkHighlights<'a> {
262 captures: SyntaxMapCaptures<'a>,
263 next_capture: Option<SyntaxMapCapture<'a>>,
264 stack: Vec<(usize, HighlightId)>,
265 highlight_maps: Vec<HighlightMap>,
266}
267
268pub struct BufferChunks<'a> {
269 range: Range<usize>,
270 chunks: rope::Chunks<'a>,
271 diagnostic_endpoints: Peekable<vec::IntoIter<DiagnosticEndpoint>>,
272 error_depth: usize,
273 warning_depth: usize,
274 information_depth: usize,
275 hint_depth: usize,
276 unnecessary_depth: usize,
277 highlights: Option<BufferChunkHighlights<'a>>,
278}
279
280#[derive(Clone, Copy, Debug, Default)]
281pub struct Chunk<'a> {
282 pub text: &'a str,
283 pub syntax_highlight_id: Option<HighlightId>,
284 pub highlight_style: Option<HighlightStyle>,
285 pub diagnostic_severity: Option<DiagnosticSeverity>,
286 pub is_unnecessary: bool,
287}
288
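/// The result of diffing the buffer's text against some new text. It can be applied
/// with `Buffer::apply_diff`, as long as the buffer has not changed since the
/// `base_version` at which the diff was computed.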
289pub struct Diff {
290 base_version: clock::Global,
291 new_text: Arc<str>,
292 changes: Vec<(ChangeTag, usize)>,
293 line_ending: LineEnding,
294 start_offset: usize,
295}
296
297#[derive(Clone, Copy)]
298pub(crate) struct DiagnosticEndpoint {
299 offset: usize,
300 is_start: bool,
301 severity: DiagnosticSeverity,
302 is_unnecessary: bool,
303}
304
305#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
306pub enum CharKind {
307 Punctuation,
308 Whitespace,
309 Word,
310}
311
312impl CharKind {
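    /// Reinterprets punctuation as part of a word when `treat_punctuation_as_word`
    /// is true; all other kinds are returned unchanged.
    ///
    /// A minimal illustration (not run as a doc-test):
    ///
    /// ```ignore
    /// assert_eq!(CharKind::Punctuation.coerce_punctuation(true), CharKind::Word);
    /// assert_eq!(CharKind::Whitespace.coerce_punctuation(true), CharKind::Whitespace);
    /// ```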
313 pub fn coerce_punctuation(self, treat_punctuation_as_word: bool) -> Self {
314 if treat_punctuation_as_word && self == CharKind::Punctuation {
315 CharKind::Word
316 } else {
317 self
318 }
319 }
320}
321
322impl Buffer {
323 pub fn new<T: Into<String>>(
324 replica_id: ReplicaId,
325 base_text: T,
326 cx: &mut ModelContext<Self>,
327 ) -> Self {
328 Self::build(
329 TextBuffer::new(replica_id, cx.model_id() as u64, base_text.into()),
330 None,
331 )
332 }
333
334 pub fn from_file<T: Into<String>>(
335 replica_id: ReplicaId,
336 base_text: T,
337 file: Arc<dyn File>,
338 cx: &mut ModelContext<Self>,
339 ) -> Self {
340 Self::build(
341 TextBuffer::new(replica_id, cx.model_id() as u64, base_text.into()),
342 Some(file),
343 )
344 }
345
346 pub fn from_proto(
347 replica_id: ReplicaId,
348 message: proto::BufferState,
349 file: Option<Arc<dyn File>>,
350 ) -> Result<Self> {
351 let buffer = TextBuffer::new(replica_id, message.id, message.base_text);
352 let mut this = Self::build(buffer, file);
353 this.text.set_line_ending(proto::deserialize_line_ending(
354 proto::LineEnding::from_i32(message.line_ending)
355 .ok_or_else(|| anyhow!("missing line_ending"))?,
356 ));
357 Ok(this)
358 }
359
360 pub fn to_proto(&self) -> proto::BufferState {
361 proto::BufferState {
362 id: self.remote_id(),
363 file: self.file.as_ref().map(|f| f.to_proto()),
364 base_text: self.base_text().to_string(),
365 line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
366 }
367 }
368
369 pub fn serialize_ops(&self, cx: &AppContext) -> Task<Vec<proto::Operation>> {
370 let mut operations = Vec::new();
371 operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));
372 operations.extend(self.remote_selections.iter().map(|(_, set)| {
373 proto::serialize_operation(&Operation::UpdateSelections {
374 selections: set.selections.clone(),
375 lamport_timestamp: set.lamport_timestamp,
376 line_mode: set.line_mode,
377 })
378 }));
379 operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
380 diagnostics: self.diagnostics.iter().cloned().collect(),
381 lamport_timestamp: self.diagnostics_timestamp,
382 }));
383 operations.push(proto::serialize_operation(
384 &Operation::UpdateCompletionTriggers {
385 triggers: self.completion_triggers.clone(),
386 lamport_timestamp: self.completion_triggers_timestamp,
387 },
388 ));
389
390 let text_operations = self.text.operations().clone();
391 cx.background().spawn(async move {
392 operations.extend(
393 text_operations
394 .iter()
395 .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
396 );
397 operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
398 operations
399 })
400 }
401
402 pub fn with_language(mut self, language: Arc<Language>, cx: &mut ModelContext<Self>) -> Self {
403 self.set_language(Some(language), cx);
404 self
405 }
406
407 fn build(buffer: TextBuffer, file: Option<Arc<dyn File>>) -> Self {
408 let saved_mtime = if let Some(file) = file.as_ref() {
409 file.mtime()
410 } else {
411 UNIX_EPOCH
412 };
413
414 Self {
415 saved_mtime,
416 saved_version: buffer.version(),
417 saved_version_fingerprint: buffer.as_rope().fingerprint(),
418 transaction_depth: 0,
419 was_dirty_before_starting_transaction: None,
420 text: buffer,
421 file,
422 syntax_map: Mutex::new(SyntaxMap::new()),
423 parsing_in_background: false,
424 parse_count: 0,
425 sync_parse_timeout: Duration::from_millis(1),
426 autoindent_requests: Default::default(),
427 pending_autoindent: Default::default(),
428 language: None,
429 remote_selections: Default::default(),
430 selections_update_count: 0,
431 diagnostics: Default::default(),
432 diagnostics_update_count: 0,
433 diagnostics_timestamp: Default::default(),
434 file_update_count: 0,
435 completion_triggers: Default::default(),
436 completion_triggers_timestamp: Default::default(),
437 deferred_ops: OperationQueue::new(),
438 }
439 }
440
441 pub fn snapshot(&self) -> BufferSnapshot {
442 let text = self.text.snapshot();
443 let mut syntax_map = self.syntax_map.lock();
444 syntax_map.interpolate(&text);
445 let syntax = syntax_map.snapshot();
446
447 BufferSnapshot {
448 text,
449 syntax,
450 file: self.file.clone(),
451 remote_selections: self.remote_selections.clone(),
452 diagnostics: self.diagnostics.clone(),
453 diagnostics_update_count: self.diagnostics_update_count,
454 file_update_count: self.file_update_count,
455 language: self.language.clone(),
456 parse_count: self.parse_count,
457 selections_update_count: self.selections_update_count,
458 }
459 }
460
461 pub fn as_text_snapshot(&self) -> &text::BufferSnapshot {
462 &self.text
463 }
464
465 pub fn text_snapshot(&self) -> text::BufferSnapshot {
466 self.text.snapshot()
467 }
468
469 pub fn file(&self) -> Option<&dyn File> {
470 self.file.as_deref()
471 }
472
473 pub fn save(
474 &mut self,
475 cx: &mut ModelContext<Self>,
476 ) -> Task<Result<(clock::Global, String, SystemTime)>> {
477 let file = if let Some(file) = self.file.as_ref() {
478 file
479 } else {
480 return Task::ready(Err(anyhow!("buffer has no file")));
481 };
482 let text = self.as_rope().clone();
483 let version = self.version();
484 let save = file.save(
485 self.remote_id(),
486 text,
487 version,
488 self.line_ending(),
489 cx.as_mut(),
490 );
491 cx.spawn(|this, mut cx| async move {
492 let (version, fingerprint, mtime) = save.await?;
493 this.update(&mut cx, |this, cx| {
494 this.did_save(version.clone(), fingerprint.clone(), mtime, None, cx);
495 });
496 Ok((version, fingerprint, mtime))
497 })
498 }
499
500 pub fn saved_version(&self) -> &clock::Global {
501 &self.saved_version
502 }
503
504 pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut ModelContext<Self>) {
505 self.syntax_map.lock().clear();
506 self.language = language;
507 self.reparse(cx);
508 }
509
510 pub fn set_language_registry(&mut self, language_registry: Arc<LanguageRegistry>) {
511 self.syntax_map
512 .lock()
513 .set_language_registry(language_registry);
514 }
515
516 pub fn did_save(
517 &mut self,
518 version: clock::Global,
519 fingerprint: String,
520 mtime: SystemTime,
521 new_file: Option<Arc<dyn File>>,
522 cx: &mut ModelContext<Self>,
523 ) {
524 self.saved_version = version;
525 self.saved_version_fingerprint = fingerprint;
526 self.saved_mtime = mtime;
527 if let Some(new_file) = new_file {
528 self.file = Some(new_file);
529 self.file_update_count += 1;
530 }
531 cx.emit(Event::Saved);
532 cx.notify();
533 }
534
535 pub fn reload(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<Option<Transaction>>> {
536 cx.spawn(|this, mut cx| async move {
537 if let Some((new_mtime, new_text)) = this.read_with(&cx, |this, cx| {
538 let file = this.file.as_ref()?.as_local()?;
539 Some((file.mtime(), file.load(cx)))
540 }) {
541 let new_text = new_text.await?;
542 let diff = this
543 .read_with(&cx, |this, cx| this.diff(new_text, cx))
544 .await;
545 this.update(&mut cx, |this, cx| {
546 if let Some(transaction) = this.apply_diff(diff, cx).cloned() {
547 this.did_reload(
548 this.version(),
549 this.as_rope().fingerprint(),
550 this.line_ending(),
551 new_mtime,
552 cx,
553 );
554 Ok(Some(transaction))
555 } else {
556 Ok(None)
557 }
558 })
559 } else {
560 Ok(None)
561 }
562 })
563 }
564
565 pub fn did_reload(
566 &mut self,
567 version: clock::Global,
568 fingerprint: String,
569 line_ending: LineEnding,
570 mtime: SystemTime,
571 cx: &mut ModelContext<Self>,
572 ) {
573 self.saved_version = version;
574 self.saved_version_fingerprint = fingerprint;
575 self.text.set_line_ending(line_ending);
576 self.saved_mtime = mtime;
577 if let Some(file) = self.file.as_ref().and_then(|f| f.as_local()) {
578 file.buffer_reloaded(
579 self.remote_id(),
580 &self.saved_version,
581 self.saved_version_fingerprint.clone(),
582 self.line_ending(),
583 self.saved_mtime,
584 cx,
585 );
586 }
587 cx.emit(Event::Reloaded);
588 cx.notify();
589 }
590
591 pub fn file_updated(
592 &mut self,
593 new_file: Arc<dyn File>,
594 cx: &mut ModelContext<Self>,
595 ) -> Task<()> {
596 let old_file = if let Some(file) = self.file.as_ref() {
597 file
598 } else {
599 return Task::ready(());
600 };
601 let mut file_changed = false;
602 let mut task = Task::ready(());
603
604 if new_file.path() != old_file.path() {
605 file_changed = true;
606 }
607
608 if new_file.is_deleted() {
609 if !old_file.is_deleted() {
610 file_changed = true;
611 if !self.is_dirty() {
612 cx.emit(Event::DirtyChanged);
613 }
614 }
615 } else {
616 let new_mtime = new_file.mtime();
617 if new_mtime != old_file.mtime() {
618 file_changed = true;
619
620 if !self.is_dirty() {
621 let reload = self.reload(cx).log_err().map(drop);
622 task = cx.foreground().spawn(reload);
623 }
624 }
625 }
626
627 if file_changed {
628 self.file_update_count += 1;
629 cx.emit(Event::FileHandleChanged);
630 cx.notify();
631 }
632 self.file = Some(new_file);
633 task
634 }
635
636 pub fn close(&mut self, cx: &mut ModelContext<Self>) {
637 cx.emit(Event::Closed);
638 }
639
640 pub fn language(&self) -> Option<&Arc<Language>> {
641 self.language.as_ref()
642 }
643
644 pub fn parse_count(&self) -> usize {
645 self.parse_count
646 }
647
648 pub fn selections_update_count(&self) -> usize {
649 self.selections_update_count
650 }
651
652 pub fn diagnostics_update_count(&self) -> usize {
653 self.diagnostics_update_count
654 }
655
656 pub fn file_update_count(&self) -> usize {
657 self.file_update_count
658 }
659
660 #[cfg(any(test, feature = "test-support"))]
661 pub fn is_parsing(&self) -> bool {
662 self.parsing_in_background
663 }
664
665 #[cfg(test)]
666 pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
667 self.sync_parse_timeout = timeout;
668 }
669
670 fn reparse(&mut self, cx: &mut ModelContext<Self>) {
671 if self.parsing_in_background {
672 return;
673 }
674 let language = if let Some(language) = self.language.clone() {
675 language
676 } else {
677 return;
678 };
679
680 let text = self.text_snapshot();
681 let parsed_version = self.version();
682
683 let mut syntax_map = self.syntax_map.lock();
684 syntax_map.interpolate(&text);
685 let language_registry = syntax_map.language_registry();
686 let mut syntax_snapshot = syntax_map.snapshot();
687 let syntax_map_version = syntax_map.parsed_version();
688 drop(syntax_map);
689
690 let parse_task = cx.background().spawn({
691 let language = language.clone();
692 async move {
693 syntax_snapshot.reparse(&syntax_map_version, &text, language_registry, language);
694 syntax_snapshot
695 }
696 });
697
698 match cx
699 .background()
700 .block_with_timeout(self.sync_parse_timeout, parse_task)
701 {
702 Ok(new_syntax_snapshot) => {
703 self.did_finish_parsing(new_syntax_snapshot, parsed_version, cx);
704 return;
705 }
706 Err(parse_task) => {
707 self.parsing_in_background = true;
708 cx.spawn(move |this, mut cx| async move {
709 let new_syntax_map = parse_task.await;
710 this.update(&mut cx, move |this, cx| {
711 let grammar_changed =
712 this.language.as_ref().map_or(true, |current_language| {
713 !Arc::ptr_eq(&language, current_language)
714 });
715 let parse_again =
716 this.version.changed_since(&parsed_version) || grammar_changed;
717 this.did_finish_parsing(new_syntax_map, parsed_version, cx);
718 this.parsing_in_background = false;
719 if parse_again {
720 this.reparse(cx);
721 }
722 });
723 })
724 .detach();
725 }
726 }
727 }
728
729 fn did_finish_parsing(
730 &mut self,
731 syntax_snapshot: SyntaxSnapshot,
732 version: clock::Global,
733 cx: &mut ModelContext<Self>,
734 ) {
735 self.parse_count += 1;
736 self.syntax_map.lock().did_parse(syntax_snapshot, version);
737 self.request_autoindent(cx);
738 cx.emit(Event::Reparsed);
739 cx.notify();
740 }
741
742 pub fn update_diagnostics(&mut self, diagnostics: DiagnosticSet, cx: &mut ModelContext<Self>) {
743 let lamport_timestamp = self.text.lamport_clock.tick();
744 let op = Operation::UpdateDiagnostics {
745 diagnostics: diagnostics.iter().cloned().collect(),
746 lamport_timestamp,
747 };
748 self.apply_diagnostic_update(diagnostics, lamport_timestamp, cx);
749 self.send_operation(op, cx);
750 }
751
752 fn request_autoindent(&mut self, cx: &mut ModelContext<Self>) {
753 if let Some(indent_sizes) = self.compute_autoindents() {
754 let indent_sizes = cx.background().spawn(indent_sizes);
755 match cx
756 .background()
757 .block_with_timeout(Duration::from_micros(500), indent_sizes)
758 {
759 Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
760 Err(indent_sizes) => {
761 self.pending_autoindent = Some(cx.spawn(|this, mut cx| async move {
762 let indent_sizes = indent_sizes.await;
763 this.update(&mut cx, |this, cx| {
764 this.apply_autoindents(indent_sizes, cx);
765 });
766 }));
767 }
768 }
769 }
770 }
771
772 fn compute_autoindents(&self) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>>> {
773 let max_rows_between_yields = 100;
774 let snapshot = self.snapshot();
775 if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
776 return None;
777 }
778
779 let autoindent_requests = self.autoindent_requests.clone();
780 Some(async move {
781 let mut indent_sizes = BTreeMap::new();
782 for request in autoindent_requests {
783 // Resolve each edited range to its row in the current buffer and in the
784 // buffer before this batch of edits.
785 let mut row_ranges = Vec::new();
786 let mut old_to_new_rows = BTreeMap::new();
787 for entry in &request.entries {
788 let position = entry.range.start;
789 let new_row = position.to_point(&snapshot).row;
790 let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
791 if !entry.first_line_is_new {
792 let old_row = position.to_point(&request.before_edit).row;
793 old_to_new_rows.insert(old_row, new_row);
794 }
795 row_ranges.push((new_row..new_end_row, entry.original_indent_column));
796 }
797
798 // Build a map containing the suggested indentation for each of the edited lines
799 // with respect to the state of the buffer before these edits. This map is keyed
800 // by the rows for these lines in the current state of the buffer.
801 let mut old_suggestions = BTreeMap::<u32, IndentSize>::default();
802 let old_edited_ranges =
803 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
804 for old_edited_range in old_edited_ranges {
805 let suggestions = request
806 .before_edit
807 .suggest_autoindents(old_edited_range.clone())
808 .into_iter()
809 .flatten();
810 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
811 if let Some(suggestion) = suggestion {
812 let suggested_indent = old_to_new_rows
813 .get(&suggestion.basis_row)
814 .and_then(|from_row| old_suggestions.get(from_row).copied())
815 .unwrap_or_else(|| {
816 request
817 .before_edit
818 .indent_size_for_line(suggestion.basis_row)
819 })
820 .with_delta(suggestion.delta, request.indent_size);
821 old_suggestions
822 .insert(*old_to_new_rows.get(&old_row).unwrap(), suggested_indent);
823 }
824 }
825 yield_now().await;
826 }
827
828 // In block mode, only compute indentation suggestions for the first line
829 // of each insertion. Otherwise, compute suggestions for every inserted line.
830 let new_edited_row_ranges = contiguous_ranges(
831 row_ranges.iter().flat_map(|(range, _)| {
832 if request.is_block_mode {
833 range.start..range.start + 1
834 } else {
835 range.clone()
836 }
837 }),
838 max_rows_between_yields,
839 );
840
841 // Compute new suggestions for each line, but only include them in the result
842 // if they differ from the old suggestion for that line.
843 for new_edited_row_range in new_edited_row_ranges {
844 let suggestions = snapshot
845 .suggest_autoindents(new_edited_row_range.clone())
846 .into_iter()
847 .flatten();
848 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
849 if let Some(suggestion) = suggestion {
850 let suggested_indent = indent_sizes
851 .get(&suggestion.basis_row)
852 .copied()
853 .unwrap_or_else(|| {
854 snapshot.indent_size_for_line(suggestion.basis_row)
855 })
856 .with_delta(suggestion.delta, request.indent_size);
857 if old_suggestions
858 .get(&new_row)
859 .map_or(true, |old_indentation| {
860 suggested_indent != *old_indentation
861 })
862 {
863 indent_sizes.insert(new_row, suggested_indent);
864 }
865 }
866 }
867 yield_now().await;
868 }
869
870 // For each block of inserted text, adjust the indentation of the remaining
871 // lines of the block by the same amount as the first line was adjusted.
872 if request.is_block_mode {
873 for (row_range, original_indent_column) in
874 row_ranges
875 .into_iter()
876 .filter_map(|(range, original_indent_column)| {
877 if range.len() > 1 {
878 Some((range, original_indent_column?))
879 } else {
880 None
881 }
882 })
883 {
884 let new_indent = indent_sizes
885 .get(&row_range.start)
886 .copied()
887 .unwrap_or_else(|| snapshot.indent_size_for_line(row_range.start));
888 let delta = new_indent.len as i64 - original_indent_column as i64;
889 if delta != 0 {
890 for row in row_range.skip(1) {
891 indent_sizes.entry(row).or_insert_with(|| {
892 let mut size = snapshot.indent_size_for_line(row);
893 if size.kind == new_indent.kind {
894 match delta.cmp(&0) {
895 Ordering::Greater => size.len += delta as u32,
896 Ordering::Less => {
897 size.len = size.len.saturating_sub(-delta as u32)
898 }
899 Ordering::Equal => {}
900 }
901 }
902 size
903 });
904 }
905 }
906 }
907 }
908 }
909
910 indent_sizes
911 })
912 }
913
914 fn apply_autoindents(
915 &mut self,
916 indent_sizes: BTreeMap<u32, IndentSize>,
917 cx: &mut ModelContext<Self>,
918 ) {
919 self.autoindent_requests.clear();
920
921 let edits: Vec<_> = indent_sizes
922 .into_iter()
923 .filter_map(|(row, indent_size)| {
924 let current_size = indent_size_for_line(self, row);
925 Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
926 })
927 .collect();
928
929 self.edit(edits, None, cx);
930 }
931
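    /// Returns the edit needed to change a line's indentation from `current_size`
    /// to `new_size`: an insertion at the start of the line when growing, or a
    /// deletion of the excess columns when shrinking. Returns `None` when the two
    /// sizes are equal, or when they use different kinds of whitespace and the line
    /// is already indented.
    ///
    /// A minimal illustration (not run as a doc-test), using the `IndentSize::spaces`
    /// constructor referenced elsewhere in this file:
    ///
    /// ```ignore
    /// // Growing row 3 from two spaces to four spaces inserts two spaces at column 0.
    /// let edit = Buffer::edit_for_indent_size_adjustment(
    ///     3,
    ///     IndentSize::spaces(2),
    ///     IndentSize::spaces(4),
    /// );
    /// assert_eq!(edit, Some((Point::new(3, 0)..Point::new(3, 0), "  ".to_string())));
    /// ```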
932 pub fn edit_for_indent_size_adjustment(
933 row: u32,
934 current_size: IndentSize,
935 new_size: IndentSize,
936 ) -> Option<(Range<Point>, String)> {
937 if new_size.kind != current_size.kind && current_size.len > 0 {
938 return None;
939 }
940
        match new_size.len.cmp(&current_size.len) {
942 Ordering::Greater => {
943 let point = Point::new(row, 0);
944 Some((
945 point..point,
946 iter::repeat(new_size.char())
947 .take((new_size.len - current_size.len) as usize)
948 .collect::<String>(),
949 ))
950 }
951
952 Ordering::Less => Some((
953 Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
954 String::new(),
955 )),
956
957 Ordering::Equal => None,
958 }
959 }
960
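    /// Computes a character-wise diff between the buffer's current text and
    /// `new_text` on a background thread, normalizing the new text's line endings
    /// first. The resulting `Diff` records the buffer version it was computed
    /// against.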
961 pub fn diff(&self, mut new_text: String, cx: &AppContext) -> Task<Diff> {
962 let old_text = self.as_rope().clone();
963 let base_version = self.version();
964 cx.background().spawn(async move {
965 let old_text = old_text.to_string();
966 let line_ending = LineEnding::detect(&new_text);
967 LineEnding::normalize(&mut new_text);
968 let changes = TextDiff::from_chars(old_text.as_str(), new_text.as_str())
969 .iter_all_changes()
970 .map(|c| (c.tag(), c.value().len()))
971 .collect::<Vec<_>>();
972 Diff {
973 base_version,
974 new_text: new_text.into(),
975 changes,
976 line_ending,
977 start_offset: 0,
978 }
979 })
980 }
981
982 pub fn apply_diff(&mut self, diff: Diff, cx: &mut ModelContext<Self>) -> Option<&Transaction> {
983 if self.version == diff.base_version {
984 self.finalize_last_transaction();
985 self.start_transaction();
986 self.text.set_line_ending(diff.line_ending);
987 let mut offset = diff.start_offset;
988 for (tag, len) in diff.changes {
989 let range = offset..(offset + len);
990 match tag {
991 ChangeTag::Equal => offset += len,
992 ChangeTag::Delete => {
993 self.edit([(range, "")], None, cx);
994 }
995 ChangeTag::Insert => {
996 self.edit(
997 [(
998 offset..offset,
999 &diff.new_text[range.start - diff.start_offset
1000 ..range.end - diff.start_offset],
1001 )],
1002 None,
1003 cx,
1004 );
1005 offset += len;
1006 }
1007 }
1008 }
1009 if self.end_transaction(cx).is_some() {
1010 self.finalize_last_transaction()
1011 } else {
1012 None
1013 }
1014 } else {
1015 None
1016 }
1017 }
1018
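    /// Whether the buffer's contents differ from the last saved version, either
    /// because the text has changed (detected via its fingerprint) or because the
    /// underlying file has been deleted.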
1019 pub fn is_dirty(&self) -> bool {
1020 self.saved_version_fingerprint != self.as_rope().fingerprint()
1021 || self.file.as_ref().map_or(false, |file| file.is_deleted())
1022 }
1023
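    /// Whether the buffer has unsaved changes *and* the underlying file has been
    /// modified on disk since the last save, meaning that saving would overwrite
    /// changes made outside of this buffer.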
1024 pub fn has_conflict(&self) -> bool {
1025 self.saved_version_fingerprint != self.as_rope().fingerprint()
1026 && self
1027 .file
1028 .as_ref()
1029 .map_or(false, |file| file.mtime() > self.saved_mtime)
1030 }
1031
1032 pub fn subscribe(&mut self) -> Subscription {
1033 self.text.subscribe()
1034 }
1035
1036 pub fn start_transaction(&mut self) -> Option<TransactionId> {
1037 self.start_transaction_at(Instant::now())
1038 }
1039
1040 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
1041 self.transaction_depth += 1;
1042 if self.was_dirty_before_starting_transaction.is_none() {
1043 self.was_dirty_before_starting_transaction = Some(self.is_dirty());
1044 }
1045 self.text.start_transaction_at(now)
1046 }
1047
1048 pub fn end_transaction(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
1049 self.end_transaction_at(Instant::now(), cx)
1050 }
1051
1052 pub fn end_transaction_at(
1053 &mut self,
1054 now: Instant,
1055 cx: &mut ModelContext<Self>,
1056 ) -> Option<TransactionId> {
1057 assert!(self.transaction_depth > 0);
1058 self.transaction_depth -= 1;
1059 let was_dirty = if self.transaction_depth == 0 {
1060 self.was_dirty_before_starting_transaction.take().unwrap()
1061 } else {
1062 false
1063 };
1064 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
1065 self.did_edit(&start_version, was_dirty, cx);
1066 Some(transaction_id)
1067 } else {
1068 None
1069 }
1070 }
1071
1072 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
1073 self.text.push_transaction(transaction, now);
1074 }
1075
1076 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
1077 self.text.finalize_last_transaction()
1078 }
1079
1080 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
1081 self.text.group_until_transaction(transaction_id);
1082 }
1083
1084 pub fn forget_transaction(&mut self, transaction_id: TransactionId) {
1085 self.text.forget_transaction(transaction_id);
1086 }
1087
1088 pub fn wait_for_edits(
1089 &mut self,
1090 edit_ids: impl IntoIterator<Item = clock::Local>,
1091 ) -> impl Future<Output = ()> {
1092 self.text.wait_for_edits(edit_ids)
1093 }
1094
1095 pub fn wait_for_anchors<'a>(
1096 &mut self,
1097 anchors: impl IntoIterator<Item = &'a Anchor>,
1098 ) -> impl Future<Output = ()> {
1099 self.text.wait_for_anchors(anchors)
1100 }
1101
1102 pub fn wait_for_version(&mut self, version: clock::Global) -> impl Future<Output = ()> {
1103 self.text.wait_for_version(version)
1104 }
1105
1106 pub fn set_active_selections(
1107 &mut self,
1108 selections: Arc<[Selection<Anchor>]>,
1109 line_mode: bool,
1110 cx: &mut ModelContext<Self>,
1111 ) {
1112 let lamport_timestamp = self.text.lamport_clock.tick();
1113 self.remote_selections.insert(
1114 self.text.replica_id(),
1115 SelectionSet {
1116 selections: selections.clone(),
1117 lamport_timestamp,
1118 line_mode,
1119 },
1120 );
1121 self.send_operation(
1122 Operation::UpdateSelections {
1123 selections,
1124 line_mode,
1125 lamport_timestamp,
1126 },
1127 cx,
1128 );
1129 }
1130
1131 pub fn remove_active_selections(&mut self, cx: &mut ModelContext<Self>) {
1132 self.set_active_selections(Arc::from([]), false, cx);
1133 }
1134
1135 pub fn set_text<T>(&mut self, text: T, cx: &mut ModelContext<Self>) -> Option<clock::Local>
1136 where
1137 T: Into<Arc<str>>,
1138 {
1139 self.edit([(0..self.len(), text)], None, cx)
1140 }
1141
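    /// Applies the given edits to the buffer, optionally requesting that the edited
    /// ranges be auto-indented. Edits that neither remove nor insert text are
    /// skipped, and overlapping or adjacent edits are coalesced. Returns the local
    /// timestamp of the resulting text operation, or `None` if there was nothing to
    /// edit.
    ///
    /// A hypothetical call (not run as a doc-test), replacing the first three bytes
    /// and inserting at offset 10 without auto-indentation:
    ///
    /// ```ignore
    /// buffer.edit([(0..3, "abc"), (10..10, "xyz")], None, cx);
    /// ```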
1142 pub fn edit<I, S, T>(
1143 &mut self,
1144 edits_iter: I,
1145 autoindent_mode: Option<AutoindentMode>,
1146 cx: &mut ModelContext<Self>,
1147 ) -> Option<clock::Local>
1148 where
1149 I: IntoIterator<Item = (Range<S>, T)>,
1150 S: ToOffset,
1151 T: Into<Arc<str>>,
1152 {
1153 // Skip invalid edits and coalesce contiguous ones.
1154 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
1155 for (range, new_text) in edits_iter {
1156 let mut range = range.start.to_offset(self)..range.end.to_offset(self);
1157 if range.start > range.end {
1158 mem::swap(&mut range.start, &mut range.end);
1159 }
1160 let new_text = new_text.into();
1161 if !new_text.is_empty() || !range.is_empty() {
1162 if let Some((prev_range, prev_text)) = edits.last_mut() {
1163 if prev_range.end >= range.start {
1164 prev_range.end = cmp::max(prev_range.end, range.end);
1165 *prev_text = format!("{prev_text}{new_text}").into();
1166 } else {
1167 edits.push((range, new_text));
1168 }
1169 } else {
1170 edits.push((range, new_text));
1171 }
1172 }
1173 }
1174 if edits.is_empty() {
1175 return None;
1176 }
1177
1178 self.start_transaction();
1179 self.pending_autoindent.take();
1180 let autoindent_request = autoindent_mode
1181 .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
1182
1183 let edit_operation = self.text.edit(edits.iter().cloned());
1184 let edit_id = edit_operation.local_timestamp();
1185
1186 if let Some((before_edit, mode)) = autoindent_request {
1187 let indent_size = before_edit.single_indent_size(cx);
1188 let (start_columns, is_block_mode) = match mode {
1189 AutoindentMode::Block {
1190 original_indent_columns: start_columns,
1191 } => (start_columns, true),
1192 AutoindentMode::EachLine => (Default::default(), false),
1193 };
1194
1195 let mut delta = 0isize;
1196 let entries = edits
1197 .into_iter()
1198 .enumerate()
1199 .zip(&edit_operation.as_edit().unwrap().new_text)
1200 .map(|((ix, (range, _)), new_text)| {
1201 let new_text_len = new_text.len();
1202 let old_start = range.start.to_point(&before_edit);
1203 let new_start = (delta + range.start as isize) as usize;
1204 delta += new_text_len as isize - (range.end as isize - range.start as isize);
1205
1206 let mut range_of_insertion_to_indent = 0..new_text_len;
1207 let mut first_line_is_new = false;
1208 let mut start_column = None;
1209
1210 // When inserting an entire line at the beginning of an existing line,
1211 // treat the insertion as new.
1212 if new_text.contains('\n')
1213 && old_start.column <= before_edit.indent_size_for_line(old_start.row).len
1214 {
1215 first_line_is_new = true;
1216 }
1217
1218 // When inserting text starting with a newline, avoid auto-indenting the
1219 // previous line.
1220 if new_text[range_of_insertion_to_indent.clone()].starts_with('\n') {
1221 range_of_insertion_to_indent.start += 1;
1222 first_line_is_new = true;
1223 }
1224
1225 // Avoid auto-indenting after the insertion.
1226 if is_block_mode {
1227 start_column = start_columns.get(ix).copied();
1228 if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
1229 range_of_insertion_to_indent.end -= 1;
1230 }
1231 }
1232
1233 AutoindentRequestEntry {
1234 first_line_is_new,
1235 original_indent_column: start_column,
1236 range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
1237 ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
1238 }
1239 })
1240 .collect();
1241
1242 self.autoindent_requests.push(Arc::new(AutoindentRequest {
1243 before_edit,
1244 entries,
1245 indent_size,
1246 is_block_mode,
1247 }));
1248 }
1249
1250 self.end_transaction(cx);
1251 self.send_operation(Operation::Buffer(edit_operation), cx);
1252 Some(edit_id)
1253 }
1254
1255 fn did_edit(
1256 &mut self,
1257 old_version: &clock::Global,
1258 was_dirty: bool,
1259 cx: &mut ModelContext<Self>,
1260 ) {
1261 if self.edits_since::<usize>(old_version).next().is_none() {
1262 return;
1263 }
1264
1265 self.reparse(cx);
1266
1267 cx.emit(Event::Edited);
1268 if was_dirty != self.is_dirty() {
1269 cx.emit(Event::DirtyChanged);
1270 }
1271 cx.notify();
1272 }
1273
1274 pub fn apply_ops<I: IntoIterator<Item = Operation>>(
1275 &mut self,
1276 ops: I,
1277 cx: &mut ModelContext<Self>,
1278 ) -> Result<()> {
1279 self.pending_autoindent.take();
1280 let was_dirty = self.is_dirty();
1281 let old_version = self.version.clone();
1282 let mut deferred_ops = Vec::new();
1283 let buffer_ops = ops
1284 .into_iter()
1285 .filter_map(|op| match op {
1286 Operation::Buffer(op) => Some(op),
1287 _ => {
1288 if self.can_apply_op(&op) {
1289 self.apply_op(op, cx);
1290 } else {
1291 deferred_ops.push(op);
1292 }
1293 None
1294 }
1295 })
1296 .collect::<Vec<_>>();
1297 self.text.apply_ops(buffer_ops)?;
1298 self.deferred_ops.insert(deferred_ops);
1299 self.flush_deferred_ops(cx);
1300 self.did_edit(&old_version, was_dirty, cx);
        // Notify independently of whether the buffer was edited, as the operations
        // could include a selection update.
1303 cx.notify();
1304 Ok(())
1305 }
1306
1307 fn flush_deferred_ops(&mut self, cx: &mut ModelContext<Self>) {
1308 let mut deferred_ops = Vec::new();
1309 for op in self.deferred_ops.drain().iter().cloned() {
1310 if self.can_apply_op(&op) {
1311 self.apply_op(op, cx);
1312 } else {
1313 deferred_ops.push(op);
1314 }
1315 }
1316 self.deferred_ops.insert(deferred_ops);
1317 }
1318
1319 fn can_apply_op(&self, operation: &Operation) -> bool {
1320 match operation {
1321 Operation::Buffer(_) => {
1322 unreachable!("buffer operations should never be applied at this layer")
1323 }
1324 Operation::UpdateDiagnostics {
1325 diagnostics: diagnostic_set,
1326 ..
1327 } => diagnostic_set.iter().all(|diagnostic| {
1328 self.text.can_resolve(&diagnostic.range.start)
1329 && self.text.can_resolve(&diagnostic.range.end)
1330 }),
1331 Operation::UpdateSelections { selections, .. } => selections
1332 .iter()
1333 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
1334 Operation::UpdateCompletionTriggers { .. } => true,
1335 }
1336 }
1337
1338 fn apply_op(&mut self, operation: Operation, cx: &mut ModelContext<Self>) {
1339 match operation {
1340 Operation::Buffer(_) => {
1341 unreachable!("buffer operations should never be applied at this layer")
1342 }
1343 Operation::UpdateDiagnostics {
1344 diagnostics: diagnostic_set,
1345 lamport_timestamp,
1346 } => {
1347 let snapshot = self.snapshot();
1348 self.apply_diagnostic_update(
1349 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
1350 lamport_timestamp,
1351 cx,
1352 );
1353 }
1354 Operation::UpdateSelections {
1355 selections,
1356 lamport_timestamp,
1357 line_mode,
1358 } => {
1359 if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id) {
1360 if set.lamport_timestamp > lamport_timestamp {
1361 return;
1362 }
1363 }
1364
1365 self.remote_selections.insert(
1366 lamport_timestamp.replica_id,
1367 SelectionSet {
1368 selections,
1369 lamport_timestamp,
1370 line_mode,
1371 },
1372 );
1373 self.text.lamport_clock.observe(lamport_timestamp);
1374 self.selections_update_count += 1;
1375 }
1376 Operation::UpdateCompletionTriggers {
1377 triggers,
1378 lamport_timestamp,
1379 } => {
1380 self.completion_triggers = triggers;
1381 self.text.lamport_clock.observe(lamport_timestamp);
1382 }
1383 }
1384 }
1385
1386 fn apply_diagnostic_update(
1387 &mut self,
1388 diagnostics: DiagnosticSet,
1389 lamport_timestamp: clock::Lamport,
1390 cx: &mut ModelContext<Self>,
1391 ) {
1392 if lamport_timestamp > self.diagnostics_timestamp {
1393 self.diagnostics = diagnostics;
1394 self.diagnostics_timestamp = lamport_timestamp;
1395 self.diagnostics_update_count += 1;
1396 self.text.lamport_clock.observe(lamport_timestamp);
1397 cx.notify();
1398 cx.emit(Event::DiagnosticsUpdated);
1399 }
1400 }
1401
1402 fn send_operation(&mut self, operation: Operation, cx: &mut ModelContext<Self>) {
1403 cx.emit(Event::Operation(operation));
1404 }
1405
1406 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut ModelContext<Self>) {
1407 self.remote_selections.remove(&replica_id);
1408 cx.notify();
1409 }
1410
1411 pub fn undo(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
1412 let was_dirty = self.is_dirty();
1413 let old_version = self.version.clone();
1414
1415 if let Some((transaction_id, operation)) = self.text.undo() {
1416 self.send_operation(Operation::Buffer(operation), cx);
1417 self.did_edit(&old_version, was_dirty, cx);
1418 Some(transaction_id)
1419 } else {
1420 None
1421 }
1422 }
1423
1424 pub fn undo_to_transaction(
1425 &mut self,
1426 transaction_id: TransactionId,
1427 cx: &mut ModelContext<Self>,
1428 ) -> bool {
1429 let was_dirty = self.is_dirty();
1430 let old_version = self.version.clone();
1431
1432 let operations = self.text.undo_to_transaction(transaction_id);
1433 let undone = !operations.is_empty();
1434 for operation in operations {
1435 self.send_operation(Operation::Buffer(operation), cx);
1436 }
1437 if undone {
1438 self.did_edit(&old_version, was_dirty, cx)
1439 }
1440 undone
1441 }
1442
1443 pub fn redo(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
1444 let was_dirty = self.is_dirty();
1445 let old_version = self.version.clone();
1446
1447 if let Some((transaction_id, operation)) = self.text.redo() {
1448 self.send_operation(Operation::Buffer(operation), cx);
1449 self.did_edit(&old_version, was_dirty, cx);
1450 Some(transaction_id)
1451 } else {
1452 None
1453 }
1454 }
1455
1456 pub fn redo_to_transaction(
1457 &mut self,
1458 transaction_id: TransactionId,
1459 cx: &mut ModelContext<Self>,
1460 ) -> bool {
1461 let was_dirty = self.is_dirty();
1462 let old_version = self.version.clone();
1463
1464 let operations = self.text.redo_to_transaction(transaction_id);
1465 let redone = !operations.is_empty();
1466 for operation in operations {
1467 self.send_operation(Operation::Buffer(operation), cx);
1468 }
1469 if redone {
1470 self.did_edit(&old_version, was_dirty, cx)
1471 }
1472 redone
1473 }
1474
1475 pub fn set_completion_triggers(&mut self, triggers: Vec<String>, cx: &mut ModelContext<Self>) {
1476 self.completion_triggers = triggers.clone();
1477 self.completion_triggers_timestamp = self.text.lamport_clock.tick();
1478 self.send_operation(
1479 Operation::UpdateCompletionTriggers {
1480 triggers,
1481 lamport_timestamp: self.completion_triggers_timestamp,
1482 },
1483 cx,
1484 );
1485 cx.notify();
1486 }
1487
1488 pub fn completion_triggers(&self) -> &[String] {
1489 &self.completion_triggers
1490 }
1491}
1492
1493#[cfg(any(test, feature = "test-support"))]
1494impl Buffer {
1495 pub fn set_group_interval(&mut self, group_interval: Duration) {
1496 self.text.set_group_interval(group_interval);
1497 }
1498
1499 pub fn randomly_edit<T>(
1500 &mut self,
1501 rng: &mut T,
1502 old_range_count: usize,
1503 cx: &mut ModelContext<Self>,
1504 ) where
1505 T: rand::Rng,
1506 {
1507 let mut edits: Vec<(Range<usize>, String)> = Vec::new();
1508 let mut last_end = None;
1509 for _ in 0..old_range_count {
1510 if last_end.map_or(false, |last_end| last_end >= self.len()) {
1511 break;
1512 }
1513
1514 let new_start = last_end.map_or(0, |last_end| last_end + 1);
1515 let mut range = self.random_byte_range(new_start, rng);
1516 if rng.gen_bool(0.2) {
1517 mem::swap(&mut range.start, &mut range.end);
1518 }
1519 last_end = Some(range.end);
1520
1521 let new_text_len = rng.gen_range(0..10);
1522 let new_text: String = crate::random_char_iter::RandomCharIter::new(&mut *rng)
1523 .take(new_text_len)
1524 .collect();
1525
1526 edits.push((range, new_text));
1527 }
1528 log::info!("mutating buffer {} with {:?}", self.replica_id(), edits);
1529 self.edit(edits, None, cx);
1530 }
1531
1532 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut ModelContext<Self>) {
1533 let was_dirty = self.is_dirty();
1534 let old_version = self.version.clone();
1535
1536 let ops = self.text.randomly_undo_redo(rng);
1537 if !ops.is_empty() {
1538 for op in ops {
1539 self.send_operation(Operation::Buffer(op), cx);
1540 self.did_edit(&old_version, was_dirty, cx);
1541 }
1542 }
1543 }
1544}
1545
1546impl Entity for Buffer {
1547 type Event = Event;
1548}
1549
1550impl Deref for Buffer {
1551 type Target = TextBuffer;
1552
1553 fn deref(&self) -> &Self::Target {
1554 &self.text
1555 }
1556}
1557
1558impl BufferSnapshot {
1559 pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
1560 indent_size_for_line(self, row)
1561 }
1562
1563 pub fn single_indent_size(&self, cx: &AppContext) -> IndentSize {
1564 let language_name = self.language().map(|language| language.name());
1565 let settings = cx.global::<Settings>();
1566 if settings.hard_tabs(language_name.as_deref()) {
1567 IndentSize::tab()
1568 } else {
1569 IndentSize::spaces(settings.tab_size(language_name.as_deref()).get())
1570 }
1571 }
1572
1573 pub fn suggested_indents(
1574 &self,
1575 rows: impl Iterator<Item = u32>,
1576 single_indent_size: IndentSize,
1577 ) -> BTreeMap<u32, IndentSize> {
1578 let mut result = BTreeMap::new();
1579
1580 for row_range in contiguous_ranges(rows, 10) {
1581 let suggestions = match self.suggest_autoindents(row_range.clone()) {
1582 Some(suggestions) => suggestions,
1583 _ => break,
1584 };
1585
1586 for (row, suggestion) in row_range.zip(suggestions) {
1587 let indent_size = if let Some(suggestion) = suggestion {
1588 result
1589 .get(&suggestion.basis_row)
1590 .copied()
1591 .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
1592 .with_delta(suggestion.delta, single_indent_size)
1593 } else {
1594 self.indent_size_for_line(row)
1595 };
1596
1597 result.insert(row, indent_size);
1598 }
1599 }
1600
1601 result
1602 }
1603
1604 fn suggest_autoindents(
1605 &self,
1606 row_range: Range<u32>,
1607 ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
1608 let config = &self.language.as_ref()?.config;
1609 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
1610
1611 // Find the suggested indentation ranges based on the syntax tree.
1612 let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
1613 let end = Point::new(row_range.end, 0);
1614 let range = (start..end).to_offset(&self.text);
1615 let mut matches = self.syntax.matches(range, &self.text, |grammar| {
1616 Some(&grammar.indents_config.as_ref()?.query)
1617 });
1618 let indent_configs = matches
1619 .grammars()
1620 .iter()
1621 .map(|grammar| grammar.indents_config.as_ref().unwrap())
1622 .collect::<Vec<_>>();
1623
1624 let mut indent_ranges = Vec::<Range<Point>>::new();
1625 while let Some(mat) = matches.peek() {
1626 let mut start: Option<Point> = None;
1627 let mut end: Option<Point> = None;
1628
1629 let config = &indent_configs[mat.grammar_index];
1630 for capture in mat.captures {
1631 if capture.index == config.indent_capture_ix {
1632 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
1633 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
1634 } else if Some(capture.index) == config.end_capture_ix {
1635 end = Some(Point::from_ts_point(capture.node.start_position()));
1636 }
1637 }
1638
1639 matches.advance();
1640 if let Some((start, end)) = start.zip(end) {
1641 if start.row == end.row {
1642 continue;
1643 }
1644
1645 let range = start..end;
1646 match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
1647 Err(ix) => indent_ranges.insert(ix, range),
1648 Ok(ix) => {
1649 let prev_range = &mut indent_ranges[ix];
1650 prev_range.end = prev_range.end.max(range.end);
1651 }
1652 }
1653 }
1654 }
1655
        // Find the suggested indentation increases and decreases based on regexes.
1657 let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
1658 self.for_each_line(
1659 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
1660 ..Point::new(row_range.end, 0),
1661 |row, line| {
1662 if config
1663 .decrease_indent_pattern
1664 .as_ref()
1665 .map_or(false, |regex| regex.is_match(line))
1666 {
1667 indent_change_rows.push((row, Ordering::Less));
1668 }
1669 if config
1670 .increase_indent_pattern
1671 .as_ref()
1672 .map_or(false, |regex| regex.is_match(line))
1673 {
1674 indent_change_rows.push((row + 1, Ordering::Greater));
1675 }
1676 },
1677 );
1678
1679 let mut indent_changes = indent_change_rows.into_iter().peekable();
1680 let mut prev_row = if config.auto_indent_using_last_non_empty_line {
1681 prev_non_blank_row.unwrap_or(0)
1682 } else {
1683 row_range.start.saturating_sub(1)
1684 };
1685 let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
1686 Some(row_range.map(move |row| {
1687 let row_start = Point::new(row, self.indent_size_for_line(row).len);
1688
1689 let mut indent_from_prev_row = false;
1690 let mut outdent_from_prev_row = false;
1691 let mut outdent_to_row = u32::MAX;
1692
1693 while let Some((indent_row, delta)) = indent_changes.peek() {
1694 match indent_row.cmp(&row) {
1695 Ordering::Equal => match delta {
1696 Ordering::Less => outdent_from_prev_row = true,
1697 Ordering::Greater => indent_from_prev_row = true,
1698 _ => {}
1699 },
1700
1701 Ordering::Greater => break,
1702 Ordering::Less => {}
1703 }
1704
1705 indent_changes.next();
1706 }
1707
1708 for range in &indent_ranges {
1709 if range.start.row >= row {
1710 break;
1711 }
1712 if range.start.row == prev_row && range.end > row_start {
1713 indent_from_prev_row = true;
1714 }
1715 if range.end > prev_row_start && range.end <= row_start {
1716 outdent_to_row = outdent_to_row.min(range.start.row);
1717 }
1718 }
1719
1720 let suggestion = if outdent_to_row == prev_row
1721 || (outdent_from_prev_row && indent_from_prev_row)
1722 {
1723 Some(IndentSuggestion {
1724 basis_row: prev_row,
1725 delta: Ordering::Equal,
1726 })
1727 } else if indent_from_prev_row {
1728 Some(IndentSuggestion {
1729 basis_row: prev_row,
1730 delta: Ordering::Greater,
1731 })
1732 } else if outdent_to_row < prev_row {
1733 Some(IndentSuggestion {
1734 basis_row: outdent_to_row,
1735 delta: Ordering::Equal,
1736 })
1737 } else if outdent_from_prev_row {
1738 Some(IndentSuggestion {
1739 basis_row: prev_row,
1740 delta: Ordering::Less,
1741 })
1742 } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
1743 {
1744 Some(IndentSuggestion {
1745 basis_row: prev_row,
1746 delta: Ordering::Equal,
1747 })
1748 } else {
1749 None
1750 };
1751
1752 prev_row = row;
1753 prev_row_start = row_start;
1754 suggestion
1755 }))
1756 }
1757
1758 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
1759 while row > 0 {
1760 row -= 1;
1761 if !self.is_line_blank(row) {
1762 return Some(row);
1763 }
1764 }
1765 None
1766 }
1767
1768 pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks {
1769 let range = range.start.to_offset(self)..range.end.to_offset(self);
1770
1771 let mut syntax = None;
1772 let mut diagnostic_endpoints = Vec::new();
1773 if language_aware {
1774 let captures = self.syntax.captures(range.clone(), &self.text, |grammar| {
1775 grammar.highlights_query.as_ref()
1776 });
1777 let highlight_maps = captures
1778 .grammars()
1779 .into_iter()
1780 .map(|grammar| grammar.highlight_map())
1781 .collect();
1782 syntax = Some((captures, highlight_maps));
1783 for entry in self.diagnostics_in_range::<_, usize>(range.clone(), false) {
1784 diagnostic_endpoints.push(DiagnosticEndpoint {
1785 offset: entry.range.start,
1786 is_start: true,
1787 severity: entry.diagnostic.severity,
1788 is_unnecessary: entry.diagnostic.is_unnecessary,
1789 });
1790 diagnostic_endpoints.push(DiagnosticEndpoint {
1791 offset: entry.range.end,
1792 is_start: false,
1793 severity: entry.diagnostic.severity,
1794 is_unnecessary: entry.diagnostic.is_unnecessary,
1795 });
1796 }
1797 diagnostic_endpoints
1798 .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
1799 }
1800
1801 BufferChunks::new(self.text.as_rope(), range, syntax, diagnostic_endpoints)
1802 }
1803
1804 pub fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
1805 let mut line = String::new();
1806 let mut row = range.start.row;
1807 for chunk in self
1808 .as_rope()
1809 .chunks_in_range(range.to_offset(self))
1810 .chain(["\n"])
1811 {
1812 for (newline_ix, text) in chunk.split('\n').enumerate() {
1813 if newline_ix > 0 {
1814 callback(row, &line);
1815 row += 1;
1816 line.clear();
1817 }
1818 line.push_str(text);
1819 }
1820 }
1821 }
1822
1823 pub fn language(&self) -> Option<&Arc<Language>> {
1824 self.language.as_ref()
1825 }
1826
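    /// Returns the range of the word-like run of characters surrounding the given
    /// position, along with the kind of characters it consists of. The run is
    /// expanded in both directions for as long as the characters have the same kind
    /// and are not newlines.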
1827 pub fn surrounding_word<T: ToOffset>(&self, start: T) -> (Range<usize>, Option<CharKind>) {
1828 let mut start = start.to_offset(self);
1829 let mut end = start;
1830 let mut next_chars = self.chars_at(start).peekable();
1831 let mut prev_chars = self.reversed_chars_at(start).peekable();
1832 let word_kind = cmp::max(
1833 prev_chars.peek().copied().map(char_kind),
1834 next_chars.peek().copied().map(char_kind),
1835 );
1836
1837 for ch in prev_chars {
1838 if Some(char_kind(ch)) == word_kind && ch != '\n' {
1839 start -= ch.len_utf8();
1840 } else {
1841 break;
1842 }
1843 }
1844
1845 for ch in next_chars {
1846 if Some(char_kind(ch)) == word_kind && ch != '\n' {
1847 end += ch.len_utf8();
1848 } else {
1849 break;
1850 }
1851 }
1852
1853 (start..end, word_kind)
1854 }
1855
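    /// Returns the byte range of the smallest syntax node that strictly contains
    /// the given range, searching every syntax layer and preferring the smallest
    /// matching node across layers.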
1856 pub fn range_for_syntax_ancestor<T: ToOffset>(&self, range: Range<T>) -> Option<Range<usize>> {
1857 let range = range.start.to_offset(self)..range.end.to_offset(self);
1858 let mut result: Option<Range<usize>> = None;
1859 'outer: for (_, _, node) in self.syntax.layers_for_range(range.clone(), &self.text) {
1860 let mut cursor = node.walk();
1861
1862 // Descend to the first leaf that touches the start of the range,
1863 // and if the range is non-empty, extends beyond the start.
1864 while cursor.goto_first_child_for_byte(range.start).is_some() {
1865 if !range.is_empty() && cursor.node().end_byte() == range.start {
1866 cursor.goto_next_sibling();
1867 }
1868 }
1869
1870 // Ascend to the smallest ancestor that strictly contains the range.
1871 loop {
1872 let node_range = cursor.node().byte_range();
1873 if node_range.start <= range.start
1874 && node_range.end >= range.end
1875 && node_range.len() > range.len()
1876 {
1877 break;
1878 }
1879 if !cursor.goto_parent() {
1880 continue 'outer;
1881 }
1882 }
1883
1884 let left_node = cursor.node();
1885 let mut layer_result = left_node.byte_range();
1886
1887 // For an empty range, try to find another node immediately to the right of the range.
1888 if left_node.end_byte() == range.start {
1889 let mut right_node = None;
1890 while !cursor.goto_next_sibling() {
1891 if !cursor.goto_parent() {
1892 break;
1893 }
1894 }
1895
1896 while cursor.node().start_byte() == range.start {
1897 right_node = Some(cursor.node());
1898 if !cursor.goto_first_child() {
1899 break;
1900 }
1901 }
1902
1903 // If there is a candidate node on both sides of the (empty) range, then
1904 // decide between the two by favoring a named node over an anonymous token.
1905 // If both nodes are the same in that regard, favor the right one.
1906 if let Some(right_node) = right_node {
1907 if right_node.is_named() || !left_node.is_named() {
1908 layer_result = right_node.byte_range();
1909 }
1910 }
1911 }
1912
1913 if let Some(previous_result) = &result {
1914 if previous_result.len() < layer_result.len() {
1915 continue;
1916 }
1917 }
1918 result = Some(layer_result);
1919 }
1920
1921 result
1922 }
1923
1924 pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Option<Outline<Anchor>> {
1925 self.outline_items_containing(0..self.len(), theme)
1926 .map(Outline::new)
1927 }
1928
1929 pub fn symbols_containing<T: ToOffset>(
1930 &self,
1931 position: T,
1932 theme: Option<&SyntaxTheme>,
1933 ) -> Option<Vec<OutlineItem<Anchor>>> {
1934 let position = position.to_offset(self);
1935 let mut items = self.outline_items_containing(
1936 position.saturating_sub(1)..self.len().min(position + 1),
1937 theme,
1938 )?;
1939 let mut prev_depth = None;
1940 items.retain(|item| {
1941 let result = prev_depth.map_or(true, |prev_depth| item.depth > prev_depth);
1942 prev_depth = Some(item.depth);
1943 result
1944 });
1945 Some(items)
1946 }
1947
1948 fn outline_items_containing(
1949 &self,
1950 range: Range<usize>,
1951 theme: Option<&SyntaxTheme>,
1952 ) -> Option<Vec<OutlineItem<Anchor>>> {
1953 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
1954 grammar.outline_config.as_ref().map(|c| &c.query)
1955 });
1956 let configs = matches
1957 .grammars()
1958 .iter()
1959 .map(|g| g.outline_config.as_ref().unwrap())
1960 .collect::<Vec<_>>();
1961
1962 let mut chunks = self.chunks(0..self.len(), true);
1963 let mut stack = Vec::<Range<usize>>::new();
1964 let mut items = Vec::new();
1965 while let Some(mat) = matches.peek() {
1966 let config = &configs[mat.grammar_index];
1967 let item_node = mat.captures.iter().find_map(|cap| {
1968 if cap.index == config.item_capture_ix {
1969 Some(cap.node)
1970 } else {
1971 None
1972 }
1973 })?;
1974
1975 let item_range = item_node.byte_range();
1976 if item_range.end < range.start || item_range.start > range.end {
1977 matches.advance();
1978 continue;
1979 }
1980
1981 // TODO - move later, after processing captures
1982
1983 let mut text = String::new();
1984 let mut name_ranges = Vec::new();
1985 let mut highlight_ranges = Vec::new();
1986 for capture in mat.captures {
1987 let node_is_name;
1988 if capture.index == config.name_capture_ix {
1989 node_is_name = true;
1990 } else if Some(capture.index) == config.context_capture_ix {
1991 node_is_name = false;
1992 } else {
1993 continue;
1994 }
1995
1996 let range = capture.node.start_byte()..capture.node.end_byte();
1997 if !text.is_empty() {
1998 text.push(' ');
1999 }
2000 if node_is_name {
2001 let mut start = text.len();
2002 let end = start + range.len();
2003
                    // When multiple names are captured, the matchable text
                    // includes the whitespace between the names.
2006 if !name_ranges.is_empty() {
2007 start -= 1;
2008 }
2009
2010 name_ranges.push(start..end);
2011 }
2012
2013 let mut offset = range.start;
2014 chunks.seek(offset);
2015 for mut chunk in chunks.by_ref() {
2016 if chunk.text.len() > range.end - offset {
2017 chunk.text = &chunk.text[0..(range.end - offset)];
2018 offset = range.end;
2019 } else {
2020 offset += chunk.text.len();
2021 }
2022 let style = chunk
2023 .syntax_highlight_id
2024 .zip(theme)
2025 .and_then(|(highlight, theme)| highlight.style(theme));
2026 if let Some(style) = style {
2027 let start = text.len();
2028 let end = start + chunk.text.len();
2029 highlight_ranges.push((start..end, style));
2030 }
2031 text.push_str(chunk.text);
2032 if offset >= range.end {
2033 break;
2034 }
2035 }
2036 }
2037
2038 matches.advance();
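            // Pop any previously seen items that do not contain this one; the
            // ranges left on the stack are this item's ancestors, and their
            // count determines its depth.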
2039 while stack.last().map_or(false, |prev_range| {
2040 prev_range.start > item_range.start || prev_range.end < item_range.end
2041 }) {
2042 stack.pop();
2043 }
2044 stack.push(item_range.clone());
2045
2046 items.push(OutlineItem {
2047 depth: stack.len() - 1,
2048 range: self.anchor_after(item_range.start)..self.anchor_before(item_range.end),
2049 text,
2050 highlight_ranges,
2051 name_ranges,
2052 })
2053 }
2054 Some(items)
2055 }
2056
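    // Returns the byte ranges of the open and close tokens of the innermost
    // bracket pair containing `range`; e.g., for `foo(bar[i])` with a position
    // inside `i`, the ranges of `[` and `]`.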
2057 pub fn enclosing_bracket_ranges<T: ToOffset>(
2058 &self,
2059 range: Range<T>,
2060 ) -> Option<(Range<usize>, Range<usize>)> {
2061 // Find bracket pairs that *inclusively* contain the given range.
2062 let range = range.start.to_offset(self).saturating_sub(1)
2063 ..self.len().min(range.end.to_offset(self) + 1);
2064 let mut matches = self.syntax.matches(range, &self.text, |grammar| {
2065 grammar.brackets_config.as_ref().map(|c| &c.query)
2066 });
2067 let configs = matches
2068 .grammars()
2069 .iter()
2070 .map(|grammar| grammar.brackets_config.as_ref().unwrap())
2071 .collect::<Vec<_>>();
2072
2073 // Get the ranges of the innermost pair of brackets.
2074 let mut result: Option<(Range<usize>, Range<usize>)> = None;
2075 while let Some(mat) = matches.peek() {
2076 let mut open = None;
2077 let mut close = None;
2078 let config = &configs[mat.grammar_index];
2079 for capture in mat.captures {
2080 if capture.index == config.open_capture_ix {
2081 open = Some(capture.node.byte_range());
2082 } else if capture.index == config.close_capture_ix {
2083 close = Some(capture.node.byte_range());
2084 }
2085 }
2086
2087 matches.advance();
2088
2089 if let Some((open, close)) = open.zip(close) {
2090 let len = close.end - open.start;
2091
2092 if let Some((existing_open, existing_close)) = &result {
2093 let existing_len = existing_close.end - existing_open.start;
2094 if len > existing_len {
2095 continue;
2096 }
2097 }
2098
2099 result = Some((open, close));
2100 }
2101 }
2102
2103 result
2104 }
2105
2106 #[allow(clippy::type_complexity)]
2107 pub fn remote_selections_in_range(
2108 &self,
2109 range: Range<Anchor>,
2110 ) -> impl Iterator<
2111 Item = (
2112 ReplicaId,
2113 bool,
2114 impl Iterator<Item = &Selection<Anchor>> + '_,
2115 ),
2116 > + '_ {
2117 self.remote_selections
2118 .iter()
2119 .filter(|(replica_id, set)| {
2120 **replica_id != self.text.replica_id() && !set.selections.is_empty()
2121 })
2122 .map(move |(replica_id, set)| {
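                // Binary-search for the sub-slice of this replica's selections
                // that overlaps `range`: from the first selection ending at or
                // after `range.start` up to (but not including) the first
                // selection starting after `range.end`.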
2123 let start_ix = match set.selections.binary_search_by(|probe| {
2124 probe.end.cmp(&range.start, self).then(Ordering::Greater)
2125 }) {
2126 Ok(ix) | Err(ix) => ix,
2127 };
2128 let end_ix = match set.selections.binary_search_by(|probe| {
2129 probe.start.cmp(&range.end, self).then(Ordering::Less)
2130 }) {
2131 Ok(ix) | Err(ix) => ix,
2132 };
2133
2134 (
2135 *replica_id,
2136 set.line_mode,
2137 set.selections[start_ix..end_ix].iter(),
2138 )
2139 })
2140 }
2141
2142 pub fn diagnostics_in_range<'a, T, O>(
2143 &'a self,
2144 search_range: Range<T>,
2145 reversed: bool,
2146 ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
2147 where
2148 T: 'a + Clone + ToOffset,
2149 O: 'a + FromAnchor,
2150 {
2151 self.diagnostics.range(search_range, self, true, reversed)
2152 }
2153
2154 pub fn diagnostic_groups(&self) -> Vec<DiagnosticGroup<Anchor>> {
2155 let mut groups = Vec::new();
2156 self.diagnostics.groups(&mut groups, self);
2157 groups
2158 }
2159
2160 pub fn diagnostic_group<'a, O>(
2161 &'a self,
2162 group_id: usize,
2163 ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
2164 where
2165 O: 'a + FromAnchor,
2166 {
2167 self.diagnostics.group(group_id, self)
2168 }
2169
2170 pub fn diagnostics_update_count(&self) -> usize {
2171 self.diagnostics_update_count
2172 }
2173
2174 pub fn parse_count(&self) -> usize {
2175 self.parse_count
2176 }
2177
2178 pub fn selections_update_count(&self) -> usize {
2179 self.selections_update_count
2180 }
2181
2182 pub fn file(&self) -> Option<&dyn File> {
2183 self.file.as_deref()
2184 }
2185
2186 pub fn file_update_count(&self) -> usize {
2187 self.file_update_count
2188 }
2189}
2190
2191pub fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
2192 indent_size_for_text(text.chars_at(Point::new(row, 0)))
2193}
2194
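// Measures the leading indentation of `text`: the number of leading space or tab
// characters, with the kind taken from the first such character. For example,
// "\t\tfn main()" yields two tabs and "    x" yields four spaces.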
2195pub fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
2196 let mut result = IndentSize::spaces(0);
2197 for c in text {
2198 let kind = match c {
2199 ' ' => IndentKind::Space,
2200 '\t' => IndentKind::Tab,
2201 _ => break,
2202 };
2203 if result.len == 0 {
2204 result.kind = kind;
2205 }
2206 result.len += 1;
2207 }
2208 result
2209}
2210
2211impl Clone for BufferSnapshot {
2212 fn clone(&self) -> Self {
2213 Self {
2214 text: self.text.clone(),
2215 syntax: self.syntax.clone(),
2216 file: self.file.clone(),
2217 remote_selections: self.remote_selections.clone(),
2218 diagnostics: self.diagnostics.clone(),
2219 selections_update_count: self.selections_update_count,
2220 diagnostics_update_count: self.diagnostics_update_count,
2221 file_update_count: self.file_update_count,
2222 language: self.language.clone(),
2223 parse_count: self.parse_count,
2224 }
2225 }
2226}
2227
2228impl Deref for BufferSnapshot {
2229 type Target = text::BufferSnapshot;
2230
2231 fn deref(&self) -> &Self::Target {
2232 &self.text
2233 }
2234}
2235
2236unsafe impl<'a> Send for BufferChunks<'a> {}
2237
2238impl<'a> BufferChunks<'a> {
2239 pub(crate) fn new(
2240 text: &'a Rope,
2241 range: Range<usize>,
2242 syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
2243 diagnostic_endpoints: Vec<DiagnosticEndpoint>,
2244 ) -> Self {
2245 let mut highlights = None;
2246 if let Some((captures, highlight_maps)) = syntax {
2247 highlights = Some(BufferChunkHighlights {
2248 captures,
2249 next_capture: None,
2250 stack: Default::default(),
2251 highlight_maps,
2252 })
2253 }
2254
2255 let diagnostic_endpoints = diagnostic_endpoints.into_iter().peekable();
2256 let chunks = text.chunks_in_range(range.clone());
2257
2258 BufferChunks {
2259 range,
2260 chunks,
2261 diagnostic_endpoints,
2262 error_depth: 0,
2263 warning_depth: 0,
2264 information_depth: 0,
2265 hint_depth: 0,
2266 unnecessary_depth: 0,
2267 highlights,
2268 }
2269 }
2270
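    // Repositions the iterator at `offset`: drops highlight scopes that end at or
    // before the new position, re-applies a pending capture if it already spans
    // the new position, and restricts the capture query to start there.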
2271 pub fn seek(&mut self, offset: usize) {
2272 self.range.start = offset;
2273 self.chunks.seek(self.range.start);
2274 if let Some(highlights) = self.highlights.as_mut() {
2275 highlights
2276 .stack
2277 .retain(|(end_offset, _)| *end_offset > offset);
2278 if let Some(capture) = &highlights.next_capture {
2279 if offset >= capture.node.start_byte() {
2280 let next_capture_end = capture.node.end_byte();
2281 if offset < next_capture_end {
2282 highlights.stack.push((
2283 next_capture_end,
2284 highlights.highlight_maps[capture.grammar_index].get(capture.index),
2285 ));
2286 }
2287 highlights.next_capture.take();
2288 }
2289 }
2290 highlights.captures.set_byte_range(self.range.clone());
2291 }
2292 }
2293
2294 pub fn offset(&self) -> usize {
2295 self.range.start
2296 }
2297
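    // Diagnostics can nest and overlap, so each severity tracks how many
    // diagnostic regions of that severity are currently open; a chunk is
    // reported with the most severe level whose depth is still positive.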
2298 fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
2299 let depth = match endpoint.severity {
2300 DiagnosticSeverity::ERROR => &mut self.error_depth,
2301 DiagnosticSeverity::WARNING => &mut self.warning_depth,
2302 DiagnosticSeverity::INFORMATION => &mut self.information_depth,
2303 DiagnosticSeverity::HINT => &mut self.hint_depth,
2304 _ => return,
2305 };
2306 if endpoint.is_start {
2307 *depth += 1;
2308 } else {
2309 *depth -= 1;
2310 }
2311
2312 if endpoint.is_unnecessary {
2313 if endpoint.is_start {
2314 self.unnecessary_depth += 1;
2315 } else {
2316 self.unnecessary_depth -= 1;
2317 }
2318 }
2319 }
2320
2321 fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
2322 if self.error_depth > 0 {
2323 Some(DiagnosticSeverity::ERROR)
2324 } else if self.warning_depth > 0 {
2325 Some(DiagnosticSeverity::WARNING)
2326 } else if self.information_depth > 0 {
2327 Some(DiagnosticSeverity::INFORMATION)
2328 } else if self.hint_depth > 0 {
2329 Some(DiagnosticSeverity::HINT)
2330 } else {
2331 None
2332 }
2333 }
2334
2335 fn current_code_is_unnecessary(&self) -> bool {
2336 self.unnecessary_depth > 0
2337 }
2338}
2339
2340impl<'a> Iterator for BufferChunks<'a> {
2341 type Item = Chunk<'a>;
2342
2343 fn next(&mut self) -> Option<Self::Item> {
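        // Advance the highlight stack and diagnostic depths up to the current
        // offset, then emit the next slice of text, cut short at the next
        // capture boundary or diagnostic endpoint.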
2344 let mut next_capture_start = usize::MAX;
2345 let mut next_diagnostic_endpoint = usize::MAX;
2346
2347 if let Some(highlights) = self.highlights.as_mut() {
2348 while let Some((parent_capture_end, _)) = highlights.stack.last() {
2349 if *parent_capture_end <= self.range.start {
2350 highlights.stack.pop();
2351 } else {
2352 break;
2353 }
2354 }
2355
2356 if highlights.next_capture.is_none() {
2357 highlights.next_capture = highlights.captures.next();
2358 }
2359
2360 while let Some(capture) = highlights.next_capture.as_ref() {
2361 if self.range.start < capture.node.start_byte() {
2362 next_capture_start = capture.node.start_byte();
2363 break;
2364 } else {
2365 let highlight_id =
2366 highlights.highlight_maps[capture.grammar_index].get(capture.index);
2367 highlights
2368 .stack
2369 .push((capture.node.end_byte(), highlight_id));
2370 highlights.next_capture = highlights.captures.next();
2371 }
2372 }
2373 }
2374
2375 while let Some(endpoint) = self.diagnostic_endpoints.peek().copied() {
2376 if endpoint.offset <= self.range.start {
2377 self.update_diagnostic_depths(endpoint);
2378 self.diagnostic_endpoints.next();
2379 } else {
2380 next_diagnostic_endpoint = endpoint.offset;
2381 break;
2382 }
2383 }
2384
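        // Emit text from the current rope chunk, clipped to the nearest of the
        // chunk's end, the next capture start, the next diagnostic endpoint, and
        // the end of the innermost active highlight scope.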
2385 if let Some(chunk) = self.chunks.peek() {
2386 let chunk_start = self.range.start;
2387 let mut chunk_end = (self.chunks.offset() + chunk.len())
2388 .min(next_capture_start)
2389 .min(next_diagnostic_endpoint);
2390 let mut highlight_id = None;
2391 if let Some(highlights) = self.highlights.as_ref() {
2392 if let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last() {
2393 chunk_end = chunk_end.min(*parent_capture_end);
2394 highlight_id = Some(*parent_highlight_id);
2395 }
2396 }
2397
2398 let slice =
2399 &chunk[chunk_start - self.chunks.offset()..chunk_end - self.chunks.offset()];
2400 self.range.start = chunk_end;
2401 if self.range.start == self.chunks.offset() + chunk.len() {
2402 self.chunks.next().unwrap();
2403 }
2404
2405 Some(Chunk {
2406 text: slice,
2407 syntax_highlight_id: highlight_id,
2408 highlight_style: None,
2409 diagnostic_severity: self.current_diagnostic_severity(),
2410 is_unnecessary: self.current_code_is_unnecessary(),
2411 })
2412 } else {
2413 None
2414 }
2415 }
2416}
2417
2418impl operation_queue::Operation for Operation {
2419 fn lamport_timestamp(&self) -> clock::Lamport {
2420 match self {
2421 Operation::Buffer(_) => {
2422 unreachable!("buffer operations should never be deferred at this layer")
2423 }
2424 Operation::UpdateDiagnostics {
2425 lamport_timestamp, ..
2426 }
2427 | Operation::UpdateSelections {
2428 lamport_timestamp, ..
2429 }
2430 | Operation::UpdateCompletionTriggers {
2431 lamport_timestamp, ..
2432 } => *lamport_timestamp,
2433 }
2434 }
2435}
2436
2437impl Default for Diagnostic {
2438 fn default() -> Self {
2439 Self {
2440 code: None,
2441 severity: DiagnosticSeverity::ERROR,
2442 message: Default::default(),
2443 group_id: 0,
2444 is_primary: false,
2445 is_valid: true,
2446 is_disk_based: false,
2447 is_unnecessary: false,
2448 }
2449 }
2450}
2451
2452impl IndentSize {
2453 pub fn spaces(len: u32) -> Self {
2454 Self {
2455 len,
2456 kind: IndentKind::Space,
2457 }
2458 }
2459
2460 pub fn tab() -> Self {
2461 Self {
2462 len: 1,
2463 kind: IndentKind::Tab,
2464 }
2465 }
2466
2467 pub fn chars(&self) -> impl Iterator<Item = char> {
2468 iter::repeat(self.char()).take(self.len as usize)
2469 }
2470
2471 pub fn char(&self) -> char {
2472 match self.kind {
2473 IndentKind::Space => ' ',
2474 IndentKind::Tab => '\t',
2475 }
2476 }
2477
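    // Adjusts this indent by `size`: `Ordering::Greater` adds to it (adopting
    // `size` wholesale when the indent is empty), `Ordering::Less` subtracts when
    // the kinds match and there is enough to remove, and `Ordering::Equal` leaves
    // it unchanged.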
2478 pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
2479 match direction {
2480 Ordering::Less => {
2481 if self.kind == size.kind && self.len >= size.len {
2482 self.len -= size.len;
2483 }
2484 }
2485 Ordering::Equal => {}
2486 Ordering::Greater => {
2487 if self.len == 0 {
2488 self = size;
2489 } else if self.kind == size.kind {
2490 self.len += size.len;
2491 }
2492 }
2493 }
2494 self
2495 }
2496}
2497
2498impl Completion {
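    // Sorts variable completions ahead of all other kinds, then by the label's
    // filter text.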
2499 pub fn sort_key(&self) -> (usize, &str) {
2500 let kind_key = match self.lsp_completion.kind {
2501 Some(lsp::CompletionItemKind::VARIABLE) => 0,
2502 _ => 1,
2503 };
2504 (kind_key, &self.label.text[self.label.filter_range.clone()])
2505 }
2506
2507 pub fn is_snippet(&self) -> bool {
2508 self.lsp_completion.insert_text_format == Some(lsp::InsertTextFormat::SNIPPET)
2509 }
2510}
2511
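// Groups an ascending sequence of values into contiguous runs no longer than
// `max_len`. For example, `[1, 2, 3, 5, 6]` yields `1..4` and `5..7`, while the
// same input with a `max_len` of 2 yields `1..3`, `3..4`, and `5..7`.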
pub fn contiguous_ranges(
    mut values: impl Iterator<Item = u32>,
    max_len: usize,
) -> impl Iterator<Item = Range<u32>> {
    let mut current_range: Option<Range<u32>> = None;
2518 std::iter::from_fn(move || loop {
2519 if let Some(value) = values.next() {
2520 if let Some(range) = &mut current_range {
2521 if value == range.end && range.len() < max_len {
2522 range.end += 1;
2523 continue;
2524 }
2525 }
2526
2527 let prev_range = current_range.clone();
2528 current_range = Some(value..(value + 1));
2529 if prev_range.is_some() {
2530 return prev_range;
2531 }
2532 } else {
2533 return current_range.take();
2534 }
2535 })
2536}
2537
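// Classifies a character for word-boundary purposes: whitespace, word
// (alphanumeric or '_'), or punctuation (everything else).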
2538pub fn char_kind(c: char) -> CharKind {
2539 if c.is_whitespace() {
2540 CharKind::Whitespace
2541 } else if c.is_alphanumeric() || c == '_' {
2542 CharKind::Word
2543 } else {
2544 CharKind::Punctuation
2545 }
2546}