1pub use crate::{
2 diagnostic_set::DiagnosticSet,
3 highlight_map::{HighlightId, HighlightMap},
4 proto, BracketPair, Grammar, Language, LanguageConfig, LanguageRegistry, PLAIN_TEXT,
5};
6use crate::{
7 diagnostic_set::{DiagnosticEntry, DiagnosticGroup},
8 outline::OutlineItem,
9 syntax_map::{
10 SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures, SyntaxSnapshot, ToTreeSitterPoint,
11 },
12 CodeLabel, LanguageScope, Outline,
13};
14use anyhow::{anyhow, Result};
15use clock::ReplicaId;
16use collections::HashMap;
17use fs::LineEnding;
18use futures::FutureExt as _;
19use gpui::{fonts::HighlightStyle, AppContext, Entity, ModelContext, Task};
20use parking_lot::Mutex;
21use settings::Settings;
22use similar::{ChangeTag, TextDiff};
23use smol::future::yield_now;
24use std::{
25 any::Any,
26 cmp::{self, Ordering},
27 collections::BTreeMap,
28 ffi::OsStr,
29 future::Future,
30 iter::{self, Iterator, Peekable},
31 mem,
32 ops::{Deref, Range},
33 path::{Path, PathBuf},
34 str,
35 sync::Arc,
36 time::{Duration, Instant, SystemTime, UNIX_EPOCH},
37 vec,
38};
39use sum_tree::TreeMap;
40use text::operation_queue::OperationQueue;
41pub use text::{Buffer as TextBuffer, BufferSnapshot as TextBufferSnapshot, Operation as _, *};
42use theme::SyntaxTheme;
43#[cfg(any(test, feature = "test-support"))]
44use util::RandomCharIter;
45use util::{RangeExt, TryFutureExt as _};
46
47#[cfg(any(test, feature = "test-support"))]
48pub use {tree_sitter_rust, tree_sitter_typescript};
49
50pub use lsp::DiagnosticSeverity;
51
52struct GitDiffStatus {
53 diff: git::diff::BufferDiff,
54 update_in_progress: bool,
55 update_requested: bool,
56}
57
58pub struct Buffer {
59 text: TextBuffer,
60 diff_base: Option<String>,
61 git_diff_status: GitDiffStatus,
62 file: Option<Arc<dyn File>>,
63 saved_version: clock::Global,
64 saved_version_fingerprint: RopeFingerprint,
65 saved_mtime: SystemTime,
66 transaction_depth: usize,
67 was_dirty_before_starting_transaction: Option<bool>,
68 language: Option<Arc<Language>>,
69 autoindent_requests: Vec<Arc<AutoindentRequest>>,
70 pending_autoindent: Option<Task<()>>,
71 sync_parse_timeout: Duration,
72 syntax_map: Mutex<SyntaxMap>,
73 parsing_in_background: bool,
74 parse_count: usize,
75 diagnostics: HashMap<usize, DiagnosticSet>, // server_id -> diagnostic set
76 remote_selections: TreeMap<ReplicaId, SelectionSet>,
77 selections_update_count: usize,
78 diagnostics_update_count: usize,
79 diagnostics_timestamp: clock::Lamport,
80 file_update_count: usize,
81 git_diff_update_count: usize,
82 completion_triggers: Vec<String>,
83 completion_triggers_timestamp: clock::Lamport,
84 deferred_ops: OperationQueue<Operation>,
85}
86
87pub struct BufferSnapshot {
88 text: text::BufferSnapshot,
89 pub git_diff: git::diff::BufferDiff,
90 pub(crate) syntax: SyntaxSnapshot,
91 file: Option<Arc<dyn File>>,
92 diagnostics: HashMap<usize, DiagnosticSet>, // server_id -> diagnostic set
93 diagnostics_update_count: usize,
94 file_update_count: usize,
95 git_diff_update_count: usize,
96 remote_selections: TreeMap<ReplicaId, SelectionSet>,
97 selections_update_count: usize,
98 language: Option<Arc<Language>>,
99 parse_count: usize,
100}
101
102#[derive(Clone, Copy, Debug, PartialEq, Eq, Default)]
103pub struct IndentSize {
104 pub len: u32,
105 pub kind: IndentKind,
106}
107
108#[derive(Clone, Copy, Debug, PartialEq, Eq, Default)]
109pub enum IndentKind {
110 #[default]
111 Space,
112 Tab,
113}
114
115#[derive(Copy, Clone, PartialEq, Eq, Debug, Default)]
116pub enum CursorShape {
117 #[default]
118 Bar,
119 Block,
120 Underscore,
121 Hollow,
122}
123
124#[derive(Clone, Debug)]
125struct SelectionSet {
126 line_mode: bool,
127 cursor_shape: CursorShape,
128 selections: Arc<[Selection<Anchor>]>,
129 lamport_timestamp: clock::Lamport,
130}
131
132#[derive(Clone, Debug, PartialEq, Eq)]
133pub struct GroupId {
134 source: Arc<str>,
135 id: usize,
136}
137
138#[derive(Clone, Debug, PartialEq, Eq)]
139pub struct Diagnostic {
140 pub code: Option<String>,
141 pub severity: DiagnosticSeverity,
142 pub message: String,
143 pub group_id: usize,
144 pub is_valid: bool,
145 pub is_primary: bool,
146 pub is_disk_based: bool,
147 pub is_unnecessary: bool,
148}
149
150#[derive(Clone, Debug)]
151pub struct Completion {
152 pub old_range: Range<Anchor>,
153 pub new_text: String,
154 pub label: CodeLabel,
155 pub lsp_completion: lsp::CompletionItem,
156}
157
158#[derive(Clone, Debug)]
159pub struct CodeAction {
160 pub server_id: usize,
161 pub range: Range<Anchor>,
162 pub lsp_action: lsp::CodeAction,
163}
164
165#[derive(Clone, Debug, PartialEq, Eq)]
166pub enum Operation {
167 Buffer(text::Operation),
168
169 UpdateDiagnostics {
170 server_id: usize,
171 diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
172 lamport_timestamp: clock::Lamport,
173 },
174
175 UpdateSelections {
176 selections: Arc<[Selection<Anchor>]>,
177 lamport_timestamp: clock::Lamport,
178 line_mode: bool,
179 cursor_shape: CursorShape,
180 },
181
182 UpdateCompletionTriggers {
183 triggers: Vec<String>,
184 lamport_timestamp: clock::Lamport,
185 },
186}
187
188#[derive(Clone, Debug, PartialEq, Eq)]
189pub enum Event {
190 Operation(Operation),
191 Edited,
192 DirtyChanged,
193 Saved,
194 FileHandleChanged,
195 Reloaded,
196 LanguageChanged,
197 Reparsed,
198 DiagnosticsUpdated,
199 Closed,
200}
201
202pub trait File: Send + Sync {
203 fn as_local(&self) -> Option<&dyn LocalFile>;
204
205 fn is_local(&self) -> bool {
206 self.as_local().is_some()
207 }
208
209 fn mtime(&self) -> SystemTime;
210
211 /// Returns the path of this file relative to the worktree's root directory.
212 fn path(&self) -> &Arc<Path>;
213
214 /// Returns the path of this file relative to the worktree's parent directory (this means it
215 /// includes the name of the worktree's root folder).
216 fn full_path(&self, cx: &AppContext) -> PathBuf;
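
    // Illustrative example (hypothetical paths, not taken from this codebase): for a worktree
    // rooted at "zed" that contains "src/lib.rs", `path()` returns "src/lib.rs" while
    // `full_path()` returns "zed/src/lib.rs".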
217
218 /// Returns the last component of this handle's absolute path. If this handle refers to the root
219 /// of its worktree, then this method will return the name of the worktree itself.
220 fn file_name<'a>(&'a self, cx: &'a AppContext) -> &'a OsStr;
221
222 fn is_deleted(&self) -> bool;
223
224 fn as_any(&self) -> &dyn Any;
225
226 fn to_proto(&self) -> rpc::proto::File;
227}
228
229pub trait LocalFile: File {
230 /// Returns the absolute path of this file.
231 fn abs_path(&self, cx: &AppContext) -> PathBuf;
232
233 fn load(&self, cx: &AppContext) -> Task<Result<String>>;
234
235 fn buffer_reloaded(
236 &self,
237 buffer_id: u64,
238 version: &clock::Global,
239 fingerprint: RopeFingerprint,
240 line_ending: LineEnding,
241 mtime: SystemTime,
242 cx: &mut AppContext,
243 );
244}
245
246#[derive(Clone, Debug)]
247pub enum AutoindentMode {
248 /// Indent each line of inserted text.
249 EachLine,
250 /// Apply the same indentation adjustment to all of the lines
251 /// in a given insertion.
252 Block {
253 /// The original indentation level of the first line of each
254 /// insertion, if it has been copied.
255 original_indent_columns: Vec<u32>,
256 },
257}
258
259#[derive(Clone)]
260struct AutoindentRequest {
261 before_edit: BufferSnapshot,
262 entries: Vec<AutoindentRequestEntry>,
263 is_block_mode: bool,
264}
265
266#[derive(Clone)]
267struct AutoindentRequestEntry {
268 /// A range of the buffer whose indentation should be adjusted.
269 range: Range<Anchor>,
270 /// Whether or not these lines should be considered brand new, for the
271 /// purpose of auto-indent. When text is not new, its indentation will
272 /// only be adjusted if the suggested indentation level has *changed*
273 /// since the edit was made.
274 first_line_is_new: bool,
275 indent_size: IndentSize,
276 original_indent_column: Option<u32>,
277}
278
279#[derive(Debug)]
280struct IndentSuggestion {
281 basis_row: u32,
282 delta: Ordering,
283 within_error: bool,
284}
285
286struct BufferChunkHighlights<'a> {
287 captures: SyntaxMapCaptures<'a>,
288 next_capture: Option<SyntaxMapCapture<'a>>,
289 stack: Vec<(usize, HighlightId)>,
290 highlight_maps: Vec<HighlightMap>,
291}
292
293pub struct BufferChunks<'a> {
294 range: Range<usize>,
295 chunks: text::Chunks<'a>,
296 diagnostic_endpoints: Peekable<vec::IntoIter<DiagnosticEndpoint>>,
297 error_depth: usize,
298 warning_depth: usize,
299 information_depth: usize,
300 hint_depth: usize,
301 unnecessary_depth: usize,
302 highlights: Option<BufferChunkHighlights<'a>>,
303}
304
305#[derive(Clone, Copy, Debug, Default)]
306pub struct Chunk<'a> {
307 pub text: &'a str,
308 pub syntax_highlight_id: Option<HighlightId>,
309 pub highlight_style: Option<HighlightStyle>,
310 pub diagnostic_severity: Option<DiagnosticSeverity>,
311 pub is_unnecessary: bool,
312}
313
314pub struct Diff {
315 pub(crate) base_version: clock::Global,
316 line_ending: LineEnding,
317 edits: Vec<(Range<usize>, Arc<str>)>,
318}
319
320#[derive(Clone, Copy)]
321pub(crate) struct DiagnosticEndpoint {
322 offset: usize,
323 is_start: bool,
324 severity: DiagnosticSeverity,
325 is_unnecessary: bool,
326}
327
328#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
329pub enum CharKind {
330 Punctuation,
331 Whitespace,
332 Word,
333}
334
335impl CharKind {
336 pub fn coerce_punctuation(self, treat_punctuation_as_word: bool) -> Self {
337 if treat_punctuation_as_word && self == CharKind::Punctuation {
338 CharKind::Word
339 } else {
340 self
341 }
342 }
343}
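
// For example (illustrative): `CharKind::Punctuation.coerce_punctuation(true)` yields
// `CharKind::Word`; every other combination returns the original kind unchanged.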
344
345impl Buffer {
346 pub fn new<T: Into<String>>(
347 replica_id: ReplicaId,
348 base_text: T,
349 cx: &mut ModelContext<Self>,
350 ) -> Self {
351 Self::build(
352 TextBuffer::new(replica_id, cx.model_id() as u64, base_text.into()),
353 None,
354 None,
355 )
356 }
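
    // Illustrative usage sketch (not part of the original source; assumes a gpui app context
    // named `cx` that provides `add_model`):
    //
    //     let buffer = cx.add_model(|cx| Buffer::new(0, "fn main() {}", cx));
    //     buffer.update(cx, |buffer, cx| {
    //         buffer.edit([(0..0, "// header\n")], None, cx);
    //     });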
357
358 pub fn from_file<T: Into<String>>(
359 replica_id: ReplicaId,
360 base_text: T,
361 diff_base: Option<T>,
362 file: Arc<dyn File>,
363 cx: &mut ModelContext<Self>,
364 ) -> Self {
365 Self::build(
366 TextBuffer::new(replica_id, cx.model_id() as u64, base_text.into()),
367 diff_base.map(|h| h.into().into_boxed_str().into()),
368 Some(file),
369 )
370 }
371
372 pub fn from_proto(
373 replica_id: ReplicaId,
374 message: proto::BufferState,
375 file: Option<Arc<dyn File>>,
376 ) -> Result<Self> {
377 let buffer = TextBuffer::new(replica_id, message.id, message.base_text);
378 let mut this = Self::build(
379 buffer,
380 message.diff_base.map(|text| text.into_boxed_str().into()),
381 file,
382 );
383 this.text.set_line_ending(proto::deserialize_line_ending(
384 rpc::proto::LineEnding::from_i32(message.line_ending)
385 .ok_or_else(|| anyhow!("missing line_ending"))?,
386 ));
387 this.saved_version = proto::deserialize_version(&message.saved_version);
388 this.saved_version_fingerprint =
389 proto::deserialize_fingerprint(&message.saved_version_fingerprint)?;
390 this.saved_mtime = message
391 .saved_mtime
392 .ok_or_else(|| anyhow!("invalid saved_mtime"))?
393 .into();
394 Ok(this)
395 }
396
397 pub fn to_proto(&self) -> proto::BufferState {
398 proto::BufferState {
399 id: self.remote_id(),
400 file: self.file.as_ref().map(|f| f.to_proto()),
401 base_text: self.base_text().to_string(),
402 diff_base: self.diff_base.as_ref().map(|h| h.to_string()),
403 line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
404 saved_version: proto::serialize_version(&self.saved_version),
405 saved_version_fingerprint: proto::serialize_fingerprint(self.saved_version_fingerprint),
406 saved_mtime: Some(self.saved_mtime.into()),
407 }
408 }
409
410 pub fn serialize_ops(
411 &self,
412 since: Option<clock::Global>,
413 cx: &AppContext,
414 ) -> Task<Vec<proto::Operation>> {
415 let mut operations = Vec::new();
416 operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));
417
418 operations.extend(self.remote_selections.iter().map(|(_, set)| {
419 proto::serialize_operation(&Operation::UpdateSelections {
420 selections: set.selections.clone(),
421 lamport_timestamp: set.lamport_timestamp,
422 line_mode: set.line_mode,
423 cursor_shape: set.cursor_shape,
424 })
425 }));
426
427 for (server_id, diagnostics) in &self.diagnostics {
428 operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
429 lamport_timestamp: self.diagnostics_timestamp,
430 server_id: *server_id,
431 diagnostics: diagnostics.iter().cloned().collect(),
432 }));
433 }
434
435 operations.push(proto::serialize_operation(
436 &Operation::UpdateCompletionTriggers {
437 triggers: self.completion_triggers.clone(),
438 lamport_timestamp: self.completion_triggers_timestamp,
439 },
440 ));
441
442 let text_operations = self.text.operations().clone();
443 cx.background().spawn(async move {
444 let since = since.unwrap_or_default();
445 operations.extend(
446 text_operations
447 .iter()
448 .filter(|(_, op)| !since.observed(op.local_timestamp()))
449 .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
450 );
451 operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
452 operations
453 })
454 }
455
456 pub fn with_language(mut self, language: Arc<Language>, cx: &mut ModelContext<Self>) -> Self {
457 self.set_language(Some(language), cx);
458 self
459 }
460
461 fn build(buffer: TextBuffer, diff_base: Option<String>, file: Option<Arc<dyn File>>) -> Self {
462 let saved_mtime = if let Some(file) = file.as_ref() {
463 file.mtime()
464 } else {
465 UNIX_EPOCH
466 };
467
468 Self {
469 saved_mtime,
470 saved_version: buffer.version(),
471 saved_version_fingerprint: buffer.as_rope().fingerprint(),
472 transaction_depth: 0,
473 was_dirty_before_starting_transaction: None,
474 text: buffer,
475 diff_base,
476 git_diff_status: GitDiffStatus {
477 diff: git::diff::BufferDiff::new(),
478 update_in_progress: false,
479 update_requested: false,
480 },
481 file,
482 syntax_map: Mutex::new(SyntaxMap::new()),
483 parsing_in_background: false,
484 parse_count: 0,
485 sync_parse_timeout: Duration::from_millis(1),
486 autoindent_requests: Default::default(),
487 pending_autoindent: Default::default(),
488 language: None,
489 remote_selections: Default::default(),
490 selections_update_count: 0,
491 diagnostics: Default::default(),
492 diagnostics_update_count: 0,
493 diagnostics_timestamp: Default::default(),
494 file_update_count: 0,
495 git_diff_update_count: 0,
496 completion_triggers: Default::default(),
497 completion_triggers_timestamp: Default::default(),
498 deferred_ops: OperationQueue::new(),
499 }
500 }
501
502 pub fn snapshot(&self) -> BufferSnapshot {
503 let text = self.text.snapshot();
504 let mut syntax_map = self.syntax_map.lock();
505 syntax_map.interpolate(&text);
506 let syntax = syntax_map.snapshot();
507
508 BufferSnapshot {
509 text,
510 syntax,
511 git_diff: self.git_diff_status.diff.clone(),
512 file: self.file.clone(),
513 remote_selections: self.remote_selections.clone(),
514 diagnostics: self.diagnostics.clone(),
515 diagnostics_update_count: self.diagnostics_update_count,
516 file_update_count: self.file_update_count,
517 git_diff_update_count: self.git_diff_update_count,
518 language: self.language.clone(),
519 parse_count: self.parse_count,
520 selections_update_count: self.selections_update_count,
521 }
522 }
523
524 pub fn as_text_snapshot(&self) -> &text::BufferSnapshot {
525 &self.text
526 }
527
528 pub fn text_snapshot(&self) -> text::BufferSnapshot {
529 self.text.snapshot()
530 }
531
532 pub fn file(&self) -> Option<&Arc<dyn File>> {
533 self.file.as_ref()
534 }
535
536 pub fn saved_version(&self) -> &clock::Global {
537 &self.saved_version
538 }
539
540 pub fn saved_version_fingerprint(&self) -> RopeFingerprint {
541 self.saved_version_fingerprint
542 }
543
544 pub fn saved_mtime(&self) -> SystemTime {
545 self.saved_mtime
546 }
547
548 pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut ModelContext<Self>) {
549 self.syntax_map.lock().clear();
550 self.language = language;
551 self.reparse(cx);
552 cx.emit(Event::LanguageChanged);
553 }
554
555 pub fn set_language_registry(&mut self, language_registry: Arc<LanguageRegistry>) {
556 self.syntax_map
557 .lock()
558 .set_language_registry(language_registry);
559 }
560
561 pub fn did_save(
562 &mut self,
563 version: clock::Global,
564 fingerprint: RopeFingerprint,
565 mtime: SystemTime,
566 cx: &mut ModelContext<Self>,
567 ) {
568 self.saved_version = version;
569 self.saved_version_fingerprint = fingerprint;
570 self.saved_mtime = mtime;
571 cx.emit(Event::Saved);
572 cx.notify();
573 }
574
575 pub fn reload(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<Option<Transaction>>> {
576 cx.spawn(|this, mut cx| async move {
577 if let Some((new_mtime, new_text)) = this.read_with(&cx, |this, cx| {
578 let file = this.file.as_ref()?.as_local()?;
579 Some((file.mtime(), file.load(cx)))
580 }) {
581 let new_text = new_text.await?;
582 let diff = this
583 .read_with(&cx, |this, cx| this.diff(new_text, cx))
584 .await;
585 this.update(&mut cx, |this, cx| {
586 if this.version() == diff.base_version {
587 this.finalize_last_transaction();
588 this.apply_diff(diff, cx);
589 if let Some(transaction) = this.finalize_last_transaction().cloned() {
590 this.did_reload(
591 this.version(),
592 this.as_rope().fingerprint(),
593 this.line_ending(),
594 new_mtime,
595 cx,
596 );
597 return Ok(Some(transaction));
598 }
599 }
600 Ok(None)
601 })
602 } else {
603 Ok(None)
604 }
605 })
606 }
607
608 pub fn did_reload(
609 &mut self,
610 version: clock::Global,
611 fingerprint: RopeFingerprint,
612 line_ending: LineEnding,
613 mtime: SystemTime,
614 cx: &mut ModelContext<Self>,
615 ) {
616 self.saved_version = version;
617 self.saved_version_fingerprint = fingerprint;
618 self.text.set_line_ending(line_ending);
619 self.saved_mtime = mtime;
620 if let Some(file) = self.file.as_ref().and_then(|f| f.as_local()) {
621 file.buffer_reloaded(
622 self.remote_id(),
623 &self.saved_version,
624 self.saved_version_fingerprint,
625 self.line_ending(),
626 self.saved_mtime,
627 cx,
628 );
629 }
630 self.git_diff_recalc(cx);
631 cx.emit(Event::Reloaded);
632 cx.notify();
633 }
634
635 pub fn file_updated(
636 &mut self,
637 new_file: Arc<dyn File>,
638 cx: &mut ModelContext<Self>,
639 ) -> Task<()> {
640 let mut file_changed = false;
641 let mut task = Task::ready(());
642
643 if let Some(old_file) = self.file.as_ref() {
644 if new_file.path() != old_file.path() {
645 file_changed = true;
646 }
647
648 if new_file.is_deleted() {
649 if !old_file.is_deleted() {
650 file_changed = true;
651 if !self.is_dirty() {
652 cx.emit(Event::DirtyChanged);
653 }
654 }
655 } else {
656 let new_mtime = new_file.mtime();
657 if new_mtime != old_file.mtime() {
658 file_changed = true;
659
660 if !self.is_dirty() {
661 let reload = self.reload(cx).log_err().map(drop);
662 task = cx.foreground().spawn(reload);
663 }
664 }
665 }
666 } else {
667 file_changed = true;
668 };
669
670 if file_changed {
671 self.file_update_count += 1;
672 cx.emit(Event::FileHandleChanged);
673 cx.notify();
674 }
675 self.file = Some(new_file);
676 task
677 }
678
679 pub fn diff_base(&self) -> Option<&str> {
680 self.diff_base.as_deref()
681 }
682
683 pub fn set_diff_base(&mut self, diff_base: Option<String>, cx: &mut ModelContext<Self>) {
684 self.diff_base = diff_base;
685 self.git_diff_recalc(cx);
686 }
687
688 pub fn needs_git_diff_recalc(&self) -> bool {
689 self.git_diff_status.diff.needs_update(self)
690 }
691
692 pub fn git_diff_recalc(&mut self, cx: &mut ModelContext<Self>) {
693 if self.git_diff_status.update_in_progress {
694 self.git_diff_status.update_requested = true;
695 return;
696 }
697
698 if let Some(diff_base) = &self.diff_base {
699 let snapshot = self.snapshot();
700 let diff_base = diff_base.clone();
701
702 let mut diff = self.git_diff_status.diff.clone();
703 let diff = cx.background().spawn(async move {
704 diff.update(&diff_base, &snapshot).await;
705 diff
706 });
707
708 cx.spawn_weak(|this, mut cx| async move {
709 let buffer_diff = diff.await;
710 if let Some(this) = this.upgrade(&cx) {
711 this.update(&mut cx, |this, cx| {
712 this.git_diff_status.diff = buffer_diff;
713 this.git_diff_update_count += 1;
714 cx.notify();
715
716 this.git_diff_status.update_in_progress = false;
717 if this.git_diff_status.update_requested {
718 this.git_diff_recalc(cx);
719 }
720 })
721 }
722 })
723 .detach()
724 } else {
725 let snapshot = self.snapshot();
726 self.git_diff_status.diff.clear(&snapshot);
727 self.git_diff_update_count += 1;
728 cx.notify();
729 }
730 }
731
732 pub fn close(&mut self, cx: &mut ModelContext<Self>) {
733 cx.emit(Event::Closed);
734 }
735
736 pub fn language(&self) -> Option<&Arc<Language>> {
737 self.language.as_ref()
738 }
739
740 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
741 let offset = position.to_offset(self);
742 self.syntax_map
743 .lock()
744 .layers_for_range(offset..offset, &self.text)
745 .last()
746 .map(|info| info.language.clone())
747 .or_else(|| self.language.clone())
748 }
749
750 pub fn parse_count(&self) -> usize {
751 self.parse_count
752 }
753
754 pub fn selections_update_count(&self) -> usize {
755 self.selections_update_count
756 }
757
758 pub fn diagnostics_update_count(&self) -> usize {
759 self.diagnostics_update_count
760 }
761
762 pub fn file_update_count(&self) -> usize {
763 self.file_update_count
764 }
765
766 pub fn git_diff_update_count(&self) -> usize {
767 self.git_diff_update_count
768 }
769
770 #[cfg(any(test, feature = "test-support"))]
771 pub fn is_parsing(&self) -> bool {
772 self.parsing_in_background
773 }
774
775 pub fn contains_unknown_injections(&self) -> bool {
776 self.syntax_map.lock().contains_unknown_injections()
777 }
778
779 #[cfg(test)]
780 pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
781 self.sync_parse_timeout = timeout;
782 }
783
784 /// Called after an edit to synchronize the buffer's main parse tree with
785 /// the buffer's new underlying state.
786 ///
787 /// Locks the syntax map and interpolates the edits since the last reparse
788 /// into the foreground syntax tree.
789 ///
790 /// Then takes a stable snapshot of the syntax map before unlocking it.
791 /// The snapshot with the interpolated edits is sent to a background thread,
792 /// where we ask Tree-sitter to perform an incremental parse.
793 ///
794 /// Meanwhile, in the foreground, we block the main thread for up to 1ms
795 /// waiting on the parse to complete. As soon as it completes, we proceed
796 /// synchronously, unless a 1ms timeout elapses.
797 ///
    /// If we time out waiting on the parse, we spawn a second task that waits
    /// until the parse does complete, and we return with the interpolated tree still
    /// in the foreground. When the background parse completes, we call back into
    /// the main thread and assign its result as the new foreground parse state.
802 ///
803 /// If the buffer or grammar changed since the start of the background parse,
804 /// initiate an additional reparse recursively. To avoid concurrent parses
805 /// for the same buffer, we only initiate a new parse if we are not already
806 /// parsing in the background.
807 pub fn reparse(&mut self, cx: &mut ModelContext<Self>) {
808 if self.parsing_in_background {
809 return;
810 }
811 let language = if let Some(language) = self.language.clone() {
812 language
813 } else {
814 return;
815 };
816
817 let text = self.text_snapshot();
818 let parsed_version = self.version();
819
820 let mut syntax_map = self.syntax_map.lock();
821 syntax_map.interpolate(&text);
822 let language_registry = syntax_map.language_registry();
823 let mut syntax_snapshot = syntax_map.snapshot();
824 drop(syntax_map);
825
826 let parse_task = cx.background().spawn({
827 let language = language.clone();
828 let language_registry = language_registry.clone();
829 async move {
830 syntax_snapshot.reparse(&text, language_registry, language);
831 syntax_snapshot
832 }
833 });
834
835 match cx
836 .background()
837 .block_with_timeout(self.sync_parse_timeout, parse_task)
838 {
839 Ok(new_syntax_snapshot) => {
840 self.did_finish_parsing(new_syntax_snapshot, cx);
841 return;
842 }
843 Err(parse_task) => {
844 self.parsing_in_background = true;
845 cx.spawn(move |this, mut cx| async move {
846 let new_syntax_map = parse_task.await;
847 this.update(&mut cx, move |this, cx| {
848 let grammar_changed =
849 this.language.as_ref().map_or(true, |current_language| {
850 !Arc::ptr_eq(&language, current_language)
851 });
852 let language_registry_changed = new_syntax_map
853 .contains_unknown_injections()
854 && language_registry.map_or(false, |registry| {
855 registry.version() != new_syntax_map.language_registry_version()
856 });
857 let parse_again = language_registry_changed
858 || grammar_changed
859 || this.version.changed_since(&parsed_version);
860 this.did_finish_parsing(new_syntax_map, cx);
861 this.parsing_in_background = false;
862 if parse_again {
863 this.reparse(cx);
864 }
865 });
866 })
867 .detach();
868 }
869 }
870 }
871
872 fn did_finish_parsing(&mut self, syntax_snapshot: SyntaxSnapshot, cx: &mut ModelContext<Self>) {
873 self.parse_count += 1;
874 self.syntax_map.lock().did_parse(syntax_snapshot);
875 self.request_autoindent(cx);
876 cx.emit(Event::Reparsed);
877 cx.notify();
878 }
879
880 pub fn update_diagnostics(
881 &mut self,
882 server_id: usize,
883 diagnostics: DiagnosticSet,
884 cx: &mut ModelContext<Self>,
885 ) {
886 let lamport_timestamp = self.text.lamport_clock.tick();
887 let op = Operation::UpdateDiagnostics {
888 server_id,
889 diagnostics: diagnostics.iter().cloned().collect(),
890 lamport_timestamp,
891 };
892 self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
893 self.send_operation(op, cx);
894 }
895
896 fn request_autoindent(&mut self, cx: &mut ModelContext<Self>) {
897 if let Some(indent_sizes) = self.compute_autoindents() {
898 let indent_sizes = cx.background().spawn(indent_sizes);
899 match cx
900 .background()
901 .block_with_timeout(Duration::from_micros(500), indent_sizes)
902 {
903 Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
904 Err(indent_sizes) => {
905 self.pending_autoindent = Some(cx.spawn(|this, mut cx| async move {
906 let indent_sizes = indent_sizes.await;
907 this.update(&mut cx, |this, cx| {
908 this.apply_autoindents(indent_sizes, cx);
909 });
910 }));
911 }
912 }
913 } else {
914 self.autoindent_requests.clear();
915 }
916 }
917
918 fn compute_autoindents(&self) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>>> {
919 let max_rows_between_yields = 100;
920 let snapshot = self.snapshot();
921 if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
922 return None;
923 }
924
925 let autoindent_requests = self.autoindent_requests.clone();
926 Some(async move {
927 let mut indent_sizes = BTreeMap::new();
928 for request in autoindent_requests {
929 // Resolve each edited range to its row in the current buffer and in the
930 // buffer before this batch of edits.
931 let mut row_ranges = Vec::new();
932 let mut old_to_new_rows = BTreeMap::new();
933 let mut language_indent_sizes_by_new_row = Vec::new();
934 for entry in &request.entries {
935 let position = entry.range.start;
936 let new_row = position.to_point(&snapshot).row;
937 let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
938 language_indent_sizes_by_new_row.push((new_row, entry.indent_size));
939
940 if !entry.first_line_is_new {
941 let old_row = position.to_point(&request.before_edit).row;
942 old_to_new_rows.insert(old_row, new_row);
943 }
944 row_ranges.push((new_row..new_end_row, entry.original_indent_column));
945 }
946
947 // Build a map containing the suggested indentation for each of the edited lines
948 // with respect to the state of the buffer before these edits. This map is keyed
949 // by the rows for these lines in the current state of the buffer.
950 let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
951 let old_edited_ranges =
952 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
953 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
954 let mut language_indent_size = IndentSize::default();
955 for old_edited_range in old_edited_ranges {
956 let suggestions = request
957 .before_edit
958 .suggest_autoindents(old_edited_range.clone())
959 .into_iter()
960 .flatten();
961 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
962 if let Some(suggestion) = suggestion {
963 let new_row = *old_to_new_rows.get(&old_row).unwrap();
964
965 // Find the indent size based on the language for this row.
966 while let Some((row, size)) = language_indent_sizes.peek() {
967 if *row > new_row {
968 break;
969 }
970 language_indent_size = *size;
971 language_indent_sizes.next();
972 }
973
974 let suggested_indent = old_to_new_rows
975 .get(&suggestion.basis_row)
976 .and_then(|from_row| {
977 Some(old_suggestions.get(from_row).copied()?.0)
978 })
979 .unwrap_or_else(|| {
980 request
981 .before_edit
982 .indent_size_for_line(suggestion.basis_row)
983 })
984 .with_delta(suggestion.delta, language_indent_size);
985 old_suggestions
986 .insert(new_row, (suggested_indent, suggestion.within_error));
987 }
988 }
989 yield_now().await;
990 }
991
992 // In block mode, only compute indentation suggestions for the first line
993 // of each insertion. Otherwise, compute suggestions for every inserted line.
994 let new_edited_row_ranges = contiguous_ranges(
995 row_ranges.iter().flat_map(|(range, _)| {
996 if request.is_block_mode {
997 range.start..range.start + 1
998 } else {
999 range.clone()
1000 }
1001 }),
1002 max_rows_between_yields,
1003 );
1004
1005 // Compute new suggestions for each line, but only include them in the result
1006 // if they differ from the old suggestion for that line.
1007 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1008 let mut language_indent_size = IndentSize::default();
1009 for new_edited_row_range in new_edited_row_ranges {
1010 let suggestions = snapshot
1011 .suggest_autoindents(new_edited_row_range.clone())
1012 .into_iter()
1013 .flatten();
1014 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
1015 if let Some(suggestion) = suggestion {
1016 // Find the indent size based on the language for this row.
1017 while let Some((row, size)) = language_indent_sizes.peek() {
1018 if *row > new_row {
1019 break;
1020 }
1021 language_indent_size = *size;
1022 language_indent_sizes.next();
1023 }
1024
1025 let suggested_indent = indent_sizes
1026 .get(&suggestion.basis_row)
1027 .copied()
1028 .unwrap_or_else(|| {
1029 snapshot.indent_size_for_line(suggestion.basis_row)
1030 })
1031 .with_delta(suggestion.delta, language_indent_size);
1032 if old_suggestions.get(&new_row).map_or(
1033 true,
1034 |(old_indentation, was_within_error)| {
1035 suggested_indent != *old_indentation
1036 && (!suggestion.within_error || *was_within_error)
1037 },
1038 ) {
1039 indent_sizes.insert(new_row, suggested_indent);
1040 }
1041 }
1042 }
1043 yield_now().await;
1044 }
1045
1046 // For each block of inserted text, adjust the indentation of the remaining
1047 // lines of the block by the same amount as the first line was adjusted.
1048 if request.is_block_mode {
1049 for (row_range, original_indent_column) in
1050 row_ranges
1051 .into_iter()
1052 .filter_map(|(range, original_indent_column)| {
1053 if range.len() > 1 {
1054 Some((range, original_indent_column?))
1055 } else {
1056 None
1057 }
1058 })
1059 {
1060 let new_indent = indent_sizes
1061 .get(&row_range.start)
1062 .copied()
1063 .unwrap_or_else(|| snapshot.indent_size_for_line(row_range.start));
1064 let delta = new_indent.len as i64 - original_indent_column as i64;
1065 if delta != 0 {
1066 for row in row_range.skip(1) {
1067 indent_sizes.entry(row).or_insert_with(|| {
1068 let mut size = snapshot.indent_size_for_line(row);
1069 if size.kind == new_indent.kind {
1070 match delta.cmp(&0) {
1071 Ordering::Greater => size.len += delta as u32,
1072 Ordering::Less => {
1073 size.len = size.len.saturating_sub(-delta as u32)
1074 }
1075 Ordering::Equal => {}
1076 }
1077 }
1078 size
1079 });
1080 }
1081 }
1082 }
1083 }
1084 }
1085
1086 indent_sizes
1087 })
1088 }
1089
1090 fn apply_autoindents(
1091 &mut self,
1092 indent_sizes: BTreeMap<u32, IndentSize>,
1093 cx: &mut ModelContext<Self>,
1094 ) {
1095 self.autoindent_requests.clear();
1096
1097 let edits: Vec<_> = indent_sizes
1098 .into_iter()
1099 .filter_map(|(row, indent_size)| {
1100 let current_size = indent_size_for_line(self, row);
1101 Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
1102 })
1103 .collect();
1104
1105 self.edit(edits, None, cx);
1106 }
1107
    // Create a minimal edit that will cause the given row to be indented
1109 // with the given size. After applying this edit, the length of the line
1110 // will always be at least `new_size.len`.
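    // For example (illustrative): going from two spaces to four spaces on row 3 yields
    // `Some((Point::new(3, 0)..Point::new(3, 0), "  ".to_string()))`, i.e. two spaces are
    // inserted at the start of the line; shrinking instead deletes the surplus prefix.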
1111 pub fn edit_for_indent_size_adjustment(
1112 row: u32,
1113 current_size: IndentSize,
1114 new_size: IndentSize,
1115 ) -> Option<(Range<Point>, String)> {
1116 if new_size.kind != current_size.kind {
1117 Some((
1118 Point::new(row, 0)..Point::new(row, current_size.len),
1119 iter::repeat(new_size.char())
1120 .take(new_size.len as usize)
1121 .collect::<String>(),
1122 ))
1123 } else {
            match new_size.len.cmp(&current_size.len) {
1125 Ordering::Greater => {
1126 let point = Point::new(row, 0);
1127 Some((
1128 point..point,
1129 iter::repeat(new_size.char())
1130 .take((new_size.len - current_size.len) as usize)
1131 .collect::<String>(),
1132 ))
1133 }
1134
1135 Ordering::Less => Some((
1136 Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
1137 String::new(),
1138 )),
1139
1140 Ordering::Equal => None,
1141 }
1142 }
1143 }
1144
1145 pub fn diff(&self, mut new_text: String, cx: &AppContext) -> Task<Diff> {
1146 let old_text = self.as_rope().clone();
1147 let base_version = self.version();
1148 cx.background().spawn(async move {
1149 let old_text = old_text.to_string();
1150 let line_ending = LineEnding::detect(&new_text);
1151 LineEnding::normalize(&mut new_text);
1152 let diff = TextDiff::from_chars(old_text.as_str(), new_text.as_str());
1153 let mut edits = Vec::new();
1154 let mut offset = 0;
1155 let empty: Arc<str> = "".into();
1156 for change in diff.iter_all_changes() {
1157 let value = change.value();
1158 let end_offset = offset + value.len();
1159 match change.tag() {
1160 ChangeTag::Equal => {
1161 offset = end_offset;
1162 }
1163 ChangeTag::Delete => {
1164 edits.push((offset..end_offset, empty.clone()));
1165 offset = end_offset;
1166 }
1167 ChangeTag::Insert => {
1168 edits.push((offset..offset, value.into()));
1169 }
1170 }
1171 }
1172 Diff {
1173 base_version,
1174 line_ending,
1175 edits,
1176 }
1177 })
1178 }
1179
    /// Spawns a background task that searches the buffer for any whitespace
    /// at the ends of lines, and returns a `Diff` that removes that whitespace.
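    ///
    /// For example (illustrative): for a buffer containing `"let x = 1;  \n"`, the returned
    /// `Diff` would delete the two spaces preceding the newline.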
1182 pub fn remove_trailing_whitespace(&self, cx: &AppContext) -> Task<Diff> {
1183 let old_text = self.as_rope().clone();
1184 let line_ending = self.line_ending();
1185 let base_version = self.version();
1186 cx.background().spawn(async move {
1187 let ranges = trailing_whitespace_ranges(&old_text);
1188 let empty = Arc::<str>::from("");
1189 Diff {
1190 base_version,
1191 line_ending,
1192 edits: ranges
1193 .into_iter()
1194 .map(|range| (range, empty.clone()))
1195 .collect(),
1196 }
1197 })
1198 }
1199
1200 /// Ensure that the buffer ends with a single newline character, and
1201 /// no other whitespace.
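    ///
    /// For example (illustrative): a buffer whose text ends in `"}\n\n  "` is edited so that
    /// it ends in `"}\n"`.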
1202 pub fn ensure_final_newline(&mut self, cx: &mut ModelContext<Self>) {
1203 let len = self.len();
1204 let mut offset = len;
1205 for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
1206 let non_whitespace_len = chunk
1207 .trim_end_matches(|c: char| c.is_ascii_whitespace())
1208 .len();
1209 offset -= chunk.len();
1210 offset += non_whitespace_len;
1211 if non_whitespace_len != 0 {
1212 if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
1213 return;
1214 }
1215 break;
1216 }
1217 }
1218 self.edit([(offset..len, "\n")], None, cx);
1219 }
1220
1221 /// Apply a diff to the buffer. If the buffer has changed since the given diff was
1222 /// calculated, then adjust the diff to account for those changes, and discard any
1223 /// parts of the diff that conflict with those changes.
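    ///
    /// For example (illustrative): a hunk that lies entirely after an intervening edit is
    /// shifted by that edit's net change in length, while a hunk that overlaps an
    /// intervening edit is dropped entirely.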
1224 pub fn apply_diff(&mut self, diff: Diff, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
1225 // Check for any edits to the buffer that have occurred since this diff
1226 // was computed.
1227 let snapshot = self.snapshot();
1228 let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
1229 let mut delta = 0;
1230 let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
1231 while let Some(edit_since) = edits_since.peek() {
1232 // If the edit occurs after a diff hunk, then it does not
1233 // affect that hunk.
1234 if edit_since.old.start > range.end {
1235 break;
1236 }
1237 // If the edit precedes the diff hunk, then adjust the hunk
1238 // to reflect the edit.
1239 else if edit_since.old.end < range.start {
1240 delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
1241 edits_since.next();
1242 }
1243 // If the edit intersects a diff hunk, then discard that hunk.
1244 else {
1245 return None;
1246 }
1247 }
1248
1249 let start = (range.start as i64 + delta) as usize;
1250 let end = (range.end as i64 + delta) as usize;
1251 Some((start..end, new_text))
1252 });
1253
1254 self.start_transaction();
1255 self.text.set_line_ending(diff.line_ending);
1256 self.edit(adjusted_edits, None, cx);
1257 self.end_transaction(cx)
1258 }
1259
1260 pub fn is_dirty(&self) -> bool {
1261 self.saved_version_fingerprint != self.as_rope().fingerprint()
1262 || self.file.as_ref().map_or(false, |file| file.is_deleted())
1263 }
1264
1265 pub fn has_conflict(&self) -> bool {
1266 self.saved_version_fingerprint != self.as_rope().fingerprint()
1267 && self
1268 .file
1269 .as_ref()
1270 .map_or(false, |file| file.mtime() > self.saved_mtime)
1271 }
1272
1273 pub fn subscribe(&mut self) -> Subscription {
1274 self.text.subscribe()
1275 }
1276
1277 pub fn start_transaction(&mut self) -> Option<TransactionId> {
1278 self.start_transaction_at(Instant::now())
1279 }
1280
1281 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
1282 self.transaction_depth += 1;
1283 if self.was_dirty_before_starting_transaction.is_none() {
1284 self.was_dirty_before_starting_transaction = Some(self.is_dirty());
1285 }
1286 self.text.start_transaction_at(now)
1287 }
1288
1289 pub fn end_transaction(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
1290 self.end_transaction_at(Instant::now(), cx)
1291 }
1292
1293 pub fn end_transaction_at(
1294 &mut self,
1295 now: Instant,
1296 cx: &mut ModelContext<Self>,
1297 ) -> Option<TransactionId> {
1298 assert!(self.transaction_depth > 0);
1299 self.transaction_depth -= 1;
1300 let was_dirty = if self.transaction_depth == 0 {
1301 self.was_dirty_before_starting_transaction.take().unwrap()
1302 } else {
1303 false
1304 };
1305 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
1306 self.did_edit(&start_version, was_dirty, cx);
1307 Some(transaction_id)
1308 } else {
1309 None
1310 }
1311 }
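
    // Illustrative sketch (not part of the original source): grouping several edits into a
    // single undoable transaction, assuming `buffer` is a `Buffer` and `cx` its
    // `ModelContext`:
    //
    //     buffer.start_transaction();
    //     buffer.edit([(0..0, "fn ")], None, cx);
    //     buffer.edit([(3..3, "main")], None, cx);
    //     let transaction_id = buffer.end_transaction(cx);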
1312
1313 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
1314 self.text.push_transaction(transaction, now);
1315 }
1316
1317 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
1318 self.text.finalize_last_transaction()
1319 }
1320
1321 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
1322 self.text.group_until_transaction(transaction_id);
1323 }
1324
1325 pub fn forget_transaction(&mut self, transaction_id: TransactionId) {
1326 self.text.forget_transaction(transaction_id);
1327 }
1328
1329 pub fn wait_for_edits(
1330 &mut self,
1331 edit_ids: impl IntoIterator<Item = clock::Local>,
1332 ) -> impl Future<Output = Result<()>> {
1333 self.text.wait_for_edits(edit_ids)
1334 }
1335
1336 pub fn wait_for_anchors(
1337 &mut self,
1338 anchors: impl IntoIterator<Item = Anchor>,
1339 ) -> impl 'static + Future<Output = Result<()>> {
1340 self.text.wait_for_anchors(anchors)
1341 }
1342
1343 pub fn wait_for_version(&mut self, version: clock::Global) -> impl Future<Output = Result<()>> {
1344 self.text.wait_for_version(version)
1345 }
1346
1347 pub fn give_up_waiting(&mut self) {
1348 self.text.give_up_waiting();
1349 }
1350
1351 pub fn set_active_selections(
1352 &mut self,
1353 selections: Arc<[Selection<Anchor>]>,
1354 line_mode: bool,
1355 cursor_shape: CursorShape,
1356 cx: &mut ModelContext<Self>,
1357 ) {
1358 let lamport_timestamp = self.text.lamport_clock.tick();
1359 self.remote_selections.insert(
1360 self.text.replica_id(),
1361 SelectionSet {
1362 selections: selections.clone(),
1363 lamport_timestamp,
1364 line_mode,
1365 cursor_shape,
1366 },
1367 );
1368 self.send_operation(
1369 Operation::UpdateSelections {
1370 selections,
1371 line_mode,
1372 lamport_timestamp,
1373 cursor_shape,
1374 },
1375 cx,
1376 );
1377 }
1378
1379 pub fn remove_active_selections(&mut self, cx: &mut ModelContext<Self>) {
1380 if self
1381 .remote_selections
1382 .get(&self.text.replica_id())
1383 .map_or(true, |set| !set.selections.is_empty())
1384 {
1385 self.set_active_selections(Arc::from([]), false, Default::default(), cx);
1386 }
1387 }
1388
1389 pub fn set_text<T>(&mut self, text: T, cx: &mut ModelContext<Self>) -> Option<clock::Local>
1390 where
1391 T: Into<Arc<str>>,
1392 {
1393 self.autoindent_requests.clear();
1394 self.edit([(0..self.len(), text)], None, cx)
1395 }
1396
1397 pub fn edit<I, S, T>(
1398 &mut self,
1399 edits_iter: I,
1400 autoindent_mode: Option<AutoindentMode>,
1401 cx: &mut ModelContext<Self>,
1402 ) -> Option<clock::Local>
1403 where
1404 I: IntoIterator<Item = (Range<S>, T)>,
1405 S: ToOffset,
1406 T: Into<Arc<str>>,
1407 {
1408 // Skip invalid edits and coalesce contiguous ones.
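        // For example (illustrative): the edits `(0..2, "a")` and `(1..3, "b")`, supplied in
        // that order, are merged by the loop below into the single edit `(0..3, "ab")`.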
1409 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
1410 for (range, new_text) in edits_iter {
1411 let mut range = range.start.to_offset(self)..range.end.to_offset(self);
1412 if range.start > range.end {
1413 mem::swap(&mut range.start, &mut range.end);
1414 }
1415 let new_text = new_text.into();
1416 if !new_text.is_empty() || !range.is_empty() {
1417 if let Some((prev_range, prev_text)) = edits.last_mut() {
1418 if prev_range.end >= range.start {
1419 prev_range.end = cmp::max(prev_range.end, range.end);
1420 *prev_text = format!("{prev_text}{new_text}").into();
1421 } else {
1422 edits.push((range, new_text));
1423 }
1424 } else {
1425 edits.push((range, new_text));
1426 }
1427 }
1428 }
1429 if edits.is_empty() {
1430 return None;
1431 }
1432
1433 self.start_transaction();
1434 self.pending_autoindent.take();
1435 let autoindent_request = autoindent_mode
1436 .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
1437
1438 let edit_operation = self.text.edit(edits.iter().cloned());
1439 let edit_id = edit_operation.local_timestamp();
1440
1441 if let Some((before_edit, mode)) = autoindent_request {
1442 let mut delta = 0isize;
1443 let entries = edits
1444 .into_iter()
1445 .enumerate()
1446 .zip(&edit_operation.as_edit().unwrap().new_text)
1447 .map(|((ix, (range, _)), new_text)| {
1448 let new_text_length = new_text.len();
1449 let old_start = range.start.to_point(&before_edit);
1450 let new_start = (delta + range.start as isize) as usize;
1451 delta += new_text_length as isize - (range.end as isize - range.start as isize);
1452
1453 let mut range_of_insertion_to_indent = 0..new_text_length;
1454 let mut first_line_is_new = false;
1455 let mut original_indent_column = None;
1456
1457 // When inserting an entire line at the beginning of an existing line,
1458 // treat the insertion as new.
1459 if new_text.contains('\n')
1460 && old_start.column <= before_edit.indent_size_for_line(old_start.row).len
1461 {
1462 first_line_is_new = true;
1463 }
1464
1465 // When inserting text starting with a newline, avoid auto-indenting the
1466 // previous line.
1467 if new_text.starts_with('\n') {
1468 range_of_insertion_to_indent.start += 1;
1469 first_line_is_new = true;
1470 }
1471
1472 // Avoid auto-indenting after the insertion.
1473 if let AutoindentMode::Block {
1474 original_indent_columns,
1475 } = &mode
1476 {
1477 original_indent_column =
1478 Some(original_indent_columns.get(ix).copied().unwrap_or_else(|| {
1479 indent_size_for_text(
1480 new_text[range_of_insertion_to_indent.clone()].chars(),
1481 )
1482 .len
1483 }));
1484 if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
1485 range_of_insertion_to_indent.end -= 1;
1486 }
1487 }
1488
1489 AutoindentRequestEntry {
1490 first_line_is_new,
1491 original_indent_column,
1492 indent_size: before_edit.language_indent_size_at(range.start, cx),
1493 range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
1494 ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
1495 }
1496 })
1497 .collect();
1498
1499 self.autoindent_requests.push(Arc::new(AutoindentRequest {
1500 before_edit,
1501 entries,
1502 is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
1503 }));
1504 }
1505
1506 self.end_transaction(cx);
1507 self.send_operation(Operation::Buffer(edit_operation), cx);
1508 Some(edit_id)
1509 }
1510
1511 fn did_edit(
1512 &mut self,
1513 old_version: &clock::Global,
1514 was_dirty: bool,
1515 cx: &mut ModelContext<Self>,
1516 ) {
1517 if self.edits_since::<usize>(old_version).next().is_none() {
1518 return;
1519 }
1520
1521 self.reparse(cx);
1522
1523 cx.emit(Event::Edited);
1524 if was_dirty != self.is_dirty() {
1525 cx.emit(Event::DirtyChanged);
1526 }
1527 cx.notify();
1528 }
1529
1530 pub fn apply_ops<I: IntoIterator<Item = Operation>>(
1531 &mut self,
1532 ops: I,
1533 cx: &mut ModelContext<Self>,
1534 ) -> Result<()> {
1535 self.pending_autoindent.take();
1536 let was_dirty = self.is_dirty();
1537 let old_version = self.version.clone();
1538 let mut deferred_ops = Vec::new();
1539 let buffer_ops = ops
1540 .into_iter()
1541 .filter_map(|op| match op {
1542 Operation::Buffer(op) => Some(op),
1543 _ => {
1544 if self.can_apply_op(&op) {
1545 self.apply_op(op, cx);
1546 } else {
1547 deferred_ops.push(op);
1548 }
1549 None
1550 }
1551 })
1552 .collect::<Vec<_>>();
1553 self.text.apply_ops(buffer_ops)?;
1554 self.deferred_ops.insert(deferred_ops);
1555 self.flush_deferred_ops(cx);
1556 self.did_edit(&old_version, was_dirty, cx);
1557 // Notify independently of whether the buffer was edited as the operations could include a
1558 // selection update.
1559 cx.notify();
1560 Ok(())
1561 }
1562
1563 fn flush_deferred_ops(&mut self, cx: &mut ModelContext<Self>) {
1564 let mut deferred_ops = Vec::new();
1565 for op in self.deferred_ops.drain().iter().cloned() {
1566 if self.can_apply_op(&op) {
1567 self.apply_op(op, cx);
1568 } else {
1569 deferred_ops.push(op);
1570 }
1571 }
1572 self.deferred_ops.insert(deferred_ops);
1573 }
1574
1575 fn can_apply_op(&self, operation: &Operation) -> bool {
1576 match operation {
1577 Operation::Buffer(_) => {
1578 unreachable!("buffer operations should never be applied at this layer")
1579 }
1580 Operation::UpdateDiagnostics {
1581 diagnostics: diagnostic_set,
1582 ..
1583 } => diagnostic_set.iter().all(|diagnostic| {
1584 self.text.can_resolve(&diagnostic.range.start)
1585 && self.text.can_resolve(&diagnostic.range.end)
1586 }),
1587 Operation::UpdateSelections { selections, .. } => selections
1588 .iter()
1589 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
1590 Operation::UpdateCompletionTriggers { .. } => true,
1591 }
1592 }
1593
1594 fn apply_op(&mut self, operation: Operation, cx: &mut ModelContext<Self>) {
1595 match operation {
1596 Operation::Buffer(_) => {
1597 unreachable!("buffer operations should never be applied at this layer")
1598 }
1599 Operation::UpdateDiagnostics {
1600 server_id,
1601 diagnostics: diagnostic_set,
1602 lamport_timestamp,
1603 } => {
1604 let snapshot = self.snapshot();
1605 self.apply_diagnostic_update(
1606 server_id,
1607 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
1608 lamport_timestamp,
1609 cx,
1610 );
1611 }
1612 Operation::UpdateSelections {
1613 selections,
1614 lamport_timestamp,
1615 line_mode,
1616 cursor_shape,
1617 } => {
1618 if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id) {
1619 if set.lamport_timestamp > lamport_timestamp {
1620 return;
1621 }
1622 }
1623
1624 self.remote_selections.insert(
1625 lamport_timestamp.replica_id,
1626 SelectionSet {
1627 selections,
1628 lamport_timestamp,
1629 line_mode,
1630 cursor_shape,
1631 },
1632 );
1633 self.text.lamport_clock.observe(lamport_timestamp);
1634 self.selections_update_count += 1;
1635 }
1636 Operation::UpdateCompletionTriggers {
1637 triggers,
1638 lamport_timestamp,
1639 } => {
1640 self.completion_triggers = triggers;
1641 self.text.lamport_clock.observe(lamport_timestamp);
1642 }
1643 }
1644 }
1645
1646 fn apply_diagnostic_update(
1647 &mut self,
1648 server_id: usize,
1649 diagnostics: DiagnosticSet,
1650 lamport_timestamp: clock::Lamport,
1651 cx: &mut ModelContext<Self>,
1652 ) {
1653 if lamport_timestamp > self.diagnostics_timestamp {
1654 self.diagnostics.insert(server_id, diagnostics);
1655 self.diagnostics_timestamp = lamport_timestamp;
1656 self.diagnostics_update_count += 1;
1657 self.text.lamport_clock.observe(lamport_timestamp);
1658 cx.notify();
1659 cx.emit(Event::DiagnosticsUpdated);
1660 }
1661 }
1662
1663 fn send_operation(&mut self, operation: Operation, cx: &mut ModelContext<Self>) {
1664 cx.emit(Event::Operation(operation));
1665 }
1666
1667 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut ModelContext<Self>) {
1668 self.remote_selections.remove(&replica_id);
1669 cx.notify();
1670 }
1671
1672 pub fn undo(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
1673 let was_dirty = self.is_dirty();
1674 let old_version = self.version.clone();
1675
1676 if let Some((transaction_id, operation)) = self.text.undo() {
1677 self.send_operation(Operation::Buffer(operation), cx);
1678 self.did_edit(&old_version, was_dirty, cx);
1679 Some(transaction_id)
1680 } else {
1681 None
1682 }
1683 }
1684
1685 pub fn undo_to_transaction(
1686 &mut self,
1687 transaction_id: TransactionId,
1688 cx: &mut ModelContext<Self>,
1689 ) -> bool {
1690 let was_dirty = self.is_dirty();
1691 let old_version = self.version.clone();
1692
1693 let operations = self.text.undo_to_transaction(transaction_id);
1694 let undone = !operations.is_empty();
1695 for operation in operations {
1696 self.send_operation(Operation::Buffer(operation), cx);
1697 }
1698 if undone {
1699 self.did_edit(&old_version, was_dirty, cx)
1700 }
1701 undone
1702 }
1703
1704 pub fn redo(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
1705 let was_dirty = self.is_dirty();
1706 let old_version = self.version.clone();
1707
1708 if let Some((transaction_id, operation)) = self.text.redo() {
1709 self.send_operation(Operation::Buffer(operation), cx);
1710 self.did_edit(&old_version, was_dirty, cx);
1711 Some(transaction_id)
1712 } else {
1713 None
1714 }
1715 }
1716
1717 pub fn redo_to_transaction(
1718 &mut self,
1719 transaction_id: TransactionId,
1720 cx: &mut ModelContext<Self>,
1721 ) -> bool {
1722 let was_dirty = self.is_dirty();
1723 let old_version = self.version.clone();
1724
1725 let operations = self.text.redo_to_transaction(transaction_id);
1726 let redone = !operations.is_empty();
1727 for operation in operations {
1728 self.send_operation(Operation::Buffer(operation), cx);
1729 }
1730 if redone {
1731 self.did_edit(&old_version, was_dirty, cx)
1732 }
1733 redone
1734 }
1735
1736 pub fn set_completion_triggers(&mut self, triggers: Vec<String>, cx: &mut ModelContext<Self>) {
1737 self.completion_triggers = triggers.clone();
1738 self.completion_triggers_timestamp = self.text.lamport_clock.tick();
1739 self.send_operation(
1740 Operation::UpdateCompletionTriggers {
1741 triggers,
1742 lamport_timestamp: self.completion_triggers_timestamp,
1743 },
1744 cx,
1745 );
1746 cx.notify();
1747 }
1748
1749 pub fn completion_triggers(&self) -> &[String] {
1750 &self.completion_triggers
1751 }
1752}
1753
1754#[cfg(any(test, feature = "test-support"))]
1755impl Buffer {
1756 pub fn edit_via_marked_text(
1757 &mut self,
1758 marked_string: &str,
1759 autoindent_mode: Option<AutoindentMode>,
1760 cx: &mut ModelContext<Self>,
1761 ) {
1762 let edits = self.edits_for_marked_text(marked_string);
1763 self.edit(edits, autoindent_mode, cx);
1764 }
1765
1766 pub fn set_group_interval(&mut self, group_interval: Duration) {
1767 self.text.set_group_interval(group_interval);
1768 }
1769
1770 pub fn randomly_edit<T>(
1771 &mut self,
1772 rng: &mut T,
1773 old_range_count: usize,
1774 cx: &mut ModelContext<Self>,
1775 ) where
1776 T: rand::Rng,
1777 {
1778 let mut edits: Vec<(Range<usize>, String)> = Vec::new();
1779 let mut last_end = None;
1780 for _ in 0..old_range_count {
1781 if last_end.map_or(false, |last_end| last_end >= self.len()) {
1782 break;
1783 }
1784
1785 let new_start = last_end.map_or(0, |last_end| last_end + 1);
1786 let mut range = self.random_byte_range(new_start, rng);
1787 if rng.gen_bool(0.2) {
1788 mem::swap(&mut range.start, &mut range.end);
1789 }
1790 last_end = Some(range.end);
1791
1792 let new_text_len = rng.gen_range(0..10);
1793 let new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
1794
1795 edits.push((range, new_text));
1796 }
1797 log::info!("mutating buffer {} with {:?}", self.replica_id(), edits);
1798 self.edit(edits, None, cx);
1799 }
1800
1801 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut ModelContext<Self>) {
1802 let was_dirty = self.is_dirty();
1803 let old_version = self.version.clone();
1804
1805 let ops = self.text.randomly_undo_redo(rng);
1806 if !ops.is_empty() {
1807 for op in ops {
1808 self.send_operation(Operation::Buffer(op), cx);
1809 self.did_edit(&old_version, was_dirty, cx);
1810 }
1811 }
1812 }
1813}
1814
1815impl Entity for Buffer {
1816 type Event = Event;
1817}
1818
1819impl Deref for Buffer {
1820 type Target = TextBuffer;
1821
1822 fn deref(&self) -> &Self::Target {
1823 &self.text
1824 }
1825}
1826
1827impl BufferSnapshot {
1828 pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
1829 indent_size_for_line(self, row)
1830 }
1831
1832 pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &AppContext) -> IndentSize {
1833 let language_name = self.language_at(position).map(|language| language.name());
1834 let settings = cx.global::<Settings>();
1835 if settings.hard_tabs(language_name.as_deref()) {
1836 IndentSize::tab()
1837 } else {
1838 IndentSize::spaces(settings.tab_size(language_name.as_deref()).get())
1839 }
1840 }
1841
1842 pub fn suggested_indents(
1843 &self,
1844 rows: impl Iterator<Item = u32>,
1845 single_indent_size: IndentSize,
1846 ) -> BTreeMap<u32, IndentSize> {
1847 let mut result = BTreeMap::new();
1848
1849 for row_range in contiguous_ranges(rows, 10) {
1850 let suggestions = match self.suggest_autoindents(row_range.clone()) {
1851 Some(suggestions) => suggestions,
1852 _ => break,
1853 };
1854
1855 for (row, suggestion) in row_range.zip(suggestions) {
1856 let indent_size = if let Some(suggestion) = suggestion {
1857 result
1858 .get(&suggestion.basis_row)
1859 .copied()
1860 .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
1861 .with_delta(suggestion.delta, single_indent_size)
1862 } else {
1863 self.indent_size_for_line(row)
1864 };
1865
1866 result.insert(row, indent_size);
1867 }
1868 }
1869
1870 result
1871 }
1872
1873 fn suggest_autoindents(
1874 &self,
1875 row_range: Range<u32>,
1876 ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
1877 let config = &self.language.as_ref()?.config;
1878 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
1879
1880 // Find the suggested indentation ranges based on the syntax tree.
1881 let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
1882 let end = Point::new(row_range.end, 0);
1883 let range = (start..end).to_offset(&self.text);
1884 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
1885 Some(&grammar.indents_config.as_ref()?.query)
1886 });
1887 let indent_configs = matches
1888 .grammars()
1889 .iter()
1890 .map(|grammar| grammar.indents_config.as_ref().unwrap())
1891 .collect::<Vec<_>>();
1892
1893 let mut indent_ranges = Vec::<Range<Point>>::new();
1894 let mut outdent_positions = Vec::<Point>::new();
1895 while let Some(mat) = matches.peek() {
1896 let mut start: Option<Point> = None;
1897 let mut end: Option<Point> = None;
1898
1899 let config = &indent_configs[mat.grammar_index];
1900 for capture in mat.captures {
1901 if capture.index == config.indent_capture_ix {
1902 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
1903 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
1904 } else if Some(capture.index) == config.start_capture_ix {
1905 start = Some(Point::from_ts_point(capture.node.end_position()));
1906 } else if Some(capture.index) == config.end_capture_ix {
1907 end = Some(Point::from_ts_point(capture.node.start_position()));
1908 } else if Some(capture.index) == config.outdent_capture_ix {
1909 outdent_positions.push(Point::from_ts_point(capture.node.start_position()));
1910 }
1911 }
1912
1913 matches.advance();
1914 if let Some((start, end)) = start.zip(end) {
1915 if start.row == end.row {
1916 continue;
1917 }
1918
1919 let range = start..end;
1920 match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
1921 Err(ix) => indent_ranges.insert(ix, range),
1922 Ok(ix) => {
1923 let prev_range = &mut indent_ranges[ix];
1924 prev_range.end = prev_range.end.max(range.end);
1925 }
1926 }
1927 }
1928 }
1929
1930 let mut error_ranges = Vec::<Range<Point>>::new();
1931 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
1932 Some(&grammar.error_query)
1933 });
1934 while let Some(mat) = matches.peek() {
1935 let node = mat.captures[0].node;
1936 let start = Point::from_ts_point(node.start_position());
1937 let end = Point::from_ts_point(node.end_position());
1938 let range = start..end;
1939 let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
1940 Ok(ix) | Err(ix) => ix,
1941 };
1942 let mut end_ix = ix;
1943 while let Some(existing_range) = error_ranges.get(end_ix) {
1944 if existing_range.end < end {
1945 end_ix += 1;
1946 } else {
1947 break;
1948 }
1949 }
1950 error_ranges.splice(ix..end_ix, [range]);
1951 matches.advance();
1952 }
1953
1954 outdent_positions.sort();
1955 for outdent_position in outdent_positions {
1956 // Find the innermost indent range containing this outdent_position,
1957 // and set its end to the outdent position.
1958 if let Some(range_to_truncate) = indent_ranges
1959 .iter_mut()
1960 .filter(|indent_range| indent_range.contains(&outdent_position))
1961 .last()
1962 {
1963 range_to_truncate.end = outdent_position;
1964 }
1965 }
1966
1967 // Find the suggested indentation increases and decreases based on regexes.
1968 let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
1969 self.for_each_line(
1970 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
1971 ..Point::new(row_range.end, 0),
1972 |row, line| {
1973 if config
1974 .decrease_indent_pattern
1975 .as_ref()
1976 .map_or(false, |regex| regex.is_match(line))
1977 {
1978 indent_change_rows.push((row, Ordering::Less));
1979 }
1980 if config
1981 .increase_indent_pattern
1982 .as_ref()
1983 .map_or(false, |regex| regex.is_match(line))
1984 {
1985 indent_change_rows.push((row + 1, Ordering::Greater));
1986 }
1987 },
1988 );
1989
1990 let mut indent_changes = indent_change_rows.into_iter().peekable();
1991 let mut prev_row = if config.auto_indent_using_last_non_empty_line {
1992 prev_non_blank_row.unwrap_or(0)
1993 } else {
1994 row_range.start.saturating_sub(1)
1995 };
1996 let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
1997 Some(row_range.map(move |row| {
1998 let row_start = Point::new(row, self.indent_size_for_line(row).len);
1999
2000 let mut indent_from_prev_row = false;
2001 let mut outdent_from_prev_row = false;
2002 let mut outdent_to_row = u32::MAX;
2003
2004 while let Some((indent_row, delta)) = indent_changes.peek() {
2005 match indent_row.cmp(&row) {
2006 Ordering::Equal => match delta {
2007 Ordering::Less => outdent_from_prev_row = true,
2008 Ordering::Greater => indent_from_prev_row = true,
2009 _ => {}
2010 },
2011
2012 Ordering::Greater => break,
2013 Ordering::Less => {}
2014 }
2015
2016 indent_changes.next();
2017 }
2018
2019 for range in &indent_ranges {
2020 if range.start.row >= row {
2021 break;
2022 }
2023 if range.start.row == prev_row && range.end > row_start {
2024 indent_from_prev_row = true;
2025 }
2026 if range.end > prev_row_start && range.end <= row_start {
2027 outdent_to_row = outdent_to_row.min(range.start.row);
2028 }
2029 }
2030
2031 let within_error = error_ranges
2032 .iter()
2033 .any(|e| e.start.row < row && e.end > row_start);
2034
2035 let suggestion = if outdent_to_row == prev_row
2036 || (outdent_from_prev_row && indent_from_prev_row)
2037 {
2038 Some(IndentSuggestion {
2039 basis_row: prev_row,
2040 delta: Ordering::Equal,
2041 within_error,
2042 })
2043 } else if indent_from_prev_row {
2044 Some(IndentSuggestion {
2045 basis_row: prev_row,
2046 delta: Ordering::Greater,
2047 within_error,
2048 })
2049 } else if outdent_to_row < prev_row {
2050 Some(IndentSuggestion {
2051 basis_row: outdent_to_row,
2052 delta: Ordering::Equal,
2053 within_error,
2054 })
2055 } else if outdent_from_prev_row {
2056 Some(IndentSuggestion {
2057 basis_row: prev_row,
2058 delta: Ordering::Less,
2059 within_error,
2060 })
2061 } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
2062 {
2063 Some(IndentSuggestion {
2064 basis_row: prev_row,
2065 delta: Ordering::Equal,
2066 within_error,
2067 })
2068 } else {
2069 None
2070 };
2071
2072 prev_row = row;
2073 prev_row_start = row_start;
2074 suggestion
2075 }))
2076 }
2077
2078 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
2079 while row > 0 {
2080 row -= 1;
2081 if !self.is_line_blank(row) {
2082 return Some(row);
2083 }
2084 }
2085 None
2086 }
2087
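/// Returns an iterator over the text in `range`, split into chunks. When
/// `language_aware` is true, each chunk also carries syntax highlighting and
/// diagnostic information for its span.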
2088 pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks {
2089 let range = range.start.to_offset(self)..range.end.to_offset(self);
2090
2091 let mut syntax = None;
2092 let mut diagnostic_endpoints = Vec::new();
2093 if language_aware {
2094 let captures = self.syntax.captures(range.clone(), &self.text, |grammar| {
2095 grammar.highlights_query.as_ref()
2096 });
2097 let highlight_maps = captures
2098 .grammars()
2099 .into_iter()
2100 .map(|grammar| grammar.highlight_map())
2101 .collect();
2102 syntax = Some((captures, highlight_maps));
2103 for entry in self.diagnostics_in_range::<_, usize>(range.clone(), false) {
2104 diagnostic_endpoints.push(DiagnosticEndpoint {
2105 offset: entry.range.start,
2106 is_start: true,
2107 severity: entry.diagnostic.severity,
2108 is_unnecessary: entry.diagnostic.is_unnecessary,
2109 });
2110 diagnostic_endpoints.push(DiagnosticEndpoint {
2111 offset: entry.range.end,
2112 is_start: false,
2113 severity: entry.diagnostic.severity,
2114 is_unnecessary: entry.diagnostic.is_unnecessary,
2115 });
2116 }
2117 diagnostic_endpoints
2118 .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
2119 }
2120
2121 BufferChunks::new(self.text.as_rope(), range, syntax, diagnostic_endpoints)
2122 }
2123
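/// Invokes `callback` once per line in `range`, passing the row number and the
/// line's text, clipped to the range.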
2124 pub fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
2125 let mut line = String::new();
2126 let mut row = range.start.row;
2127 for chunk in self
2128 .as_rope()
2129 .chunks_in_range(range.to_offset(self))
2130 .chain(["\n"])
2131 {
2132 for (newline_ix, text) in chunk.split('\n').enumerate() {
2133 if newline_ix > 0 {
2134 callback(row, &line);
2135 row += 1;
2136 line.clear();
2137 }
2138 line.push_str(text);
2139 }
2140 }
2141 }
2142
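/// Returns the language of the innermost syntax layer containing `position`,
/// falling back to the buffer's base language.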
2143 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
2144 let offset = position.to_offset(self);
2145 self.syntax
2146 .layers_for_range(offset..offset, &self.text)
2147 .filter(|l| l.node.end_byte() > offset)
2148 .last()
2149 .map(|info| info.language)
2150 .or(self.language.as_ref())
2151 }
2152
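/// Returns the language scope in effect at `position`: the language of the
/// innermost syntax layer plus any scope override that applies there.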
2153 pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
2154 let offset = position.to_offset(self);
2155
2156 if let Some(layer_info) = self
2157 .syntax
2158 .layers_for_range(offset..offset, &self.text)
2159 .filter(|l| l.node.end_byte() > offset)
2160 .last()
2161 {
2162 Some(LanguageScope {
2163 language: layer_info.language.clone(),
2164 override_id: layer_info.override_id(offset, &self.text),
2165 })
2166 } else {
2167 self.language.clone().map(|language| LanguageScope {
2168 language,
2169 override_id: None,
2170 })
2171 }
2172 }
2173
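/// Returns the range of the word surrounding `start`, along with the kind of
/// character (word, whitespace, or punctuation) that the word consists of.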
2174 pub fn surrounding_word<T: ToOffset>(&self, start: T) -> (Range<usize>, Option<CharKind>) {
2175 let mut start = start.to_offset(self);
2176 let mut end = start;
2177 let mut next_chars = self.chars_at(start).peekable();
2178 let mut prev_chars = self.reversed_chars_at(start).peekable();
2179 let word_kind = cmp::max(
2180 prev_chars.peek().copied().map(char_kind),
2181 next_chars.peek().copied().map(char_kind),
2182 );
2183
2184 for ch in prev_chars {
2185 if Some(char_kind(ch)) == word_kind && ch != '\n' {
2186 start -= ch.len_utf8();
2187 } else {
2188 break;
2189 }
2190 }
2191
2192 for ch in next_chars {
2193 if Some(char_kind(ch)) == word_kind && ch != '\n' {
2194 end += ch.len_utf8();
2195 } else {
2196 break;
2197 }
2198 }
2199
2200 (start..end, word_kind)
2201 }
2202
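/// Returns the byte range of the smallest syntax node that strictly contains
/// `range`, considering every syntax layer that overlaps it.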
2203 pub fn range_for_syntax_ancestor<T: ToOffset>(&self, range: Range<T>) -> Option<Range<usize>> {
2204 let range = range.start.to_offset(self)..range.end.to_offset(self);
2205 let mut result: Option<Range<usize>> = None;
2206 'outer: for layer in self.syntax.layers_for_range(range.clone(), &self.text) {
2207 let mut cursor = layer.node.walk();
2208
2209 // Descend to the first leaf that touches the start of the range and,
2210 // if the range is non-empty, extends beyond the start.
2211 while cursor.goto_first_child_for_byte(range.start).is_some() {
2212 if !range.is_empty() && cursor.node().end_byte() == range.start {
2213 cursor.goto_next_sibling();
2214 }
2215 }
2216
2217 // Ascend to the smallest ancestor that strictly contains the range.
2218 loop {
2219 let node_range = cursor.node().byte_range();
2220 if node_range.start <= range.start
2221 && node_range.end >= range.end
2222 && node_range.len() > range.len()
2223 {
2224 break;
2225 }
2226 if !cursor.goto_parent() {
2227 continue 'outer;
2228 }
2229 }
2230
2231 let left_node = cursor.node();
2232 let mut layer_result = left_node.byte_range();
2233
2234 // For an empty range, try to find another node immediately to the right of the range.
2235 if left_node.end_byte() == range.start {
2236 let mut right_node = None;
2237 while !cursor.goto_next_sibling() {
2238 if !cursor.goto_parent() {
2239 break;
2240 }
2241 }
2242
2243 while cursor.node().start_byte() == range.start {
2244 right_node = Some(cursor.node());
2245 if !cursor.goto_first_child() {
2246 break;
2247 }
2248 }
2249
2250 // If there is a candidate node on both sides of the (empty) range, then
2251 // decide between the two by favoring a named node over an anonymous token.
2252 // If both nodes are the same in that regard, favor the right one.
2253 if let Some(right_node) = right_node {
2254 if right_node.is_named() || !left_node.is_named() {
2255 layer_result = right_node.byte_range();
2256 }
2257 }
2258 }
2259
2260 if let Some(previous_result) = &result {
2261 if previous_result.len() < layer_result.len() {
2262 continue;
2263 }
2264 }
2265 result = Some(layer_result);
2266 }
2267
2268 result
2269 }
2270
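/// Returns an outline of the entire buffer, built from the language's outline
/// query, or `None` if no outline items could be produced.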
2271 pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Option<Outline<Anchor>> {
2272 self.outline_items_containing(0..self.len(), theme)
2273 .map(Outline::new)
2274 }
2275
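/// Returns the chain of outline items that enclose `position`, ordered from
/// the outermost symbol to the innermost.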
2276 pub fn symbols_containing<T: ToOffset>(
2277 &self,
2278 position: T,
2279 theme: Option<&SyntaxTheme>,
2280 ) -> Option<Vec<OutlineItem<Anchor>>> {
2281 let position = position.to_offset(self);
2282 let mut items = self.outline_items_containing(
2283 position.saturating_sub(1)..self.len().min(position + 1),
2284 theme,
2285 )?;
2286 let mut prev_depth = None;
2287 items.retain(|item| {
2288 let result = prev_depth.map_or(true, |prev_depth| item.depth > prev_depth);
2289 prev_depth = Some(item.depth);
2290 result
2291 });
2292 Some(items)
2293 }
2294
2295 fn outline_items_containing(
2296 &self,
2297 range: Range<usize>,
2298 theme: Option<&SyntaxTheme>,
2299 ) -> Option<Vec<OutlineItem<Anchor>>> {
2300 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2301 grammar.outline_config.as_ref().map(|c| &c.query)
2302 });
2303 let configs = matches
2304 .grammars()
2305 .iter()
2306 .map(|g| g.outline_config.as_ref().unwrap())
2307 .collect::<Vec<_>>();
2308
2309 let mut stack = Vec::<Range<usize>>::new();
2310 let mut items = Vec::new();
2311 while let Some(mat) = matches.peek() {
2312 let config = &configs[mat.grammar_index];
2313 let item_node = mat.captures.iter().find_map(|cap| {
2314 if cap.index == config.item_capture_ix {
2315 Some(cap.node)
2316 } else {
2317 None
2318 }
2319 })?;
2320
2321 let item_range = item_node.byte_range();
2322 if item_range.end < range.start || item_range.start > range.end {
2323 matches.advance();
2324 continue;
2325 }
2326
2327 let mut buffer_ranges = Vec::new();
2328 for capture in mat.captures {
2329 let node_is_name;
2330 if capture.index == config.name_capture_ix {
2331 node_is_name = true;
2332 } else if Some(capture.index) == config.context_capture_ix {
2333 node_is_name = false;
2334 } else {
2335 continue;
2336 }
2337
2338 let mut range = capture.node.start_byte()..capture.node.end_byte();
2339 let start = capture.node.start_position();
2340 if capture.node.end_position().row > start.row {
2341 range.end =
2342 range.start + self.line_len(start.row as u32) as usize - start.column;
2343 }
2344
2345 buffer_ranges.push((range, node_is_name));
2346 }
2347
2348 if buffer_ranges.is_empty() {
// Advance past this match, otherwise the loop would peek it again forever.
matches.advance();
2349 continue;
2350 }
2351
2352 let mut text = String::new();
2353 let mut highlight_ranges = Vec::new();
2354 let mut name_ranges = Vec::new();
2355 let mut chunks = self.chunks(
2356 buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end,
2357 true,
2358 );
2359 for (buffer_range, is_name) in buffer_ranges {
2360 if !text.is_empty() {
2361 text.push(' ');
2362 }
2363 if is_name {
2364 let mut start = text.len();
2365 let end = start + buffer_range.len();
2366
2367 // When multiple names are captured, the matchable text
2368 // includes the whitespace in between the names.
2369 if !name_ranges.is_empty() {
2370 start -= 1;
2371 }
2372
2373 name_ranges.push(start..end);
2374 }
2375
2376 let mut offset = buffer_range.start;
2377 chunks.seek(offset);
2378 for mut chunk in chunks.by_ref() {
2379 if chunk.text.len() > buffer_range.end - offset {
2380 chunk.text = &chunk.text[0..(buffer_range.end - offset)];
2381 offset = buffer_range.end;
2382 } else {
2383 offset += chunk.text.len();
2384 }
2385 let style = chunk
2386 .syntax_highlight_id
2387 .zip(theme)
2388 .and_then(|(highlight, theme)| highlight.style(theme));
2389 if let Some(style) = style {
2390 let start = text.len();
2391 let end = start + chunk.text.len();
2392 highlight_ranges.push((start..end, style));
2393 }
2394 text.push_str(chunk.text);
2395 if offset >= buffer_range.end {
2396 break;
2397 }
2398 }
2399 }
2400
2401 matches.advance();
2402 while stack.last().map_or(false, |prev_range| {
2403 prev_range.start > item_range.start || prev_range.end < item_range.end
2404 }) {
2405 stack.pop();
2406 }
2407 stack.push(item_range.clone());
2408
2409 items.push(OutlineItem {
2410 depth: stack.len() - 1,
2411 range: self.anchor_after(item_range.start)..self.anchor_before(item_range.end),
2412 text,
2413 highlight_ranges,
2414 name_ranges,
2415 })
2416 }
2417 Some(items)
2418 }
2419
2420 /// Returns bracket range pairs overlapping or adjacent to `range`.
2421 pub fn bracket_ranges<'a, T: ToOffset>(
2422 &'a self,
2423 range: Range<T>,
2424 ) -> impl Iterator<Item = (Range<usize>, Range<usize>)> + 'a {
2425 // Find bracket pairs that *inclusively* contain the given range.
2426 let range = range.start.to_offset(self).saturating_sub(1)
2427 ..self.len().min(range.end.to_offset(self) + 1);
2428
2429 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2430 grammar.brackets_config.as_ref().map(|c| &c.query)
2431 });
2432 let configs = matches
2433 .grammars()
2434 .iter()
2435 .map(|grammar| grammar.brackets_config.as_ref().unwrap())
2436 .collect::<Vec<_>>();
2437
2438 iter::from_fn(move || {
2439 while let Some(mat) = matches.peek() {
2440 let mut open = None;
2441 let mut close = None;
2442 let config = &configs[mat.grammar_index];
2443 for capture in mat.captures {
2444 if capture.index == config.open_capture_ix {
2445 open = Some(capture.node.byte_range());
2446 } else if capture.index == config.close_capture_ix {
2447 close = Some(capture.node.byte_range());
2448 }
2449 }
2450
2451 matches.advance();
2452
2453 let Some((open, close)) = open.zip(close) else { continue };
2454
2455 let bracket_range = open.start..=close.end;
2456 if !bracket_range.overlaps(&range) {
2457 continue;
2458 }
2459
2460 return Some((open, close));
2461 }
2462 None
2463 })
2464 }
2465
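/// Returns, for each collaborator other than the local replica, the selections
/// that intersect `range`, along with that collaborator's replica id, line
/// mode, and cursor shape.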
2466 #[allow(clippy::type_complexity)]
2467 pub fn remote_selections_in_range(
2468 &self,
2469 range: Range<Anchor>,
2470 ) -> impl Iterator<
2471 Item = (
2472 ReplicaId,
2473 bool,
2474 CursorShape,
2475 impl Iterator<Item = &Selection<Anchor>> + '_,
2476 ),
2477 > + '_ {
2478 self.remote_selections
2479 .iter()
2480 .filter(|(replica_id, set)| {
2481 **replica_id != self.text.replica_id() && !set.selections.is_empty()
2482 })
2483 .map(move |(replica_id, set)| {
2484 let start_ix = match set.selections.binary_search_by(|probe| {
2485 probe.end.cmp(&range.start, self).then(Ordering::Greater)
2486 }) {
2487 Ok(ix) | Err(ix) => ix,
2488 };
2489 let end_ix = match set.selections.binary_search_by(|probe| {
2490 probe.start.cmp(&range.end, self).then(Ordering::Less)
2491 }) {
2492 Ok(ix) | Err(ix) => ix,
2493 };
2494
2495 (
2496 *replica_id,
2497 set.line_mode,
2498 set.cursor_shape,
2499 set.selections[start_ix..end_ix].iter(),
2500 )
2501 })
2502 }
2503
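/// Returns the git diff hunks intersecting the given row range.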
2504 pub fn git_diff_hunks_in_row_range<'a>(
2505 &'a self,
2506 range: Range<u32>,
2507 reversed: bool,
2508 ) -> impl 'a + Iterator<Item = git::diff::DiffHunk<u32>> {
2509 self.git_diff.hunks_in_row_range(range, self, reversed)
2510 }
2511
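/// Returns the git diff hunks intersecting the given anchor range.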
2512 pub fn git_diff_hunks_intersecting_range<'a>(
2513 &'a self,
2514 range: Range<Anchor>,
2515 reversed: bool,
2516 ) -> impl 'a + Iterator<Item = git::diff::DiffHunk<u32>> {
2517 self.git_diff
2518 .hunks_intersecting_range(range, self, reversed)
2519 }
2520
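/// Returns the diagnostics intersecting `search_range`, merged across all
/// language servers in order of their start positions.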
2521 pub fn diagnostics_in_range<'a, T, O>(
2522 &'a self,
2523 search_range: Range<T>,
2524 reversed: bool,
2525 ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
2526 where
2527 T: 'a + Clone + ToOffset,
2528 O: 'a + FromAnchor + Ord,
2529 {
2530 let mut iterators: Vec<_> = self
2531 .diagnostics
2532 .values()
2533 .map(|collection| {
2534 collection
2535 .range::<T, O>(search_range.clone(), self, true, reversed)
2536 .peekable()
2537 })
2538 .collect();
2539
2540 std::iter::from_fn(move || {
2541 let (next_ix, _) = iterators
2542 .iter_mut()
2543 .enumerate()
2544 .flat_map(|(ix, iter)| Some((ix, iter.peek()?)))
2545 .min_by(|(_, a), (_, b)| a.range.start.cmp(&b.range.start))?;
2546 iterators[next_ix].next()
2547 })
2548 }
2549
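/// Returns all diagnostic groups in the buffer, sorted by the position of each
/// group's primary diagnostic.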
2550 pub fn diagnostic_groups(&self) -> Vec<DiagnosticGroup<Anchor>> {
2551 let mut groups = Vec::new();
2552 for diagnostics in self.diagnostics.values() {
2553 diagnostics.groups(&mut groups, self);
2554 }
2555
2556 groups.sort_by(|a, b| {
2557 let a_start = &a.entries[a.primary_ix].range.start;
2558 let b_start = &b.entries[b.primary_ix].range.start;
2559 a_start.cmp(b_start, self)
2560 });
2561
2562 groups
2563 }
2564
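/// Returns the entries of the diagnostic group with the given `group_id`.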
2565 pub fn diagnostic_group<'a, O>(
2566 &'a self,
2567 group_id: usize,
2568 ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
2569 where
2570 O: 'a + FromAnchor,
2571 {
2572 self.diagnostics
2573 .values()
2574 .flat_map(move |set| set.group(group_id, self))
2575 }
2576
2577 pub fn diagnostics_update_count(&self) -> usize {
2578 self.diagnostics_update_count
2579 }
2580
2581 pub fn parse_count(&self) -> usize {
2582 self.parse_count
2583 }
2584
2585 pub fn selections_update_count(&self) -> usize {
2586 self.selections_update_count
2587 }
2588
2589 pub fn file(&self) -> Option<&Arc<dyn File>> {
2590 self.file.as_ref()
2591 }
2592
2593 pub fn resolve_file_path(&self, cx: &AppContext, include_root: bool) -> Option<PathBuf> {
2594 if let Some(file) = self.file() {
2595 if file.path().file_name().is_none() || include_root {
2596 Some(file.full_path(cx))
2597 } else {
2598 Some(file.path().to_path_buf())
2599 }
2600 } else {
2601 None
2602 }
2603 }
2604
2605 pub fn file_update_count(&self) -> usize {
2606 self.file_update_count
2607 }
2608
2609 pub fn git_diff_update_count(&self) -> usize {
2610 self.git_diff_update_count
2611 }
2612}
2613
2614fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
2615 indent_size_for_text(text.chars_at(Point::new(row, 0)))
2616}
2617
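/// Measures the leading indentation of the given character stream; the kind is
/// determined by the first indent character. For example, `"    let x = 1;"`
/// yields an indent of four spaces.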
2618pub fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
2619 let mut result = IndentSize::spaces(0);
2620 for c in text {
2621 let kind = match c {
2622 ' ' => IndentKind::Space,
2623 '\t' => IndentKind::Tab,
2624 _ => break,
2625 };
2626 if result.len == 0 {
2627 result.kind = kind;
2628 }
2629 result.len += 1;
2630 }
2631 result
2632}
2633
2634impl Clone for BufferSnapshot {
2635 fn clone(&self) -> Self {
2636 Self {
2637 text: self.text.clone(),
2638 git_diff: self.git_diff.clone(),
2639 syntax: self.syntax.clone(),
2640 file: self.file.clone(),
2641 remote_selections: self.remote_selections.clone(),
2642 diagnostics: self.diagnostics.clone(),
2643 selections_update_count: self.selections_update_count,
2644 diagnostics_update_count: self.diagnostics_update_count,
2645 file_update_count: self.file_update_count,
2646 git_diff_update_count: self.git_diff_update_count,
2647 language: self.language.clone(),
2648 parse_count: self.parse_count,
2649 }
2650 }
2651}
2652
2653impl Deref for BufferSnapshot {
2654 type Target = text::BufferSnapshot;
2655
2656 fn deref(&self) -> &Self::Target {
2657 &self.text
2658 }
2659}
2660
2661unsafe impl<'a> Send for BufferChunks<'a> {}
2662
2663impl<'a> BufferChunks<'a> {
2664 pub(crate) fn new(
2665 text: &'a Rope,
2666 range: Range<usize>,
2667 syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
2668 diagnostic_endpoints: Vec<DiagnosticEndpoint>,
2669 ) -> Self {
2670 let mut highlights = None;
2671 if let Some((captures, highlight_maps)) = syntax {
2672 highlights = Some(BufferChunkHighlights {
2673 captures,
2674 next_capture: None,
2675 stack: Default::default(),
2676 highlight_maps,
2677 })
2678 }
2679
2680 let diagnostic_endpoints = diagnostic_endpoints.into_iter().peekable();
2681 let chunks = text.chunks_in_range(range.clone());
2682
2683 BufferChunks {
2684 range,
2685 chunks,
2686 diagnostic_endpoints,
2687 error_depth: 0,
2688 warning_depth: 0,
2689 information_depth: 0,
2690 hint_depth: 0,
2691 unnecessary_depth: 0,
2692 highlights,
2693 }
2694 }
2695
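/// Repositions the iterator so that the next chunk starts at `offset`,
/// preserving any highlight captures that still enclose that offset.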
2696 pub fn seek(&mut self, offset: usize) {
2697 self.range.start = offset;
2698 self.chunks.seek(self.range.start);
2699 if let Some(highlights) = self.highlights.as_mut() {
2700 highlights
2701 .stack
2702 .retain(|(end_offset, _)| *end_offset > offset);
2703 if let Some(capture) = &highlights.next_capture {
2704 if offset >= capture.node.start_byte() {
2705 let next_capture_end = capture.node.end_byte();
2706 if offset < next_capture_end {
2707 highlights.stack.push((
2708 next_capture_end,
2709 highlights.highlight_maps[capture.grammar_index].get(capture.index),
2710 ));
2711 }
2712 highlights.next_capture.take();
2713 }
2714 }
2715 highlights.captures.set_byte_range(self.range.clone());
2716 }
2717 }
2718
2719 pub fn offset(&self) -> usize {
2720 self.range.start
2721 }
2722
2723 fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
2724 let depth = match endpoint.severity {
2725 DiagnosticSeverity::ERROR => &mut self.error_depth,
2726 DiagnosticSeverity::WARNING => &mut self.warning_depth,
2727 DiagnosticSeverity::INFORMATION => &mut self.information_depth,
2728 DiagnosticSeverity::HINT => &mut self.hint_depth,
2729 _ => return,
2730 };
2731 if endpoint.is_start {
2732 *depth += 1;
2733 } else {
2734 *depth -= 1;
2735 }
2736
2737 if endpoint.is_unnecessary {
2738 if endpoint.is_start {
2739 self.unnecessary_depth += 1;
2740 } else {
2741 self.unnecessary_depth -= 1;
2742 }
2743 }
2744 }
2745
2746 fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
2747 if self.error_depth > 0 {
2748 Some(DiagnosticSeverity::ERROR)
2749 } else if self.warning_depth > 0 {
2750 Some(DiagnosticSeverity::WARNING)
2751 } else if self.information_depth > 0 {
2752 Some(DiagnosticSeverity::INFORMATION)
2753 } else if self.hint_depth > 0 {
2754 Some(DiagnosticSeverity::HINT)
2755 } else {
2756 None
2757 }
2758 }
2759
2760 fn current_code_is_unnecessary(&self) -> bool {
2761 self.unnecessary_depth > 0
2762 }
2763}
2764
2765impl<'a> Iterator for BufferChunks<'a> {
2766 type Item = Chunk<'a>;
2767
2768 fn next(&mut self) -> Option<Self::Item> {
2769 let mut next_capture_start = usize::MAX;
2770 let mut next_diagnostic_endpoint = usize::MAX;
2771
2772 if let Some(highlights) = self.highlights.as_mut() {
2773 while let Some((parent_capture_end, _)) = highlights.stack.last() {
2774 if *parent_capture_end <= self.range.start {
2775 highlights.stack.pop();
2776 } else {
2777 break;
2778 }
2779 }
2780
2781 if highlights.next_capture.is_none() {
2782 highlights.next_capture = highlights.captures.next();
2783 }
2784
2785 while let Some(capture) = highlights.next_capture.as_ref() {
2786 if self.range.start < capture.node.start_byte() {
2787 next_capture_start = capture.node.start_byte();
2788 break;
2789 } else {
2790 let highlight_id =
2791 highlights.highlight_maps[capture.grammar_index].get(capture.index);
2792 highlights
2793 .stack
2794 .push((capture.node.end_byte(), highlight_id));
2795 highlights.next_capture = highlights.captures.next();
2796 }
2797 }
2798 }
2799
2800 while let Some(endpoint) = self.diagnostic_endpoints.peek().copied() {
2801 if endpoint.offset <= self.range.start {
2802 self.update_diagnostic_depths(endpoint);
2803 self.diagnostic_endpoints.next();
2804 } else {
2805 next_diagnostic_endpoint = endpoint.offset;
2806 break;
2807 }
2808 }
2809
2810 if let Some(chunk) = self.chunks.peek() {
2811 let chunk_start = self.range.start;
2812 let mut chunk_end = (self.chunks.offset() + chunk.len())
2813 .min(next_capture_start)
2814 .min(next_diagnostic_endpoint);
2815 let mut highlight_id = None;
2816 if let Some(highlights) = self.highlights.as_ref() {
2817 if let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last() {
2818 chunk_end = chunk_end.min(*parent_capture_end);
2819 highlight_id = Some(*parent_highlight_id);
2820 }
2821 }
2822
2823 let slice =
2824 &chunk[chunk_start - self.chunks.offset()..chunk_end - self.chunks.offset()];
2825 self.range.start = chunk_end;
2826 if self.range.start == self.chunks.offset() + chunk.len() {
2827 self.chunks.next().unwrap();
2828 }
2829
2830 Some(Chunk {
2831 text: slice,
2832 syntax_highlight_id: highlight_id,
2833 highlight_style: None,
2834 diagnostic_severity: self.current_diagnostic_severity(),
2835 is_unnecessary: self.current_code_is_unnecessary(),
2836 })
2837 } else {
2838 None
2839 }
2840 }
2841}
2842
2843impl operation_queue::Operation for Operation {
2844 fn lamport_timestamp(&self) -> clock::Lamport {
2845 match self {
2846 Operation::Buffer(_) => {
2847 unreachable!("buffer operations should never be deferred at this layer")
2848 }
2849 Operation::UpdateDiagnostics {
2850 lamport_timestamp, ..
2851 }
2852 | Operation::UpdateSelections {
2853 lamport_timestamp, ..
2854 }
2855 | Operation::UpdateCompletionTriggers {
2856 lamport_timestamp, ..
2857 } => *lamport_timestamp,
2858 }
2859 }
2860}
2861
2862impl Default for Diagnostic {
2863 fn default() -> Self {
2864 Self {
2865 code: None,
2866 severity: DiagnosticSeverity::ERROR,
2867 message: Default::default(),
2868 group_id: 0,
2869 is_primary: false,
2870 is_valid: true,
2871 is_disk_based: false,
2872 is_unnecessary: false,
2873 }
2874 }
2875}
2876
2877impl IndentSize {
2878 pub fn spaces(len: u32) -> Self {
2879 Self {
2880 len,
2881 kind: IndentKind::Space,
2882 }
2883 }
2884
2885 pub fn tab() -> Self {
2886 Self {
2887 len: 1,
2888 kind: IndentKind::Tab,
2889 }
2890 }
2891
2892 pub fn chars(&self) -> impl Iterator<Item = char> {
2893 iter::repeat(self.char()).take(self.len as usize)
2894 }
2895
2896 pub fn char(&self) -> char {
2897 match self.kind {
2898 IndentKind::Space => ' ',
2899 IndentKind::Tab => '\t',
2900 }
2901 }
2902
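/// Returns this indent size adjusted one step in the given direction, where a
/// step is `size`. For example, four spaces grown by a four-space step becomes
/// eight spaces.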
2903 pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
2904 match direction {
2905 Ordering::Less => {
2906 if self.kind == size.kind && self.len >= size.len {
2907 self.len -= size.len;
2908 }
2909 }
2910 Ordering::Equal => {}
2911 Ordering::Greater => {
2912 if self.len == 0 {
2913 self = size;
2914 } else if self.kind == size.kind {
2915 self.len += size.len;
2916 }
2917 }
2918 }
2919 self
2920 }
2921}
2922
2923impl Completion {
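/// Returns a key for ordering completions: variables sort before other kinds,
/// then entries are compared by their filter text.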
2924 pub fn sort_key(&self) -> (usize, &str) {
2925 let kind_key = match self.lsp_completion.kind {
2926 Some(lsp::CompletionItemKind::VARIABLE) => 0,
2927 _ => 1,
2928 };
2929 (kind_key, &self.label.text[self.label.filter_range.clone()])
2930 }
2931
2932 pub fn is_snippet(&self) -> bool {
2933 self.lsp_completion.insert_text_format == Some(lsp::InsertTextFormat::SNIPPET)
2934 }
2935}
2936
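/// Groups an ascending sequence of values into contiguous ranges, capping each
/// range at `max_len` elements. For example, `[1, 2, 3, 5]` with a `max_len`
/// of 2 yields `1..3`, `3..4`, and `5..6`.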
2937pub fn contiguous_ranges(
2938 values: impl Iterator<Item = u32>,
2939 max_len: usize,
2940) -> impl Iterator<Item = Range<u32>> {
2941 let mut values = values;
2942 let mut current_range: Option<Range<u32>> = None;
2943 std::iter::from_fn(move || loop {
2944 if let Some(value) = values.next() {
2945 if let Some(range) = &mut current_range {
2946 if value == range.end && range.len() < max_len {
2947 range.end += 1;
2948 continue;
2949 }
2950 }
2951
2952 let prev_range = current_range.clone();
2953 current_range = Some(value..(value + 1));
2954 if prev_range.is_some() {
2955 return prev_range;
2956 }
2957 } else {
2958 return current_range.take();
2959 }
2960 })
2961}
2962
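/// Classifies a character as whitespace, a word character (alphanumeric or
/// `_`), or punctuation.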
2963pub fn char_kind(c: char) -> CharKind {
2964 if c.is_whitespace() {
2965 CharKind::Whitespace
2966 } else if c.is_alphanumeric() || c == '_' {
2967 CharKind::Word
2968 } else {
2969 CharKind::Punctuation
2970 }
2971}
2972
2973/// Find all of the ranges of whitespace that occur at the ends of lines
2974/// in the given rope.
2975///
2976/// This could also be done with a regex search, but this implementation
2977/// avoids copying text.
2978pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
2979 let mut ranges = Vec::new();
2980
2981 let mut offset = 0;
2982 let mut prev_chunk_trailing_whitespace_range = 0..0;
2983 for chunk in rope.chunks() {
2984 let mut prev_line_trailing_whitespace_range = 0..0;
2985 for (i, line) in chunk.split('\n').enumerate() {
2986 let line_end_offset = offset + line.len();
2987 let trimmed_line_len = line.trim_end_matches(|c| matches!(c, ' ' | '\t')).len();
2988 let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;
2989
2990 if i == 0 && trimmed_line_len == 0 {
2991 trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
2992 }
2993 if !prev_line_trailing_whitespace_range.is_empty() {
2994 ranges.push(prev_line_trailing_whitespace_range);
2995 }
2996
2997 offset = line_end_offset + 1;
2998 prev_line_trailing_whitespace_range = trailing_whitespace_range;
2999 }
3000
3001 offset -= 1;
3002 prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
3003 }
3004
3005 if !prev_chunk_trailing_whitespace_range.is_empty() {
3006 ranges.push(prev_chunk_trailing_whitespace_range);
3007 }
3008
3009 ranges
3010}