1pub use crate::{
2 diagnostic_set::DiagnosticSet,
3 highlight_map::{HighlightId, HighlightMap},
4 markdown::ParsedMarkdown,
5 proto, Grammar, Language, LanguageRegistry,
6};
7use crate::{
8 diagnostic_set::{DiagnosticEntry, DiagnosticGroup},
9 language_settings::{language_settings, LanguageSettings},
10 markdown::parse_markdown,
11 outline::OutlineItem,
12 syntax_map::{
13 SyntaxLayer, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures, SyntaxMapMatches,
14 SyntaxSnapshot, ToTreeSitterPoint,
15 },
16 task_context::RunnableRange,
17 LanguageScope, Outline, RunnableTag,
18};
19use anyhow::{anyhow, Context, Result};
20pub use clock::ReplicaId;
21use futures::channel::oneshot;
22use gpui::{AppContext, EventEmitter, HighlightStyle, ModelContext, Task, TaskLabel};
23use lazy_static::lazy_static;
24use lsp::LanguageServerId;
25use parking_lot::Mutex;
26use similar::{ChangeTag, TextDiff};
27use smallvec::SmallVec;
28use smol::future::yield_now;
29use std::{
30 any::Any,
31 cmp::{self, Ordering},
32 collections::BTreeMap,
33 ffi::OsStr,
34 future::Future,
35 iter::{self, Iterator, Peekable},
36 mem,
37 ops::{Deref, Range},
38 path::{Path, PathBuf},
39 str,
40 sync::Arc,
41 time::{Duration, Instant, SystemTime},
42 vec,
43};
44use sum_tree::TreeMap;
45use text::operation_queue::OperationQueue;
46use text::*;
47pub use text::{
48 Anchor, Bias, Buffer as TextBuffer, BufferId, BufferSnapshot as TextBufferSnapshot, Edit,
49 OffsetRangeExt, OffsetUtf16, Patch, Point, PointUtf16, Rope, Selection, SelectionGoal,
50 Subscription, TextDimension, TextSummary, ToOffset, ToOffsetUtf16, ToPoint, ToPointUtf16,
51 Transaction, TransactionId, Unclipped,
52};
53use theme::SyntaxTheme;
54#[cfg(any(test, feature = "test-support"))]
55use util::RandomCharIter;
56use util::RangeExt;
57
58#[cfg(any(test, feature = "test-support"))]
59pub use {tree_sitter_rust, tree_sitter_typescript};
60
61pub use lsp::DiagnosticSeverity;
62
63lazy_static! {
64 /// A label for the background task spawned by the buffer to compute
65 /// a diff against the contents of its file.
66 pub static ref BUFFER_DIFF_TASK: TaskLabel = TaskLabel::new();
67}
68
/// Indicates whether a [Buffer] has permission to edit.
70#[derive(PartialEq, Clone, Copy, Debug)]
71pub enum Capability {
72 /// The buffer is a mutable replica.
73 ReadWrite,
74 /// The buffer is a read-only replica.
75 ReadOnly,
76}
77
78pub type BufferRow = u32;
79
80/// An in-memory representation of a source code file, including its text,
81/// syntax trees, git status, and diagnostics.
82pub struct Buffer {
83 text: TextBuffer,
84 diff_base: Option<Rope>,
85 git_diff: git::diff::BufferDiff,
86 file: Option<Arc<dyn File>>,
87 /// The mtime of the file when this buffer was last loaded from
88 /// or saved to disk.
89 saved_mtime: Option<SystemTime>,
90 /// The version vector when this buffer was last loaded from
91 /// or saved to disk.
92 saved_version: clock::Global,
93 transaction_depth: usize,
94 was_dirty_before_starting_transaction: Option<bool>,
95 reload_task: Option<Task<Result<()>>>,
96 language: Option<Arc<Language>>,
97 autoindent_requests: Vec<Arc<AutoindentRequest>>,
98 pending_autoindent: Option<Task<()>>,
99 sync_parse_timeout: Duration,
100 syntax_map: Mutex<SyntaxMap>,
101 parsing_in_background: bool,
102 parse_count: usize,
103 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
104 remote_selections: TreeMap<ReplicaId, SelectionSet>,
105 selections_update_count: usize,
106 diagnostics_update_count: usize,
107 diagnostics_timestamp: clock::Lamport,
108 file_update_count: usize,
109 git_diff_update_count: usize,
110 completion_triggers: Vec<String>,
111 completion_triggers_timestamp: clock::Lamport,
112 deferred_ops: OperationQueue<Operation>,
113 capability: Capability,
114 has_conflict: bool,
115 diff_base_version: usize,
116}
117
118/// An immutable, cheaply cloneable representation of a fixed
119/// state of a buffer.
120pub struct BufferSnapshot {
121 text: text::BufferSnapshot,
122 git_diff: git::diff::BufferDiff,
123 pub(crate) syntax: SyntaxSnapshot,
124 file: Option<Arc<dyn File>>,
125 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
126 diagnostics_update_count: usize,
127 file_update_count: usize,
128 git_diff_update_count: usize,
129 remote_selections: TreeMap<ReplicaId, SelectionSet>,
130 selections_update_count: usize,
131 language: Option<Arc<Language>>,
132 parse_count: usize,
133}
134
135/// The kind and amount of indentation in a particular line. For now,
136/// assumes that indentation is all the same character.
137#[derive(Clone, Copy, Debug, PartialEq, Eq, Default)]
138pub struct IndentSize {
139 /// The number of bytes that comprise the indentation.
140 pub len: u32,
141 /// The kind of whitespace used for indentation.
142 pub kind: IndentKind,
143}
144
145/// A whitespace character that's used for indentation.
146#[derive(Clone, Copy, Debug, PartialEq, Eq, Default)]
147pub enum IndentKind {
148 /// An ASCII space character.
149 #[default]
150 Space,
151 /// An ASCII tab character.
152 Tab,
153}
154
155/// The shape of a selection cursor.
156#[derive(Copy, Clone, PartialEq, Eq, Debug, Default)]
157pub enum CursorShape {
158 /// A vertical bar
159 #[default]
160 Bar,
161 /// A block that surrounds the following character
162 Block,
163 /// An underline that runs along the following character
164 Underscore,
165 /// A box drawn around the following character
166 Hollow,
167}
168
169#[derive(Clone, Debug)]
170struct SelectionSet {
171 line_mode: bool,
172 cursor_shape: CursorShape,
173 selections: Arc<[Selection<Anchor>]>,
174 lamport_timestamp: clock::Lamport,
175}
176
177/// A diagnostic associated with a certain range of a buffer.
178#[derive(Clone, Debug, PartialEq, Eq)]
179pub struct Diagnostic {
180 /// The name of the service that produced this diagnostic.
181 pub source: Option<String>,
182 /// A machine-readable code that identifies this diagnostic.
183 pub code: Option<String>,
184 /// Whether this diagnostic is a hint, warning, or error.
185 pub severity: DiagnosticSeverity,
186 /// The human-readable message associated with this diagnostic.
187 pub message: String,
188 /// An id that identifies the group to which this diagnostic belongs.
189 ///
190 /// When a language server produces a diagnostic with
191 /// one or more associated diagnostics, those diagnostics are all
192 /// assigned a single group id.
193 pub group_id: usize,
194 /// Whether this diagnostic is the primary diagnostic for its group.
195 ///
196 /// In a given group, the primary diagnostic is the top-level diagnostic
197 /// returned by the language server. The non-primary diagnostics are the
198 /// associated diagnostics.
199 pub is_primary: bool,
200 /// Whether this diagnostic is considered to originate from an analysis of
201 /// files on disk, as opposed to any unsaved buffer contents. This is a
202 /// property of a given diagnostic source, and is configured for a given
203 /// language server via the [`LspAdapter::disk_based_diagnostic_sources`](crate::LspAdapter::disk_based_diagnostic_sources) method
204 /// for the language server.
205 pub is_disk_based: bool,
206 /// Whether this diagnostic marks unnecessary code.
207 pub is_unnecessary: bool,
208}
209
210/// TODO - move this into the `project` crate and make it private.
211pub async fn prepare_completion_documentation(
212 documentation: &lsp::Documentation,
213 language_registry: &Arc<LanguageRegistry>,
214 language: Option<Arc<Language>>,
215) -> Documentation {
216 match documentation {
217 lsp::Documentation::String(text) => {
218 if text.lines().count() <= 1 {
219 Documentation::SingleLine(text.clone())
220 } else {
221 Documentation::MultiLinePlainText(text.clone())
222 }
223 }
224
225 lsp::Documentation::MarkupContent(lsp::MarkupContent { kind, value }) => match kind {
226 lsp::MarkupKind::PlainText => {
227 if value.lines().count() <= 1 {
228 Documentation::SingleLine(value.clone())
229 } else {
230 Documentation::MultiLinePlainText(value.clone())
231 }
232 }
233
234 lsp::MarkupKind::Markdown => {
235 let parsed = parse_markdown(value, language_registry, language).await;
236 Documentation::MultiLineMarkdown(parsed)
237 }
238 },
239 }
240}
241
242/// Documentation associated with a [`Completion`].
243#[derive(Clone, Debug)]
244pub enum Documentation {
245 /// There is no documentation for this completion.
246 Undocumented,
247 /// A single line of documentation.
248 SingleLine(String),
249 /// Multiple lines of plain text documentation.
250 MultiLinePlainText(String),
251 /// Markdown documentation.
252 MultiLineMarkdown(ParsedMarkdown),
253}
254
255/// An operation used to synchronize this buffer with its other replicas.
256#[derive(Clone, Debug, PartialEq)]
257pub enum Operation {
258 /// A text operation.
259 Buffer(text::Operation),
260
261 /// An update to the buffer's diagnostics.
262 UpdateDiagnostics {
263 /// The id of the language server that produced the new diagnostics.
264 server_id: LanguageServerId,
265 /// The diagnostics.
266 diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
267 /// The buffer's lamport timestamp.
268 lamport_timestamp: clock::Lamport,
269 },
270
271 /// An update to the most recent selections in this buffer.
272 UpdateSelections {
273 /// The selections.
274 selections: Arc<[Selection<Anchor>]>,
275 /// The buffer's lamport timestamp.
276 lamport_timestamp: clock::Lamport,
277 /// Whether the selections are in 'line mode'.
278 line_mode: bool,
279 /// The [`CursorShape`] associated with these selections.
280 cursor_shape: CursorShape,
281 },
282
283 /// An update to the characters that should trigger autocompletion
284 /// for this buffer.
285 UpdateCompletionTriggers {
286 /// The characters that trigger autocompletion.
287 triggers: Vec<String>,
288 /// The buffer's lamport timestamp.
289 lamport_timestamp: clock::Lamport,
290 },
291}
292
293/// An event that occurs in a buffer.
294#[derive(Clone, Debug, PartialEq)]
295pub enum Event {
296 /// The buffer was changed in a way that must be
297 /// propagated to its other replicas.
298 Operation(Operation),
299 /// The buffer was edited.
300 Edited,
301 /// The buffer's `dirty` bit changed.
302 DirtyChanged,
303 /// The buffer was saved.
304 Saved,
305 /// The buffer's file was changed on disk.
306 FileHandleChanged,
307 /// The buffer was reloaded.
308 Reloaded,
309 /// The buffer's diff_base changed.
310 DiffBaseChanged,
    /// The buffer's diff against its diff base was recalculated.
312 DiffUpdated,
313 /// The buffer's language was changed.
314 LanguageChanged,
315 /// The buffer's syntax trees were updated.
316 Reparsed,
317 /// The buffer's diagnostics were updated.
318 DiagnosticsUpdated,
319 /// The buffer gained or lost editing capabilities.
320 CapabilityChanged,
321 /// The buffer was explicitly requested to close.
322 Closed,
323}
324
325/// The file associated with a buffer.
326pub trait File: Send + Sync {
327 /// Returns the [`LocalFile`] associated with this file, if the
328 /// file is local.
329 fn as_local(&self) -> Option<&dyn LocalFile>;
330
331 /// Returns whether this file is local.
332 fn is_local(&self) -> bool {
333 self.as_local().is_some()
334 }
335
336 /// Returns the file's mtime.
337 fn mtime(&self) -> Option<SystemTime>;
338
339 /// Returns the path of this file relative to the worktree's root directory.
340 fn path(&self) -> &Arc<Path>;
341
342 /// Returns the path of this file relative to the worktree's parent directory (this means it
343 /// includes the name of the worktree's root folder).
344 fn full_path(&self, cx: &AppContext) -> PathBuf;
345
346 /// Returns the last component of this handle's absolute path. If this handle refers to the root
347 /// of its worktree, then this method will return the name of the worktree itself.
348 fn file_name<'a>(&'a self, cx: &'a AppContext) -> &'a OsStr;
349
350 /// Returns the id of the worktree to which this file belongs.
351 ///
352 /// This is needed for looking up project-specific settings.
353 fn worktree_id(&self) -> usize;
354
355 /// Returns whether the file has been deleted.
356 fn is_deleted(&self) -> bool;
357
358 /// Returns whether the file existed on disk at one point
359 fn is_created(&self) -> bool {
360 self.mtime().is_some()
361 }
362
363 /// Converts this file into an [`Any`] trait object.
364 fn as_any(&self) -> &dyn Any;
365
366 /// Converts this file into a protobuf message.
367 fn to_proto(&self) -> rpc::proto::File;
368
    /// Returns whether Zed considers this to be a private file.
370 fn is_private(&self) -> bool;
371}
372
373/// The file associated with a buffer, in the case where the file is on the local disk.
374pub trait LocalFile: File {
375 /// Returns the absolute path of this file.
376 fn abs_path(&self, cx: &AppContext) -> PathBuf;
377
378 /// Loads the file's contents from disk.
379 fn load(&self, cx: &AppContext) -> Task<Result<String>>;
380
381 /// Called when the buffer is reloaded from disk.
382 fn buffer_reloaded(
383 &self,
384 buffer_id: BufferId,
385 version: &clock::Global,
386 line_ending: LineEnding,
387 mtime: Option<SystemTime>,
388 cx: &mut AppContext,
389 );
390
391 /// Returns true if the file should not be shared with collaborators.
392 fn is_private(&self, _: &AppContext) -> bool {
393 false
394 }
395}
396
397/// The auto-indent behavior associated with an editing operation.
398/// For some editing operations, each affected line of text has its
399/// indentation recomputed. For other operations, the entire block
400/// of edited text is adjusted uniformly.
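///
/// A rough sketch of how block mode might be used when pasting text that was
/// already indented where it was copied from (the `buffer`, `cx`, and `point`
/// bindings are assumed here, not part of this API):
///
/// ```ignore
/// buffer.edit(
///     [(point..point, "    foo();\n    bar();\n")],
///     Some(AutoindentMode::Block {
///         // One entry per insertion: the indentation the text had at its source.
///         original_indent_columns: vec![4],
///     }),
///     cx,
/// );
/// ```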
401#[derive(Clone, Debug)]
402pub enum AutoindentMode {
403 /// Indent each line of inserted text.
404 EachLine,
405 /// Apply the same indentation adjustment to all of the lines
406 /// in a given insertion.
407 Block {
408 /// The original indentation level of the first line of each
409 /// insertion, if it has been copied.
410 original_indent_columns: Vec<u32>,
411 },
412}
413
414#[derive(Clone)]
415struct AutoindentRequest {
416 before_edit: BufferSnapshot,
417 entries: Vec<AutoindentRequestEntry>,
418 is_block_mode: bool,
419}
420
421#[derive(Clone)]
422struct AutoindentRequestEntry {
423 /// A range of the buffer whose indentation should be adjusted.
424 range: Range<Anchor>,
425 /// Whether or not these lines should be considered brand new, for the
426 /// purpose of auto-indent. When text is not new, its indentation will
427 /// only be adjusted if the suggested indentation level has *changed*
428 /// since the edit was made.
429 first_line_is_new: bool,
430 indent_size: IndentSize,
431 original_indent_column: Option<u32>,
432}
433
434#[derive(Debug)]
435struct IndentSuggestion {
436 basis_row: u32,
437 delta: Ordering,
438 within_error: bool,
439}
440
441struct BufferChunkHighlights<'a> {
442 captures: SyntaxMapCaptures<'a>,
443 next_capture: Option<SyntaxMapCapture<'a>>,
444 stack: Vec<(usize, HighlightId)>,
445 highlight_maps: Vec<HighlightMap>,
446}
447
448/// An iterator that yields chunks of a buffer's text, along with their
449/// syntax highlights and diagnostic status.
450pub struct BufferChunks<'a> {
451 range: Range<usize>,
452 chunks: text::Chunks<'a>,
453 diagnostic_endpoints: Peekable<vec::IntoIter<DiagnosticEndpoint>>,
454 error_depth: usize,
455 warning_depth: usize,
456 information_depth: usize,
457 hint_depth: usize,
458 unnecessary_depth: usize,
459 highlights: Option<BufferChunkHighlights<'a>>,
460}
461
462/// A chunk of a buffer's text, along with its syntax highlight and
463/// diagnostic status.
464#[derive(Clone, Copy, Debug, Default)]
465pub struct Chunk<'a> {
466 /// The text of the chunk.
467 pub text: &'a str,
468 /// The syntax highlighting style of the chunk.
469 pub syntax_highlight_id: Option<HighlightId>,
470 /// The highlight style that has been applied to this chunk in
471 /// the editor.
472 pub highlight_style: Option<HighlightStyle>,
473 /// The severity of diagnostic associated with this chunk, if any.
474 pub diagnostic_severity: Option<DiagnosticSeverity>,
475 /// Whether this chunk of text is marked as unnecessary.
476 pub is_unnecessary: bool,
477 /// Whether this chunk of text was originally a tab character.
478 pub is_tab: bool,
479}
480
481/// A set of edits to a given version of a buffer, computed asynchronously.
482#[derive(Debug)]
483pub struct Diff {
484 pub(crate) base_version: clock::Global,
485 line_ending: LineEnding,
486 edits: Vec<(Range<usize>, Arc<str>)>,
487}
488
489#[derive(Clone, Copy)]
490pub(crate) struct DiagnosticEndpoint {
491 offset: usize,
492 is_start: bool,
493 severity: DiagnosticSeverity,
494 is_unnecessary: bool,
495}
496
497/// A class of characters, used for characterizing a run of text.
498#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
499pub enum CharKind {
500 /// Whitespace.
501 Whitespace,
502 /// Punctuation.
503 Punctuation,
504 /// Word.
505 Word,
506}
507
/// A runnable is a set of data about a region that can be resolved into a task.
509pub struct Runnable {
510 pub tags: SmallVec<[RunnableTag; 1]>,
511 pub language: Arc<Language>,
512 pub buffer: BufferId,
513}
514
515#[derive(Clone, Debug, PartialEq)]
516pub struct IndentGuide {
517 pub buffer_id: BufferId,
518 pub start_row: BufferRow,
519 pub end_row: BufferRow,
520 pub depth: u32,
521 pub indent_size: u32,
522}
523
524impl IndentGuide {
525 pub fn new(
526 buffer_id: BufferId,
527 start_row: BufferRow,
528 end_row: BufferRow,
529 depth: u32,
530 indent_size: u32,
531 ) -> Self {
532 Self {
533 buffer_id,
534 start_row,
535 end_row,
536 depth,
537 indent_size,
538 }
539 }
540
541 pub fn indent_width(&self) -> u32 {
542 self.indent_size * self.depth
543 }
544}
545
546impl Buffer {
547 /// Create a new buffer with the given base text.
548 pub fn local<T: Into<String>>(base_text: T, cx: &mut ModelContext<Self>) -> Self {
549 Self::build(
550 TextBuffer::new(0, cx.entity_id().as_non_zero_u64().into(), base_text.into()),
551 None,
552 None,
553 Capability::ReadWrite,
554 )
555 }
556
557 /// Create a new buffer with the given base text that has proper line endings and other normalization applied.
558 pub fn local_normalized(
559 base_text_normalized: Rope,
560 line_ending: LineEnding,
561 cx: &mut ModelContext<Self>,
562 ) -> Self {
563 Self::build(
564 TextBuffer::new_normalized(
565 0,
566 cx.entity_id().as_non_zero_u64().into(),
567 line_ending,
568 base_text_normalized,
569 ),
570 None,
571 None,
572 Capability::ReadWrite,
573 )
574 }
575
576 /// Create a new buffer that is a replica of a remote buffer.
577 pub fn remote(
578 remote_id: BufferId,
579 replica_id: ReplicaId,
580 capability: Capability,
581 base_text: impl Into<String>,
582 ) -> Self {
583 Self::build(
584 TextBuffer::new(replica_id, remote_id, base_text.into()),
585 None,
586 None,
587 capability,
588 )
589 }
590
591 /// Create a new buffer that is a replica of a remote buffer, populating its
592 /// state from the given protobuf message.
593 pub fn from_proto(
594 replica_id: ReplicaId,
595 capability: Capability,
596 message: proto::BufferState,
597 file: Option<Arc<dyn File>>,
598 ) -> Result<Self> {
599 let buffer_id = BufferId::new(message.id)
600 .with_context(|| anyhow!("Could not deserialize buffer_id"))?;
601 let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text);
602 let mut this = Self::build(buffer, message.diff_base, file, capability);
603 this.text.set_line_ending(proto::deserialize_line_ending(
604 rpc::proto::LineEnding::from_i32(message.line_ending)
605 .ok_or_else(|| anyhow!("missing line_ending"))?,
606 ));
607 this.saved_version = proto::deserialize_version(&message.saved_version);
608 this.saved_mtime = message.saved_mtime.map(|time| time.into());
609 Ok(this)
610 }
611
612 /// Serialize the buffer's state to a protobuf message.
613 pub fn to_proto(&self) -> proto::BufferState {
614 proto::BufferState {
615 id: self.remote_id().into(),
616 file: self.file.as_ref().map(|f| f.to_proto()),
617 base_text: self.base_text().to_string(),
618 diff_base: self.diff_base.as_ref().map(|h| h.to_string()),
619 line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
620 saved_version: proto::serialize_version(&self.saved_version),
621 saved_mtime: self.saved_mtime.map(|time| time.into()),
622 }
623 }
624
625 /// Serialize as protobufs all of the changes to the buffer since the given version.
626 pub fn serialize_ops(
627 &self,
628 since: Option<clock::Global>,
629 cx: &AppContext,
630 ) -> Task<Vec<proto::Operation>> {
631 let mut operations = Vec::new();
632 operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));
633
634 operations.extend(self.remote_selections.iter().map(|(_, set)| {
635 proto::serialize_operation(&Operation::UpdateSelections {
636 selections: set.selections.clone(),
637 lamport_timestamp: set.lamport_timestamp,
638 line_mode: set.line_mode,
639 cursor_shape: set.cursor_shape,
640 })
641 }));
642
643 for (server_id, diagnostics) in &self.diagnostics {
644 operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
645 lamport_timestamp: self.diagnostics_timestamp,
646 server_id: *server_id,
647 diagnostics: diagnostics.iter().cloned().collect(),
648 }));
649 }
650
651 operations.push(proto::serialize_operation(
652 &Operation::UpdateCompletionTriggers {
653 triggers: self.completion_triggers.clone(),
654 lamport_timestamp: self.completion_triggers_timestamp,
655 },
656 ));
657
658 let text_operations = self.text.operations().clone();
659 cx.background_executor().spawn(async move {
660 let since = since.unwrap_or_default();
661 operations.extend(
662 text_operations
663 .iter()
664 .filter(|(_, op)| !since.observed(op.timestamp()))
665 .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
666 );
667 operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
668 operations
669 })
670 }
671
672 /// Assign a language to the buffer, returning the buffer.
673 pub fn with_language(mut self, language: Arc<Language>, cx: &mut ModelContext<Self>) -> Self {
674 self.set_language(Some(language), cx);
675 self
676 }
677
678 /// Returns the [Capability] of this buffer.
679 pub fn capability(&self) -> Capability {
680 self.capability
681 }
682
683 /// Whether this buffer can only be read.
684 pub fn read_only(&self) -> bool {
685 self.capability == Capability::ReadOnly
686 }
687
688 /// Builds a [Buffer] with the given underlying [TextBuffer], diff base, [File] and [Capability].
689 pub fn build(
690 buffer: TextBuffer,
691 diff_base: Option<String>,
692 file: Option<Arc<dyn File>>,
693 capability: Capability,
694 ) -> Self {
695 let saved_mtime = file.as_ref().and_then(|file| file.mtime());
696
697 Self {
698 saved_mtime,
699 saved_version: buffer.version(),
700 reload_task: None,
701 transaction_depth: 0,
702 was_dirty_before_starting_transaction: None,
703 text: buffer,
704 diff_base: diff_base
705 .map(|mut raw_diff_base| {
706 LineEnding::normalize(&mut raw_diff_base);
707 raw_diff_base
708 })
709 .map(Rope::from),
710 diff_base_version: 0,
711 git_diff: git::diff::BufferDiff::new(),
712 file,
713 capability,
714 syntax_map: Mutex::new(SyntaxMap::new()),
715 parsing_in_background: false,
716 parse_count: 0,
717 sync_parse_timeout: Duration::from_millis(1),
718 autoindent_requests: Default::default(),
719 pending_autoindent: Default::default(),
720 language: None,
721 remote_selections: Default::default(),
722 selections_update_count: 0,
723 diagnostics: Default::default(),
724 diagnostics_update_count: 0,
725 diagnostics_timestamp: Default::default(),
726 file_update_count: 0,
727 git_diff_update_count: 0,
728 completion_triggers: Default::default(),
729 completion_triggers_timestamp: Default::default(),
730 deferred_ops: OperationQueue::new(),
731 has_conflict: false,
732 }
733 }
734
735 /// Retrieve a snapshot of the buffer's current state. This is computationally
736 /// cheap, and allows reading from the buffer on a background thread.
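    ///
    /// A minimal sketch of reading from a snapshot on a background thread
    /// (assumes `buffer` is a `Model<Buffer>` and `cx` is an `AppContext`):
    ///
    /// ```ignore
    /// let snapshot = buffer.read(cx).snapshot();
    /// cx.background_executor()
    ///     .spawn(async move {
    ///         // The snapshot is immutable, so it can be read off the main thread.
    ///         let row_count = snapshot.max_point().row + 1;
    ///         dbg!(row_count);
    ///     })
    ///     .detach();
    /// ```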
737 pub fn snapshot(&self) -> BufferSnapshot {
738 let text = self.text.snapshot();
739 let mut syntax_map = self.syntax_map.lock();
740 syntax_map.interpolate(&text);
741 let syntax = syntax_map.snapshot();
742
743 BufferSnapshot {
744 text,
745 syntax,
746 git_diff: self.git_diff.clone(),
747 file: self.file.clone(),
748 remote_selections: self.remote_selections.clone(),
749 diagnostics: self.diagnostics.clone(),
750 diagnostics_update_count: self.diagnostics_update_count,
751 file_update_count: self.file_update_count,
752 git_diff_update_count: self.git_diff_update_count,
753 language: self.language.clone(),
754 parse_count: self.parse_count,
755 selections_update_count: self.selections_update_count,
756 }
757 }
758
759 #[cfg(test)]
760 pub(crate) fn as_text_snapshot(&self) -> &text::BufferSnapshot {
761 &self.text
762 }
763
764 /// Retrieve a snapshot of the buffer's raw text, without any
765 /// language-related state like the syntax tree or diagnostics.
766 pub fn text_snapshot(&self) -> text::BufferSnapshot {
767 self.text.snapshot()
768 }
769
770 /// The file associated with the buffer, if any.
771 pub fn file(&self) -> Option<&Arc<dyn File>> {
772 self.file.as_ref()
773 }
774
775 /// The version of the buffer that was last saved or reloaded from disk.
776 pub fn saved_version(&self) -> &clock::Global {
777 &self.saved_version
778 }
779
780 /// The mtime of the buffer's file when the buffer was last saved or reloaded from disk.
781 pub fn saved_mtime(&self) -> Option<SystemTime> {
782 self.saved_mtime
783 }
784
785 /// Assign a language to the buffer.
786 pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut ModelContext<Self>) {
787 self.parse_count += 1;
788 self.syntax_map.lock().clear();
789 self.language = language;
790 self.reparse(cx);
791 cx.emit(Event::LanguageChanged);
792 }
793
794 /// Assign a language registry to the buffer. This allows the buffer to retrieve
795 /// other languages if parts of the buffer are written in different languages.
796 pub fn set_language_registry(&mut self, language_registry: Arc<LanguageRegistry>) {
797 self.syntax_map
798 .lock()
799 .set_language_registry(language_registry);
800 }
801
802 /// Assign the buffer a new [Capability].
803 pub fn set_capability(&mut self, capability: Capability, cx: &mut ModelContext<Self>) {
804 self.capability = capability;
805 cx.emit(Event::CapabilityChanged)
806 }
807
808 /// This method is called to signal that the buffer has been saved.
809 pub fn did_save(
810 &mut self,
811 version: clock::Global,
812 mtime: Option<SystemTime>,
813 cx: &mut ModelContext<Self>,
814 ) {
815 self.saved_version = version;
816 self.has_conflict = false;
817 self.saved_mtime = mtime;
818 cx.emit(Event::Saved);
819 cx.notify();
820 }
821
822 /// Reloads the contents of the buffer from disk.
823 pub fn reload(
824 &mut self,
825 cx: &mut ModelContext<Self>,
826 ) -> oneshot::Receiver<Option<Transaction>> {
827 let (tx, rx) = futures::channel::oneshot::channel();
828 let prev_version = self.text.version();
829 self.reload_task = Some(cx.spawn(|this, mut cx| async move {
830 let Some((new_mtime, new_text)) = this.update(&mut cx, |this, cx| {
831 let file = this.file.as_ref()?.as_local()?;
832 Some((file.mtime(), file.load(cx)))
833 })?
834 else {
835 return Ok(());
836 };
837
838 let new_text = new_text.await?;
839 let diff = this
840 .update(&mut cx, |this, cx| this.diff(new_text.clone(), cx))?
841 .await;
842 this.update(&mut cx, |this, cx| {
843 if this.version() == diff.base_version {
844 this.finalize_last_transaction();
845 this.apply_diff(diff, cx);
846 tx.send(this.finalize_last_transaction().cloned()).ok();
847 this.has_conflict = false;
848 this.did_reload(this.version(), this.line_ending(), new_mtime, cx);
849 } else {
850 if !diff.edits.is_empty()
851 || this
852 .edits_since::<usize>(&diff.base_version)
853 .next()
854 .is_some()
855 {
856 this.has_conflict = true;
857 }
858
859 this.did_reload(prev_version, this.line_ending(), this.saved_mtime, cx);
860 }
861
862 this.reload_task.take();
863 })
864 }));
865 rx
866 }
867
868 /// This method is called to signal that the buffer has been reloaded.
869 pub fn did_reload(
870 &mut self,
871 version: clock::Global,
872 line_ending: LineEnding,
873 mtime: Option<SystemTime>,
874 cx: &mut ModelContext<Self>,
875 ) {
876 self.saved_version = version;
877 self.text.set_line_ending(line_ending);
878 self.saved_mtime = mtime;
879 if let Some(file) = self.file.as_ref().and_then(|f| f.as_local()) {
880 file.buffer_reloaded(
881 self.remote_id(),
882 &self.saved_version,
883 self.line_ending(),
884 self.saved_mtime,
885 cx,
886 );
887 }
888 cx.emit(Event::Reloaded);
889 cx.notify();
890 }
891
892 /// Updates the [File] backing this buffer. This should be called when
893 /// the file has changed or has been deleted.
894 pub fn file_updated(&mut self, new_file: Arc<dyn File>, cx: &mut ModelContext<Self>) {
895 let mut file_changed = false;
896
897 if let Some(old_file) = self.file.as_ref() {
898 if new_file.path() != old_file.path() {
899 file_changed = true;
900 }
901
902 if new_file.is_deleted() {
903 if !old_file.is_deleted() {
904 file_changed = true;
905 if !self.is_dirty() {
906 cx.emit(Event::DirtyChanged);
907 }
908 }
909 } else {
910 let new_mtime = new_file.mtime();
911 if new_mtime != old_file.mtime() {
912 file_changed = true;
913
914 if !self.is_dirty() {
915 self.reload(cx).close();
916 }
917 }
918 }
919 } else {
920 file_changed = true;
921 };
922
923 self.file = Some(new_file);
924 if file_changed {
925 self.file_update_count += 1;
926 cx.emit(Event::FileHandleChanged);
927 cx.notify();
928 }
929 }
930
    /// Returns the current diff base; see [Buffer::set_diff_base].
932 pub fn diff_base(&self) -> Option<&Rope> {
933 self.diff_base.as_ref()
934 }
935
936 /// Sets the text that will be used to compute a Git diff
937 /// against the buffer text.
938 pub fn set_diff_base(&mut self, diff_base: Option<String>, cx: &mut ModelContext<Self>) {
939 self.diff_base = diff_base
940 .map(|mut raw_diff_base| {
941 LineEnding::normalize(&mut raw_diff_base);
942 raw_diff_base
943 })
944 .map(Rope::from);
945 self.diff_base_version += 1;
946 if let Some(recalc_task) = self.git_diff_recalc(cx) {
947 cx.spawn(|buffer, mut cx| async move {
948 recalc_task.await;
949 buffer
950 .update(&mut cx, |_, cx| {
951 cx.emit(Event::DiffBaseChanged);
952 })
953 .ok();
954 })
955 .detach();
956 }
957 }
958
    /// Returns a version number that changes each time a new diff base is set for the buffer.
960 pub fn diff_base_version(&self) -> usize {
961 self.diff_base_version
962 }
963
964 /// Recomputes the Git diff status.
965 pub fn git_diff_recalc(&mut self, cx: &mut ModelContext<Self>) -> Option<Task<()>> {
966 let diff_base = self.diff_base.clone()?;
967 let snapshot = self.snapshot();
968
969 let mut diff = self.git_diff.clone();
970 let diff = cx.background_executor().spawn(async move {
971 diff.update(&diff_base, &snapshot).await;
972 diff
973 });
974
975 Some(cx.spawn(|this, mut cx| async move {
976 let buffer_diff = diff.await;
977 this.update(&mut cx, |this, cx| {
978 this.git_diff = buffer_diff;
979 this.git_diff_update_count += 1;
980 cx.emit(Event::DiffUpdated);
981 })
982 .ok();
983 }))
984 }
985
986 /// Returns the primary [Language] assigned to this [Buffer].
987 pub fn language(&self) -> Option<&Arc<Language>> {
988 self.language.as_ref()
989 }
990
991 /// Returns the [Language] at the given location.
992 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
993 let offset = position.to_offset(self);
994 self.syntax_map
995 .lock()
996 .layers_for_range(offset..offset, &self.text)
997 .last()
998 .map(|info| info.language.clone())
999 .or_else(|| self.language.clone())
1000 }
1001
1002 /// The number of times the buffer was parsed.
1003 pub fn parse_count(&self) -> usize {
1004 self.parse_count
1005 }
1006
1007 /// The number of times selections were updated.
1008 pub fn selections_update_count(&self) -> usize {
1009 self.selections_update_count
1010 }
1011
1012 /// The number of times diagnostics were updated.
1013 pub fn diagnostics_update_count(&self) -> usize {
1014 self.diagnostics_update_count
1015 }
1016
1017 /// The number of times the underlying file was updated.
1018 pub fn file_update_count(&self) -> usize {
1019 self.file_update_count
1020 }
1021
1022 /// The number of times the git diff status was updated.
1023 pub fn git_diff_update_count(&self) -> usize {
1024 self.git_diff_update_count
1025 }
1026
1027 /// Whether the buffer is being parsed in the background.
1028 #[cfg(any(test, feature = "test-support"))]
1029 pub fn is_parsing(&self) -> bool {
1030 self.parsing_in_background
1031 }
1032
1033 /// Indicates whether the buffer contains any regions that may be
1034 /// written in a language that hasn't been loaded yet.
1035 pub fn contains_unknown_injections(&self) -> bool {
1036 self.syntax_map.lock().contains_unknown_injections()
1037 }
1038
1039 #[cfg(test)]
1040 pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
1041 self.sync_parse_timeout = timeout;
1042 }
1043
1044 /// Called after an edit to synchronize the buffer's main parse tree with
1045 /// the buffer's new underlying state.
1046 ///
1047 /// Locks the syntax map and interpolates the edits since the last reparse
1048 /// into the foreground syntax tree.
1049 ///
1050 /// Then takes a stable snapshot of the syntax map before unlocking it.
1051 /// The snapshot with the interpolated edits is sent to a background thread,
1052 /// where we ask Tree-sitter to perform an incremental parse.
1053 ///
1054 /// Meanwhile, in the foreground, we block the main thread for up to 1ms
1055 /// waiting on the parse to complete. As soon as it completes, we proceed
1056 /// synchronously, unless a 1ms timeout elapses.
1057 ///
    /// If we time out waiting on the parse, we spawn a second task that waits
    /// until the parse does complete, and we return with the interpolated tree
    /// still in the foreground. When the background parse completes, it calls
    /// back into the main thread and assigns the newly parsed state.
1062 ///
1063 /// If the buffer or grammar changed since the start of the background parse,
1064 /// initiate an additional reparse recursively. To avoid concurrent parses
1065 /// for the same buffer, we only initiate a new parse if we are not already
1066 /// parsing in the background.
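    ///
    /// A hedged sketch of how a test might observe this behavior (assumes a
    /// `gpui::TestAppContext` named `cx`, a `Model<Buffer>` named `buffer`, and
    /// an `Arc<Language>` named `language`):
    ///
    /// ```ignore
    /// buffer.update(cx, |buffer, cx| buffer.set_language(Some(language), cx));
    /// // Let any background parse finish before asserting on syntax state.
    /// cx.executor().run_until_parked();
    /// buffer.update(cx, |buffer, _| assert!(!buffer.is_parsing()));
    /// ```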
1067 pub fn reparse(&mut self, cx: &mut ModelContext<Self>) {
1068 if self.parsing_in_background {
1069 return;
1070 }
1071 let language = if let Some(language) = self.language.clone() {
1072 language
1073 } else {
1074 return;
1075 };
1076
1077 let text = self.text_snapshot();
1078 let parsed_version = self.version();
1079
1080 let mut syntax_map = self.syntax_map.lock();
1081 syntax_map.interpolate(&text);
1082 let language_registry = syntax_map.language_registry();
1083 let mut syntax_snapshot = syntax_map.snapshot();
1084 drop(syntax_map);
1085
1086 let parse_task = cx.background_executor().spawn({
1087 let language = language.clone();
1088 let language_registry = language_registry.clone();
1089 async move {
1090 syntax_snapshot.reparse(&text, language_registry, language);
1091 syntax_snapshot
1092 }
1093 });
1094
1095 match cx
1096 .background_executor()
1097 .block_with_timeout(self.sync_parse_timeout, parse_task)
1098 {
1099 Ok(new_syntax_snapshot) => {
1100 self.did_finish_parsing(new_syntax_snapshot, cx);
1101 return;
1102 }
1103 Err(parse_task) => {
1104 self.parsing_in_background = true;
1105 cx.spawn(move |this, mut cx| async move {
1106 let new_syntax_map = parse_task.await;
1107 this.update(&mut cx, move |this, cx| {
1108 let grammar_changed =
1109 this.language.as_ref().map_or(true, |current_language| {
1110 !Arc::ptr_eq(&language, current_language)
1111 });
1112 let language_registry_changed = new_syntax_map
1113 .contains_unknown_injections()
1114 && language_registry.map_or(false, |registry| {
1115 registry.version() != new_syntax_map.language_registry_version()
1116 });
1117 let parse_again = language_registry_changed
1118 || grammar_changed
1119 || this.version.changed_since(&parsed_version);
1120 this.did_finish_parsing(new_syntax_map, cx);
1121 this.parsing_in_background = false;
1122 if parse_again {
1123 this.reparse(cx);
1124 }
1125 })
1126 .ok();
1127 })
1128 .detach();
1129 }
1130 }
1131 }
1132
1133 fn did_finish_parsing(&mut self, syntax_snapshot: SyntaxSnapshot, cx: &mut ModelContext<Self>) {
1134 self.parse_count += 1;
1135 self.syntax_map.lock().did_parse(syntax_snapshot);
1136 self.request_autoindent(cx);
1137 cx.emit(Event::Reparsed);
1138 cx.notify();
1139 }
1140
1141 /// Assign to the buffer a set of diagnostics created by a given language server.
1142 pub fn update_diagnostics(
1143 &mut self,
1144 server_id: LanguageServerId,
1145 diagnostics: DiagnosticSet,
1146 cx: &mut ModelContext<Self>,
1147 ) {
1148 let lamport_timestamp = self.text.lamport_clock.tick();
1149 let op = Operation::UpdateDiagnostics {
1150 server_id,
1151 diagnostics: diagnostics.iter().cloned().collect(),
1152 lamport_timestamp,
1153 };
1154 self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
1155 self.send_operation(op, cx);
1156 }
1157
1158 fn request_autoindent(&mut self, cx: &mut ModelContext<Self>) {
1159 if let Some(indent_sizes) = self.compute_autoindents() {
1160 let indent_sizes = cx.background_executor().spawn(indent_sizes);
1161 match cx
1162 .background_executor()
1163 .block_with_timeout(Duration::from_micros(500), indent_sizes)
1164 {
1165 Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
1166 Err(indent_sizes) => {
1167 self.pending_autoindent = Some(cx.spawn(|this, mut cx| async move {
1168 let indent_sizes = indent_sizes.await;
1169 this.update(&mut cx, |this, cx| {
1170 this.apply_autoindents(indent_sizes, cx);
1171 })
1172 .ok();
1173 }));
1174 }
1175 }
1176 } else {
1177 self.autoindent_requests.clear();
1178 }
1179 }
1180
1181 fn compute_autoindents(&self) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>>> {
1182 let max_rows_between_yields = 100;
1183 let snapshot = self.snapshot();
1184 if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
1185 return None;
1186 }
1187
1188 let autoindent_requests = self.autoindent_requests.clone();
1189 Some(async move {
1190 let mut indent_sizes = BTreeMap::new();
1191 for request in autoindent_requests {
1192 // Resolve each edited range to its row in the current buffer and in the
1193 // buffer before this batch of edits.
1194 let mut row_ranges = Vec::new();
1195 let mut old_to_new_rows = BTreeMap::new();
1196 let mut language_indent_sizes_by_new_row = Vec::new();
1197 for entry in &request.entries {
1198 let position = entry.range.start;
1199 let new_row = position.to_point(&snapshot).row;
1200 let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
1201 language_indent_sizes_by_new_row.push((new_row, entry.indent_size));
1202
1203 if !entry.first_line_is_new {
1204 let old_row = position.to_point(&request.before_edit).row;
1205 old_to_new_rows.insert(old_row, new_row);
1206 }
1207 row_ranges.push((new_row..new_end_row, entry.original_indent_column));
1208 }
1209
1210 // Build a map containing the suggested indentation for each of the edited lines
1211 // with respect to the state of the buffer before these edits. This map is keyed
1212 // by the rows for these lines in the current state of the buffer.
1213 let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
1214 let old_edited_ranges =
1215 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
1216 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1217 let mut language_indent_size = IndentSize::default();
1218 for old_edited_range in old_edited_ranges {
1219 let suggestions = request
1220 .before_edit
1221 .suggest_autoindents(old_edited_range.clone())
1222 .into_iter()
1223 .flatten();
1224 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
1225 if let Some(suggestion) = suggestion {
1226 let new_row = *old_to_new_rows.get(&old_row).unwrap();
1227
1228 // Find the indent size based on the language for this row.
1229 while let Some((row, size)) = language_indent_sizes.peek() {
1230 if *row > new_row {
1231 break;
1232 }
1233 language_indent_size = *size;
1234 language_indent_sizes.next();
1235 }
1236
1237 let suggested_indent = old_to_new_rows
1238 .get(&suggestion.basis_row)
1239 .and_then(|from_row| {
1240 Some(old_suggestions.get(from_row).copied()?.0)
1241 })
1242 .unwrap_or_else(|| {
1243 request
1244 .before_edit
1245 .indent_size_for_line(suggestion.basis_row)
1246 })
1247 .with_delta(suggestion.delta, language_indent_size);
1248 old_suggestions
1249 .insert(new_row, (suggested_indent, suggestion.within_error));
1250 }
1251 }
1252 yield_now().await;
1253 }
1254
1255 // In block mode, only compute indentation suggestions for the first line
1256 // of each insertion. Otherwise, compute suggestions for every inserted line.
1257 let new_edited_row_ranges = contiguous_ranges(
1258 row_ranges.iter().flat_map(|(range, _)| {
1259 if request.is_block_mode {
1260 range.start..range.start + 1
1261 } else {
1262 range.clone()
1263 }
1264 }),
1265 max_rows_between_yields,
1266 );
1267
1268 // Compute new suggestions for each line, but only include them in the result
1269 // if they differ from the old suggestion for that line.
1270 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1271 let mut language_indent_size = IndentSize::default();
1272 for new_edited_row_range in new_edited_row_ranges {
1273 let suggestions = snapshot
1274 .suggest_autoindents(new_edited_row_range.clone())
1275 .into_iter()
1276 .flatten();
1277 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
1278 if let Some(suggestion) = suggestion {
1279 // Find the indent size based on the language for this row.
1280 while let Some((row, size)) = language_indent_sizes.peek() {
1281 if *row > new_row {
1282 break;
1283 }
1284 language_indent_size = *size;
1285 language_indent_sizes.next();
1286 }
1287
1288 let suggested_indent = indent_sizes
1289 .get(&suggestion.basis_row)
1290 .copied()
1291 .unwrap_or_else(|| {
1292 snapshot.indent_size_for_line(suggestion.basis_row)
1293 })
1294 .with_delta(suggestion.delta, language_indent_size);
1295 if old_suggestions.get(&new_row).map_or(
1296 true,
1297 |(old_indentation, was_within_error)| {
1298 suggested_indent != *old_indentation
1299 && (!suggestion.within_error || *was_within_error)
1300 },
1301 ) {
1302 indent_sizes.insert(new_row, suggested_indent);
1303 }
1304 }
1305 }
1306 yield_now().await;
1307 }
1308
1309 // For each block of inserted text, adjust the indentation of the remaining
1310 // lines of the block by the same amount as the first line was adjusted.
1311 if request.is_block_mode {
1312 for (row_range, original_indent_column) in
1313 row_ranges
1314 .into_iter()
1315 .filter_map(|(range, original_indent_column)| {
1316 if range.len() > 1 {
1317 Some((range, original_indent_column?))
1318 } else {
1319 None
1320 }
1321 })
1322 {
1323 let new_indent = indent_sizes
1324 .get(&row_range.start)
1325 .copied()
1326 .unwrap_or_else(|| snapshot.indent_size_for_line(row_range.start));
1327 let delta = new_indent.len as i64 - original_indent_column as i64;
1328 if delta != 0 {
1329 for row in row_range.skip(1) {
1330 indent_sizes.entry(row).or_insert_with(|| {
1331 let mut size = snapshot.indent_size_for_line(row);
1332 if size.kind == new_indent.kind {
1333 match delta.cmp(&0) {
1334 Ordering::Greater => size.len += delta as u32,
1335 Ordering::Less => {
1336 size.len = size.len.saturating_sub(-delta as u32)
1337 }
1338 Ordering::Equal => {}
1339 }
1340 }
1341 size
1342 });
1343 }
1344 }
1345 }
1346 }
1347 }
1348
1349 indent_sizes
1350 })
1351 }
1352
1353 fn apply_autoindents(
1354 &mut self,
1355 indent_sizes: BTreeMap<u32, IndentSize>,
1356 cx: &mut ModelContext<Self>,
1357 ) {
1358 self.autoindent_requests.clear();
1359
1360 let edits: Vec<_> = indent_sizes
1361 .into_iter()
1362 .filter_map(|(row, indent_size)| {
1363 let current_size = indent_size_for_line(self, row);
1364 Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
1365 })
1366 .collect();
1367
1368 self.edit(edits, None, cx);
1369 }
1370
1371 /// Create a minimal edit that will cause the given row to be indented
1372 /// with the given size. After applying this edit, the length of the line
1373 /// will always be at least `new_size.len`.
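    ///
    /// An illustrative example (values chosen arbitrarily): growing a row's
    /// indentation from two spaces to four inserts the missing two spaces at
    /// the start of the row.
    ///
    /// ```ignore
    /// let edit = Buffer::edit_for_indent_size_adjustment(
    ///     3,
    ///     IndentSize { len: 2, kind: IndentKind::Space },
    ///     IndentSize { len: 4, kind: IndentKind::Space },
    /// );
    /// assert_eq!(
    ///     edit,
    ///     Some((Point::new(3, 0)..Point::new(3, 0), "  ".to_string()))
    /// );
    /// ```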
1374 pub fn edit_for_indent_size_adjustment(
1375 row: u32,
1376 current_size: IndentSize,
1377 new_size: IndentSize,
1378 ) -> Option<(Range<Point>, String)> {
1379 if new_size.kind == current_size.kind {
            match new_size.len.cmp(&current_size.len) {
1381 Ordering::Greater => {
1382 let point = Point::new(row, 0);
1383 Some((
1384 point..point,
1385 iter::repeat(new_size.char())
1386 .take((new_size.len - current_size.len) as usize)
1387 .collect::<String>(),
1388 ))
1389 }
1390
1391 Ordering::Less => Some((
1392 Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
1393 String::new(),
1394 )),
1395
1396 Ordering::Equal => None,
1397 }
1398 } else {
1399 Some((
1400 Point::new(row, 0)..Point::new(row, current_size.len),
1401 iter::repeat(new_size.char())
1402 .take(new_size.len as usize)
1403 .collect::<String>(),
1404 ))
1405 }
1406 }
1407
1408 /// Spawns a background task that asynchronously computes a `Diff` between the buffer's text
1409 /// and the given new text.
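    ///
    /// A sketch mirroring how [`Buffer::reload`] uses this (assumes an async
    /// context `cx`, a `Model<Buffer>` named `buffer`, and a function returning
    /// `anyhow::Result`):
    ///
    /// ```ignore
    /// let diff = buffer.update(&mut cx, |buffer, cx| buffer.diff(new_text, cx))?;
    /// let diff = diff.await;
    /// buffer.update(&mut cx, |buffer, cx| {
    ///     buffer.apply_diff(diff, cx);
    /// })?;
    /// ```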
1410 pub fn diff(&self, mut new_text: String, cx: &AppContext) -> Task<Diff> {
1411 let old_text = self.as_rope().clone();
1412 let base_version = self.version();
1413 cx.background_executor()
1414 .spawn_labeled(*BUFFER_DIFF_TASK, async move {
1415 let old_text = old_text.to_string();
1416 let line_ending = LineEnding::detect(&new_text);
1417 LineEnding::normalize(&mut new_text);
1418
1419 let diff = TextDiff::from_chars(old_text.as_str(), new_text.as_str());
1420 let empty: Arc<str> = "".into();
1421
1422 let mut edits = Vec::new();
1423 let mut old_offset = 0;
1424 let mut new_offset = 0;
1425 let mut last_edit: Option<(Range<usize>, Range<usize>)> = None;
1426 for change in diff.iter_all_changes().map(Some).chain([None]) {
1427 if let Some(change) = &change {
1428 let len = change.value().len();
1429 match change.tag() {
1430 ChangeTag::Equal => {
1431 old_offset += len;
1432 new_offset += len;
1433 }
1434 ChangeTag::Delete => {
1435 let old_end_offset = old_offset + len;
1436 if let Some((last_old_range, _)) = &mut last_edit {
1437 last_old_range.end = old_end_offset;
1438 } else {
1439 last_edit =
1440 Some((old_offset..old_end_offset, new_offset..new_offset));
1441 }
1442 old_offset = old_end_offset;
1443 }
1444 ChangeTag::Insert => {
1445 let new_end_offset = new_offset + len;
1446 if let Some((_, last_new_range)) = &mut last_edit {
1447 last_new_range.end = new_end_offset;
1448 } else {
1449 last_edit =
1450 Some((old_offset..old_offset, new_offset..new_end_offset));
1451 }
1452 new_offset = new_end_offset;
1453 }
1454 }
1455 }
1456
1457 if let Some((old_range, new_range)) = &last_edit {
1458 if old_offset > old_range.end
1459 || new_offset > new_range.end
1460 || change.is_none()
1461 {
1462 let text = if new_range.is_empty() {
1463 empty.clone()
1464 } else {
1465 new_text[new_range.clone()].into()
1466 };
1467 edits.push((old_range.clone(), text));
1468 last_edit.take();
1469 }
1470 }
1471 }
1472
1473 Diff {
1474 base_version,
1475 line_ending,
1476 edits,
1477 }
1478 })
1479 }
1480
1481 /// Spawns a background task that searches the buffer for any whitespace
    /// at the ends of lines, and returns a `Diff` that removes that whitespace.
1483 pub fn remove_trailing_whitespace(&self, cx: &AppContext) -> Task<Diff> {
1484 let old_text = self.as_rope().clone();
1485 let line_ending = self.line_ending();
1486 let base_version = self.version();
1487 cx.background_executor().spawn(async move {
1488 let ranges = trailing_whitespace_ranges(&old_text);
1489 let empty = Arc::<str>::from("");
1490 Diff {
1491 base_version,
1492 line_ending,
1493 edits: ranges
1494 .into_iter()
1495 .map(|range| (range, empty.clone()))
1496 .collect(),
1497 }
1498 })
1499 }
1500
1501 /// Ensures that the buffer ends with a single newline character, and
1502 /// no other whitespace.
1503 pub fn ensure_final_newline(&mut self, cx: &mut ModelContext<Self>) {
1504 let len = self.len();
1505 let mut offset = len;
1506 for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
1507 let non_whitespace_len = chunk
1508 .trim_end_matches(|c: char| c.is_ascii_whitespace())
1509 .len();
1510 offset -= chunk.len();
1511 offset += non_whitespace_len;
1512 if non_whitespace_len != 0 {
1513 if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
1514 return;
1515 }
1516 break;
1517 }
1518 }
1519 self.edit([(offset..len, "\n")], None, cx);
1520 }
1521
1522 /// Applies a diff to the buffer. If the buffer has changed since the given diff was
1523 /// calculated, then adjust the diff to account for those changes, and discard any
1524 /// parts of the diff that conflict with those changes.
1525 pub fn apply_diff(&mut self, diff: Diff, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
1526 // Check for any edits to the buffer that have occurred since this diff
1527 // was computed.
1528 let snapshot = self.snapshot();
1529 let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
1530 let mut delta = 0;
1531 let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
1532 while let Some(edit_since) = edits_since.peek() {
1533 // If the edit occurs after a diff hunk, then it does not
1534 // affect that hunk.
1535 if edit_since.old.start > range.end {
1536 break;
1537 }
1538 // If the edit precedes the diff hunk, then adjust the hunk
1539 // to reflect the edit.
1540 else if edit_since.old.end < range.start {
1541 delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
1542 edits_since.next();
1543 }
1544 // If the edit intersects a diff hunk, then discard that hunk.
1545 else {
1546 return None;
1547 }
1548 }
1549
1550 let start = (range.start as i64 + delta) as usize;
1551 let end = (range.end as i64 + delta) as usize;
1552 Some((start..end, new_text))
1553 });
1554
1555 self.start_transaction();
1556 self.text.set_line_ending(diff.line_ending);
1557 self.edit(adjusted_edits, None, cx);
1558 self.end_transaction(cx)
1559 }
1560
1561 /// Checks if the buffer has unsaved changes.
1562 pub fn is_dirty(&self) -> bool {
1563 self.has_conflict
1564 || self.has_edits_since(&self.saved_version)
1565 || self
1566 .file
1567 .as_ref()
1568 .map_or(false, |file| file.is_deleted() || !file.is_created())
1569 }
1570
1571 /// Checks if the buffer and its file have both changed since the buffer
1572 /// was last saved or reloaded.
1573 pub fn has_conflict(&self) -> bool {
1574 self.has_conflict
1575 || self.file.as_ref().map_or(false, |file| {
1576 file.mtime() > self.saved_mtime && self.has_edits_since(&self.saved_version)
1577 })
1578 }
1579
1580 /// Gets a [`Subscription`] that tracks all of the changes to the buffer's text.
1581 pub fn subscribe(&mut self) -> Subscription {
1582 self.text.subscribe()
1583 }
1584
1585 /// Starts a transaction, if one is not already in-progress. When undoing or
1586 /// redoing edits, all of the edits performed within a transaction are undone
1587 /// or redone together.
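    ///
    /// A small sketch (assumes a `Model<Buffer>` named `buffer` and an
    /// `AppContext` named `cx`): both edits below are undone together.
    ///
    /// ```ignore
    /// buffer.update(cx, |buffer, cx| {
    ///     buffer.start_transaction();
    ///     buffer.edit([(0..0, "fn main() {\n")], None, cx);
    ///     buffer.edit([(12..12, "}\n")], None, cx);
    ///     buffer.end_transaction(cx);
    /// });
    /// ```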
1588 pub fn start_transaction(&mut self) -> Option<TransactionId> {
1589 self.start_transaction_at(Instant::now())
1590 }
1591
1592 /// Starts a transaction, providing the current time. Subsequent transactions
1593 /// that occur within a short period of time will be grouped together. This
1594 /// is controlled by the buffer's undo grouping duration.
1595 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
1596 self.transaction_depth += 1;
1597 if self.was_dirty_before_starting_transaction.is_none() {
1598 self.was_dirty_before_starting_transaction = Some(self.is_dirty());
1599 }
1600 self.text.start_transaction_at(now)
1601 }
1602
1603 /// Terminates the current transaction, if this is the outermost transaction.
1604 pub fn end_transaction(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
1605 self.end_transaction_at(Instant::now(), cx)
1606 }
1607
1608 /// Terminates the current transaction, providing the current time. Subsequent transactions
1609 /// that occur within a short period of time will be grouped together. This
1610 /// is controlled by the buffer's undo grouping duration.
1611 pub fn end_transaction_at(
1612 &mut self,
1613 now: Instant,
1614 cx: &mut ModelContext<Self>,
1615 ) -> Option<TransactionId> {
1616 assert!(self.transaction_depth > 0);
1617 self.transaction_depth -= 1;
1618 let was_dirty = if self.transaction_depth == 0 {
1619 self.was_dirty_before_starting_transaction.take().unwrap()
1620 } else {
1621 false
1622 };
1623 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
1624 self.did_edit(&start_version, was_dirty, cx);
1625 Some(transaction_id)
1626 } else {
1627 None
1628 }
1629 }
1630
1631 /// Manually add a transaction to the buffer's undo history.
1632 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
1633 self.text.push_transaction(transaction, now);
1634 }
1635
1636 /// Prevent the last transaction from being grouped with any subsequent transactions,
    /// even if they occur within the buffer's undo grouping duration.
1638 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
1639 self.text.finalize_last_transaction()
1640 }
1641
1642 /// Manually group all changes since a given transaction.
1643 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
1644 self.text.group_until_transaction(transaction_id);
1645 }
1646
1647 /// Manually remove a transaction from the buffer's undo history
1648 pub fn forget_transaction(&mut self, transaction_id: TransactionId) {
1649 self.text.forget_transaction(transaction_id);
1650 }
1651
1652 /// Manually merge two adjacent transactions in the buffer's undo history.
1653 pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
1654 self.text.merge_transactions(transaction, destination);
1655 }
1656
1657 /// Waits for the buffer to receive operations with the given timestamps.
1658 pub fn wait_for_edits(
1659 &mut self,
1660 edit_ids: impl IntoIterator<Item = clock::Lamport>,
1661 ) -> impl Future<Output = Result<()>> {
1662 self.text.wait_for_edits(edit_ids)
1663 }
1664
1665 /// Waits for the buffer to receive the operations necessary for resolving the given anchors.
1666 pub fn wait_for_anchors(
1667 &mut self,
1668 anchors: impl IntoIterator<Item = Anchor>,
1669 ) -> impl 'static + Future<Output = Result<()>> {
1670 self.text.wait_for_anchors(anchors)
1671 }
1672
1673 /// Waits for the buffer to receive operations up to the given version.
1674 pub fn wait_for_version(&mut self, version: clock::Global) -> impl Future<Output = Result<()>> {
1675 self.text.wait_for_version(version)
1676 }
1677
    /// Forces all futures returned by [`Buffer::wait_for_version`], [`Buffer::wait_for_edits`],
    /// or [`Buffer::wait_for_anchors`] to resolve with an error.
1680 pub fn give_up_waiting(&mut self) {
1681 self.text.give_up_waiting();
1682 }
1683
1684 /// Stores a set of selections that should be broadcasted to all of the buffer's replicas.
1685 pub fn set_active_selections(
1686 &mut self,
1687 selections: Arc<[Selection<Anchor>]>,
1688 line_mode: bool,
1689 cursor_shape: CursorShape,
1690 cx: &mut ModelContext<Self>,
1691 ) {
1692 let lamport_timestamp = self.text.lamport_clock.tick();
1693 self.remote_selections.insert(
1694 self.text.replica_id(),
1695 SelectionSet {
1696 selections: selections.clone(),
1697 lamport_timestamp,
1698 line_mode,
1699 cursor_shape,
1700 },
1701 );
1702 self.send_operation(
1703 Operation::UpdateSelections {
1704 selections,
1705 line_mode,
1706 lamport_timestamp,
1707 cursor_shape,
1708 },
1709 cx,
1710 );
1711 }
1712
1713 /// Clears the selections, so that other replicas of the buffer do not see any selections for
1714 /// this replica.
1715 pub fn remove_active_selections(&mut self, cx: &mut ModelContext<Self>) {
1716 if self
1717 .remote_selections
1718 .get(&self.text.replica_id())
1719 .map_or(true, |set| !set.selections.is_empty())
1720 {
1721 self.set_active_selections(Arc::from([]), false, Default::default(), cx);
1722 }
1723 }
1724
1725 /// Replaces the buffer's entire text.
1726 pub fn set_text<T>(&mut self, text: T, cx: &mut ModelContext<Self>) -> Option<clock::Lamport>
1727 where
1728 T: Into<Arc<str>>,
1729 {
1730 self.autoindent_requests.clear();
1731 self.edit([(0..self.len(), text)], None, cx)
1732 }
1733
1734 /// Applies the given edits to the buffer. Each edit is specified as a range of text to
1735 /// delete, and a string of text to insert at that location.
1736 ///
1737 /// If an [`AutoindentMode`] is provided, then the buffer will enqueue an auto-indent
1738 /// request for the edited ranges, which will be processed when the buffer finishes
1739 /// parsing.
1740 ///
1741 /// Parsing takes place at the end of a transaction, and may compute synchronously
1742 /// or asynchronously, depending on the changes.
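    ///
    /// # Example
    ///
    /// A minimal sketch of a plain edit without auto-indentation. The offsets and the `cx`
    /// handle are hypothetical (e.g. from a test context); not a doctest:
    ///
    /// ```ignore
    /// // Replace bytes 0..3 with "abc" and insert "!" at offset 10.
    /// buffer.edit([(0..3, "abc"), (10..10, "!")], None, cx);
    /// ```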
1743 pub fn edit<I, S, T>(
1744 &mut self,
1745 edits_iter: I,
1746 autoindent_mode: Option<AutoindentMode>,
1747 cx: &mut ModelContext<Self>,
1748 ) -> Option<clock::Lamport>
1749 where
1750 I: IntoIterator<Item = (Range<S>, T)>,
1751 S: ToOffset,
1752 T: Into<Arc<str>>,
1753 {
1754 // Skip invalid edits and coalesce contiguous ones.
1755 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
1756 for (range, new_text) in edits_iter {
1757 let mut range = range.start.to_offset(self)..range.end.to_offset(self);
1758 if range.start > range.end {
1759 mem::swap(&mut range.start, &mut range.end);
1760 }
1761 let new_text = new_text.into();
1762 if !new_text.is_empty() || !range.is_empty() {
1763 if let Some((prev_range, prev_text)) = edits.last_mut() {
1764 if prev_range.end >= range.start {
1765 prev_range.end = cmp::max(prev_range.end, range.end);
1766 *prev_text = format!("{prev_text}{new_text}").into();
1767 } else {
1768 edits.push((range, new_text));
1769 }
1770 } else {
1771 edits.push((range, new_text));
1772 }
1773 }
1774 }
1775 if edits.is_empty() {
1776 return None;
1777 }
1778
1779 self.start_transaction();
1780 self.pending_autoindent.take();
1781 let autoindent_request = autoindent_mode
1782 .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
1783
1784 let edit_operation = self.text.edit(edits.iter().cloned());
1785 let edit_id = edit_operation.timestamp();
1786
1787 if let Some((before_edit, mode)) = autoindent_request {
1788 let mut delta = 0isize;
1789 let entries = edits
1790 .into_iter()
1791 .enumerate()
1792 .zip(&edit_operation.as_edit().unwrap().new_text)
1793 .map(|((ix, (range, _)), new_text)| {
1794 let new_text_length = new_text.len();
1795 let old_start = range.start.to_point(&before_edit);
1796 let new_start = (delta + range.start as isize) as usize;
1797 delta += new_text_length as isize - (range.end as isize - range.start as isize);
1798
1799 let mut range_of_insertion_to_indent = 0..new_text_length;
1800 let mut first_line_is_new = false;
1801 let mut original_indent_column = None;
1802
1803 // When inserting an entire line at the beginning of an existing line,
1804 // treat the insertion as new.
1805 if new_text.contains('\n')
1806 && old_start.column <= before_edit.indent_size_for_line(old_start.row).len
1807 {
1808 first_line_is_new = true;
1809 }
1810
1811 // When inserting text starting with a newline, avoid auto-indenting the
1812 // previous line.
1813 if new_text.starts_with('\n') {
1814 range_of_insertion_to_indent.start += 1;
1815 first_line_is_new = true;
1816 }
1817
                // Avoid auto-indenting the line after the insertion.
1819 if let AutoindentMode::Block {
1820 original_indent_columns,
1821 } = &mode
1822 {
1823 original_indent_column =
1824 Some(original_indent_columns.get(ix).copied().unwrap_or_else(|| {
1825 indent_size_for_text(
1826 new_text[range_of_insertion_to_indent.clone()].chars(),
1827 )
1828 .len
1829 }));
1830 if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
1831 range_of_insertion_to_indent.end -= 1;
1832 }
1833 }
1834
1835 AutoindentRequestEntry {
1836 first_line_is_new,
1837 original_indent_column,
1838 indent_size: before_edit.language_indent_size_at(range.start, cx),
1839 range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
1840 ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
1841 }
1842 })
1843 .collect();
1844
1845 self.autoindent_requests.push(Arc::new(AutoindentRequest {
1846 before_edit,
1847 entries,
1848 is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
1849 }));
1850 }
1851
1852 self.end_transaction(cx);
1853 self.send_operation(Operation::Buffer(edit_operation), cx);
1854 Some(edit_id)
1855 }
1856
1857 fn did_edit(
1858 &mut self,
1859 old_version: &clock::Global,
1860 was_dirty: bool,
1861 cx: &mut ModelContext<Self>,
1862 ) {
1863 if self.edits_since::<usize>(old_version).next().is_none() {
1864 return;
1865 }
1866
1867 self.reparse(cx);
1868
1869 cx.emit(Event::Edited);
1870 if was_dirty != self.is_dirty() {
1871 cx.emit(Event::DirtyChanged);
1872 }
1873 cx.notify();
1874 }
1875
1876 /// Applies the given remote operations to the buffer.
1877 pub fn apply_ops<I: IntoIterator<Item = Operation>>(
1878 &mut self,
1879 ops: I,
1880 cx: &mut ModelContext<Self>,
1881 ) -> Result<()> {
1882 self.pending_autoindent.take();
1883 let was_dirty = self.is_dirty();
1884 let old_version = self.version.clone();
1885 let mut deferred_ops = Vec::new();
1886 let buffer_ops = ops
1887 .into_iter()
1888 .filter_map(|op| match op {
1889 Operation::Buffer(op) => Some(op),
1890 _ => {
1891 if self.can_apply_op(&op) {
1892 self.apply_op(op, cx);
1893 } else {
1894 deferred_ops.push(op);
1895 }
1896 None
1897 }
1898 })
1899 .collect::<Vec<_>>();
1900 self.text.apply_ops(buffer_ops)?;
1901 self.deferred_ops.insert(deferred_ops);
1902 self.flush_deferred_ops(cx);
1903 self.did_edit(&old_version, was_dirty, cx);
1904 // Notify independently of whether the buffer was edited as the operations could include a
1905 // selection update.
1906 cx.notify();
1907 Ok(())
1908 }
1909
1910 fn flush_deferred_ops(&mut self, cx: &mut ModelContext<Self>) {
1911 let mut deferred_ops = Vec::new();
1912 for op in self.deferred_ops.drain().iter().cloned() {
1913 if self.can_apply_op(&op) {
1914 self.apply_op(op, cx);
1915 } else {
1916 deferred_ops.push(op);
1917 }
1918 }
1919 self.deferred_ops.insert(deferred_ops);
1920 }
1921
1922 fn can_apply_op(&self, operation: &Operation) -> bool {
1923 match operation {
1924 Operation::Buffer(_) => {
1925 unreachable!("buffer operations should never be applied at this layer")
1926 }
1927 Operation::UpdateDiagnostics {
1928 diagnostics: diagnostic_set,
1929 ..
1930 } => diagnostic_set.iter().all(|diagnostic| {
1931 self.text.can_resolve(&diagnostic.range.start)
1932 && self.text.can_resolve(&diagnostic.range.end)
1933 }),
1934 Operation::UpdateSelections { selections, .. } => selections
1935 .iter()
1936 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
1937 Operation::UpdateCompletionTriggers { .. } => true,
1938 }
1939 }
1940
1941 fn apply_op(&mut self, operation: Operation, cx: &mut ModelContext<Self>) {
1942 match operation {
1943 Operation::Buffer(_) => {
1944 unreachable!("buffer operations should never be applied at this layer")
1945 }
1946 Operation::UpdateDiagnostics {
1947 server_id,
1948 diagnostics: diagnostic_set,
1949 lamport_timestamp,
1950 } => {
1951 let snapshot = self.snapshot();
1952 self.apply_diagnostic_update(
1953 server_id,
1954 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
1955 lamport_timestamp,
1956 cx,
1957 );
1958 }
1959 Operation::UpdateSelections {
1960 selections,
1961 lamport_timestamp,
1962 line_mode,
1963 cursor_shape,
1964 } => {
1965 if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id) {
1966 if set.lamport_timestamp > lamport_timestamp {
1967 return;
1968 }
1969 }
1970
1971 self.remote_selections.insert(
1972 lamport_timestamp.replica_id,
1973 SelectionSet {
1974 selections,
1975 lamport_timestamp,
1976 line_mode,
1977 cursor_shape,
1978 },
1979 );
1980 self.text.lamport_clock.observe(lamport_timestamp);
1981 self.selections_update_count += 1;
1982 }
1983 Operation::UpdateCompletionTriggers {
1984 triggers,
1985 lamport_timestamp,
1986 } => {
1987 self.completion_triggers = triggers;
1988 self.text.lamport_clock.observe(lamport_timestamp);
1989 }
1990 }
1991 }
1992
1993 fn apply_diagnostic_update(
1994 &mut self,
1995 server_id: LanguageServerId,
1996 diagnostics: DiagnosticSet,
1997 lamport_timestamp: clock::Lamport,
1998 cx: &mut ModelContext<Self>,
1999 ) {
2000 if lamport_timestamp > self.diagnostics_timestamp {
2001 let ix = self.diagnostics.binary_search_by_key(&server_id, |e| e.0);
2002 if diagnostics.len() == 0 {
2003 if let Ok(ix) = ix {
2004 self.diagnostics.remove(ix);
2005 }
2006 } else {
2007 match ix {
2008 Err(ix) => self.diagnostics.insert(ix, (server_id, diagnostics)),
2009 Ok(ix) => self.diagnostics[ix].1 = diagnostics,
2010 };
2011 }
2012 self.diagnostics_timestamp = lamport_timestamp;
2013 self.diagnostics_update_count += 1;
2014 self.text.lamport_clock.observe(lamport_timestamp);
2015 cx.notify();
2016 cx.emit(Event::DiagnosticsUpdated);
2017 }
2018 }
2019
2020 fn send_operation(&mut self, operation: Operation, cx: &mut ModelContext<Self>) {
2021 cx.emit(Event::Operation(operation));
2022 }
2023
2024 /// Removes the selections for a given peer.
2025 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut ModelContext<Self>) {
2026 self.remote_selections.remove(&replica_id);
2027 cx.notify();
2028 }
2029
2030 /// Undoes the most recent transaction.
2031 pub fn undo(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
2032 let was_dirty = self.is_dirty();
2033 let old_version = self.version.clone();
2034
2035 if let Some((transaction_id, operation)) = self.text.undo() {
2036 self.send_operation(Operation::Buffer(operation), cx);
2037 self.did_edit(&old_version, was_dirty, cx);
2038 Some(transaction_id)
2039 } else {
2040 None
2041 }
2042 }
2043
2044 /// Manually undoes a specific transaction in the buffer's undo history.
2045 pub fn undo_transaction(
2046 &mut self,
2047 transaction_id: TransactionId,
2048 cx: &mut ModelContext<Self>,
2049 ) -> bool {
2050 let was_dirty = self.is_dirty();
2051 let old_version = self.version.clone();
2052 if let Some(operation) = self.text.undo_transaction(transaction_id) {
2053 self.send_operation(Operation::Buffer(operation), cx);
2054 self.did_edit(&old_version, was_dirty, cx);
2055 true
2056 } else {
2057 false
2058 }
2059 }
2060
2061 /// Manually undoes all changes after a given transaction in the buffer's undo history.
2062 pub fn undo_to_transaction(
2063 &mut self,
2064 transaction_id: TransactionId,
2065 cx: &mut ModelContext<Self>,
2066 ) -> bool {
2067 let was_dirty = self.is_dirty();
2068 let old_version = self.version.clone();
2069
2070 let operations = self.text.undo_to_transaction(transaction_id);
2071 let undone = !operations.is_empty();
2072 for operation in operations {
2073 self.send_operation(Operation::Buffer(operation), cx);
2074 }
2075 if undone {
2076 self.did_edit(&old_version, was_dirty, cx)
2077 }
2078 undone
2079 }
2080
    /// Redoes the most recent transaction.
2082 pub fn redo(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
2083 let was_dirty = self.is_dirty();
2084 let old_version = self.version.clone();
2085
2086 if let Some((transaction_id, operation)) = self.text.redo() {
2087 self.send_operation(Operation::Buffer(operation), cx);
2088 self.did_edit(&old_version, was_dirty, cx);
2089 Some(transaction_id)
2090 } else {
2091 None
2092 }
2093 }
2094
    /// Manually redoes all changes until a given transaction in the buffer's redo history.
2096 pub fn redo_to_transaction(
2097 &mut self,
2098 transaction_id: TransactionId,
2099 cx: &mut ModelContext<Self>,
2100 ) -> bool {
2101 let was_dirty = self.is_dirty();
2102 let old_version = self.version.clone();
2103
2104 let operations = self.text.redo_to_transaction(transaction_id);
2105 let redone = !operations.is_empty();
2106 for operation in operations {
2107 self.send_operation(Operation::Buffer(operation), cx);
2108 }
2109 if redone {
2110 self.did_edit(&old_version, was_dirty, cx)
2111 }
2112 redone
2113 }
2114
    /// Overrides the current completion triggers with the user-provided completion triggers.
2116 pub fn set_completion_triggers(&mut self, triggers: Vec<String>, cx: &mut ModelContext<Self>) {
2117 self.completion_triggers.clone_from(&triggers);
2118 self.completion_triggers_timestamp = self.text.lamport_clock.tick();
2119 self.send_operation(
2120 Operation::UpdateCompletionTriggers {
2121 triggers,
2122 lamport_timestamp: self.completion_triggers_timestamp,
2123 },
2124 cx,
2125 );
2126 cx.notify();
2127 }
2128
2129 /// Returns a list of strings which trigger a completion menu for this language.
    /// Usually this is driven by the LSP server, which returns a list of trigger characters for completions.
2131 pub fn completion_triggers(&self) -> &[String] {
2132 &self.completion_triggers
2133 }
2134}
2135
2136#[doc(hidden)]
2137#[cfg(any(test, feature = "test-support"))]
2138impl Buffer {
2139 pub fn edit_via_marked_text(
2140 &mut self,
2141 marked_string: &str,
2142 autoindent_mode: Option<AutoindentMode>,
2143 cx: &mut ModelContext<Self>,
2144 ) {
2145 let edits = self.edits_for_marked_text(marked_string);
2146 self.edit(edits, autoindent_mode, cx);
2147 }
2148
2149 pub fn set_group_interval(&mut self, group_interval: Duration) {
2150 self.text.set_group_interval(group_interval);
2151 }
2152
2153 pub fn randomly_edit<T>(
2154 &mut self,
2155 rng: &mut T,
2156 old_range_count: usize,
2157 cx: &mut ModelContext<Self>,
2158 ) where
2159 T: rand::Rng,
2160 {
2161 let mut edits: Vec<(Range<usize>, String)> = Vec::new();
2162 let mut last_end = None;
2163 for _ in 0..old_range_count {
2164 if last_end.map_or(false, |last_end| last_end >= self.len()) {
2165 break;
2166 }
2167
2168 let new_start = last_end.map_or(0, |last_end| last_end + 1);
2169 let mut range = self.random_byte_range(new_start, rng);
2170 if rng.gen_bool(0.2) {
2171 mem::swap(&mut range.start, &mut range.end);
2172 }
2173 last_end = Some(range.end);
2174
2175 let new_text_len = rng.gen_range(0..10);
2176 let new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
2177
2178 edits.push((range, new_text));
2179 }
2180 log::info!("mutating buffer {} with {:?}", self.replica_id(), edits);
2181 self.edit(edits, None, cx);
2182 }
2183
2184 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut ModelContext<Self>) {
2185 let was_dirty = self.is_dirty();
2186 let old_version = self.version.clone();
2187
2188 let ops = self.text.randomly_undo_redo(rng);
2189 if !ops.is_empty() {
2190 for op in ops {
2191 self.send_operation(Operation::Buffer(op), cx);
2192 self.did_edit(&old_version, was_dirty, cx);
2193 }
2194 }
2195 }
2196}
2197
2198impl EventEmitter<Event> for Buffer {}
2199
2200impl Deref for Buffer {
2201 type Target = TextBuffer;
2202
2203 fn deref(&self) -> &Self::Target {
2204 &self.text
2205 }
2206}
2207
2208impl BufferSnapshot {
    /// Returns [`IndentSize`] for a given line that respects user settings and
    /// language preferences.
    pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
        indent_size_for_line(self, row)
    }

    /// Returns [`IndentSize`] for a given position that respects user settings
2214 /// and language preferences.
2215 pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &AppContext) -> IndentSize {
2216 let settings = language_settings(self.language_at(position), self.file(), cx);
2217 if settings.hard_tabs {
2218 IndentSize::tab()
2219 } else {
2220 IndentSize::spaces(settings.tab_size.get())
2221 }
2222 }
2223
2224 /// Retrieve the suggested indent size for all of the given rows. The unit of indentation
2225 /// is passed in as `single_indent_size`.
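    ///
    /// # Example
    ///
    /// A minimal sketch that asks for suggestions for rows 5 through 7, using four spaces as
    /// one unit of indentation. The row numbers are hypothetical; not a doctest:
    ///
    /// ```ignore
    /// let suggestions = snapshot.suggested_indents(5..8, IndentSize::spaces(4));
    /// for (row, indent) in &suggestions {
    ///     println!("row {row}: {} columns", indent.len);
    /// }
    /// ```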
2226 pub fn suggested_indents(
2227 &self,
2228 rows: impl Iterator<Item = u32>,
2229 single_indent_size: IndentSize,
2230 ) -> BTreeMap<u32, IndentSize> {
2231 let mut result = BTreeMap::new();
2232
2233 for row_range in contiguous_ranges(rows, 10) {
2234 let suggestions = match self.suggest_autoindents(row_range.clone()) {
2235 Some(suggestions) => suggestions,
2236 _ => break,
2237 };
2238
2239 for (row, suggestion) in row_range.zip(suggestions) {
2240 let indent_size = if let Some(suggestion) = suggestion {
2241 result
2242 .get(&suggestion.basis_row)
2243 .copied()
2244 .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
2245 .with_delta(suggestion.delta, single_indent_size)
2246 } else {
2247 self.indent_size_for_line(row)
2248 };
2249
2250 result.insert(row, indent_size);
2251 }
2252 }
2253
2254 result
2255 }
2256
2257 fn suggest_autoindents(
2258 &self,
2259 row_range: Range<u32>,
2260 ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
2261 let config = &self.language.as_ref()?.config;
2262 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
2263
2264 // Find the suggested indentation ranges based on the syntax tree.
2265 let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
2266 let end = Point::new(row_range.end, 0);
2267 let range = (start..end).to_offset(&self.text);
2268 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2269 Some(&grammar.indents_config.as_ref()?.query)
2270 });
2271 let indent_configs = matches
2272 .grammars()
2273 .iter()
2274 .map(|grammar| grammar.indents_config.as_ref().unwrap())
2275 .collect::<Vec<_>>();
2276
2277 let mut indent_ranges = Vec::<Range<Point>>::new();
2278 let mut outdent_positions = Vec::<Point>::new();
2279 while let Some(mat) = matches.peek() {
2280 let mut start: Option<Point> = None;
2281 let mut end: Option<Point> = None;
2282
2283 let config = &indent_configs[mat.grammar_index];
2284 for capture in mat.captures {
2285 if capture.index == config.indent_capture_ix {
2286 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
2287 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
2288 } else if Some(capture.index) == config.start_capture_ix {
2289 start = Some(Point::from_ts_point(capture.node.end_position()));
2290 } else if Some(capture.index) == config.end_capture_ix {
2291 end = Some(Point::from_ts_point(capture.node.start_position()));
2292 } else if Some(capture.index) == config.outdent_capture_ix {
2293 outdent_positions.push(Point::from_ts_point(capture.node.start_position()));
2294 }
2295 }
2296
2297 matches.advance();
2298 if let Some((start, end)) = start.zip(end) {
2299 if start.row == end.row {
2300 continue;
2301 }
2302
2303 let range = start..end;
2304 match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
2305 Err(ix) => indent_ranges.insert(ix, range),
2306 Ok(ix) => {
2307 let prev_range = &mut indent_ranges[ix];
2308 prev_range.end = prev_range.end.max(range.end);
2309 }
2310 }
2311 }
2312 }
2313
2314 let mut error_ranges = Vec::<Range<Point>>::new();
2315 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2316 Some(&grammar.error_query)
2317 });
2318 while let Some(mat) = matches.peek() {
2319 let node = mat.captures[0].node;
2320 let start = Point::from_ts_point(node.start_position());
2321 let end = Point::from_ts_point(node.end_position());
2322 let range = start..end;
2323 let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
2324 Ok(ix) | Err(ix) => ix,
2325 };
2326 let mut end_ix = ix;
2327 while let Some(existing_range) = error_ranges.get(end_ix) {
2328 if existing_range.end < end {
2329 end_ix += 1;
2330 } else {
2331 break;
2332 }
2333 }
2334 error_ranges.splice(ix..end_ix, [range]);
2335 matches.advance();
2336 }
2337
2338 outdent_positions.sort();
2339 for outdent_position in outdent_positions {
            // Find the innermost indent range containing this outdent position
            // and set its end to the outdent position.
2342 if let Some(range_to_truncate) = indent_ranges
2343 .iter_mut()
2344 .filter(|indent_range| indent_range.contains(&outdent_position))
2345 .last()
2346 {
2347 range_to_truncate.end = outdent_position;
2348 }
2349 }
2350
        // Find the suggested indentation increases and decreases based on regexes.
2352 let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
2353 self.for_each_line(
2354 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
2355 ..Point::new(row_range.end, 0),
2356 |row, line| {
2357 if config
2358 .decrease_indent_pattern
2359 .as_ref()
2360 .map_or(false, |regex| regex.is_match(line))
2361 {
2362 indent_change_rows.push((row, Ordering::Less));
2363 }
2364 if config
2365 .increase_indent_pattern
2366 .as_ref()
2367 .map_or(false, |regex| regex.is_match(line))
2368 {
2369 indent_change_rows.push((row + 1, Ordering::Greater));
2370 }
2371 },
2372 );
2373
2374 let mut indent_changes = indent_change_rows.into_iter().peekable();
2375 let mut prev_row = if config.auto_indent_using_last_non_empty_line {
2376 prev_non_blank_row.unwrap_or(0)
2377 } else {
2378 row_range.start.saturating_sub(1)
2379 };
2380 let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
2381 Some(row_range.map(move |row| {
2382 let row_start = Point::new(row, self.indent_size_for_line(row).len);
2383
2384 let mut indent_from_prev_row = false;
2385 let mut outdent_from_prev_row = false;
2386 let mut outdent_to_row = u32::MAX;
2387
2388 while let Some((indent_row, delta)) = indent_changes.peek() {
2389 match indent_row.cmp(&row) {
2390 Ordering::Equal => match delta {
2391 Ordering::Less => outdent_from_prev_row = true,
2392 Ordering::Greater => indent_from_prev_row = true,
2393 _ => {}
2394 },
2395
2396 Ordering::Greater => break,
2397 Ordering::Less => {}
2398 }
2399
2400 indent_changes.next();
2401 }
2402
2403 for range in &indent_ranges {
2404 if range.start.row >= row {
2405 break;
2406 }
2407 if range.start.row == prev_row && range.end > row_start {
2408 indent_from_prev_row = true;
2409 }
2410 if range.end > prev_row_start && range.end <= row_start {
2411 outdent_to_row = outdent_to_row.min(range.start.row);
2412 }
2413 }
2414
2415 let within_error = error_ranges
2416 .iter()
2417 .any(|e| e.start.row < row && e.end > row_start);
2418
2419 let suggestion = if outdent_to_row == prev_row
2420 || (outdent_from_prev_row && indent_from_prev_row)
2421 {
2422 Some(IndentSuggestion {
2423 basis_row: prev_row,
2424 delta: Ordering::Equal,
2425 within_error,
2426 })
2427 } else if indent_from_prev_row {
2428 Some(IndentSuggestion {
2429 basis_row: prev_row,
2430 delta: Ordering::Greater,
2431 within_error,
2432 })
2433 } else if outdent_to_row < prev_row {
2434 Some(IndentSuggestion {
2435 basis_row: outdent_to_row,
2436 delta: Ordering::Equal,
2437 within_error,
2438 })
2439 } else if outdent_from_prev_row {
2440 Some(IndentSuggestion {
2441 basis_row: prev_row,
2442 delta: Ordering::Less,
2443 within_error,
2444 })
2445 } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
2446 {
2447 Some(IndentSuggestion {
2448 basis_row: prev_row,
2449 delta: Ordering::Equal,
2450 within_error,
2451 })
2452 } else {
2453 None
2454 };
2455
2456 prev_row = row;
2457 prev_row_start = row_start;
2458 suggestion
2459 }))
2460 }
2461
2462 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
2463 while row > 0 {
2464 row -= 1;
2465 if !self.is_line_blank(row) {
2466 return Some(row);
2467 }
2468 }
2469 None
2470 }
2471
2472 /// Iterates over chunks of text in the given range of the buffer. Text is chunked
2473 /// in an arbitrary way due to being stored in a [`Rope`](text::Rope). The text is also
2474 /// returned in chunks where each chunk has a single syntax highlighting style and
2475 /// diagnostic status.
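    ///
    /// # Example
    ///
    /// A minimal sketch that gathers the text of a range along with each chunk's highlight id.
    /// The offsets are hypothetical; not a doctest:
    ///
    /// ```ignore
    /// let mut text = String::new();
    /// for chunk in snapshot.chunks(0..100, true) {
    ///     // `chunk.syntax_highlight_id` identifies the highlight style for this chunk, if any.
    ///     text.push_str(chunk.text);
    /// }
    /// ```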
2476 pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks {
2477 let range = range.start.to_offset(self)..range.end.to_offset(self);
2478
2479 let mut syntax = None;
2480 let mut diagnostic_endpoints = Vec::new();
2481 if language_aware {
2482 let captures = self.syntax.captures(range.clone(), &self.text, |grammar| {
2483 grammar.highlights_query.as_ref()
2484 });
2485 let highlight_maps = captures
2486 .grammars()
2487 .into_iter()
2488 .map(|grammar| grammar.highlight_map())
2489 .collect();
2490 syntax = Some((captures, highlight_maps));
2491 for entry in self.diagnostics_in_range::<_, usize>(range.clone(), false) {
2492 diagnostic_endpoints.push(DiagnosticEndpoint {
2493 offset: entry.range.start,
2494 is_start: true,
2495 severity: entry.diagnostic.severity,
2496 is_unnecessary: entry.diagnostic.is_unnecessary,
2497 });
2498 diagnostic_endpoints.push(DiagnosticEndpoint {
2499 offset: entry.range.end,
2500 is_start: false,
2501 severity: entry.diagnostic.severity,
2502 is_unnecessary: entry.diagnostic.is_unnecessary,
2503 });
2504 }
2505 diagnostic_endpoints
2506 .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
2507 }
2508
2509 BufferChunks::new(self.text.as_rope(), range, syntax, diagnostic_endpoints)
2510 }
2511
2512 /// Invokes the given callback for each line of text in the given range of the buffer.
    /// Uses a callback to avoid allocating a string for each line.
2514 fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
2515 let mut line = String::new();
2516 let mut row = range.start.row;
2517 for chunk in self
2518 .as_rope()
2519 .chunks_in_range(range.to_offset(self))
2520 .chain(["\n"])
2521 {
2522 for (newline_ix, text) in chunk.split('\n').enumerate() {
2523 if newline_ix > 0 {
2524 callback(row, &line);
2525 row += 1;
2526 line.clear();
2527 }
2528 line.push_str(text);
2529 }
2530 }
2531 }
2532
2533 /// Iterates over every [`SyntaxLayer`] in the buffer.
2534 pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayer> + '_ {
2535 self.syntax.layers_for_range(0..self.len(), &self.text)
2536 }
2537
2538 pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayer> {
2539 let offset = position.to_offset(self);
2540 self.syntax
2541 .layers_for_range(offset..offset, &self.text)
2542 .filter(|l| l.node().end_byte() > offset)
2543 .last()
2544 }
2545
    /// Returns the buffer's main [Language].
2547 pub fn language(&self) -> Option<&Arc<Language>> {
2548 self.language.as_ref()
2549 }
2550
2551 /// Returns the [Language] at the given location.
2552 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
2553 self.syntax_layer_at(position)
2554 .map(|info| info.language)
2555 .or(self.language.as_ref())
2556 }
2557
2558 /// Returns the settings for the language at the given location.
2559 pub fn settings_at<'a, D: ToOffset>(
2560 &self,
2561 position: D,
2562 cx: &'a AppContext,
2563 ) -> &'a LanguageSettings {
2564 language_settings(self.language_at(position), self.file.as_ref(), cx)
2565 }
2566
2567 /// Returns the [LanguageScope] at the given location.
2568 pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
2569 let offset = position.to_offset(self);
2570 let mut scope = None;
2571 let mut smallest_range: Option<Range<usize>> = None;
2572
2573 // Use the layer that has the smallest node intersecting the given point.
2574 for layer in self.syntax.layers_for_range(offset..offset, &self.text) {
2575 let mut cursor = layer.node().walk();
2576
2577 let mut range = None;
2578 loop {
2579 let child_range = cursor.node().byte_range();
2580 if !child_range.to_inclusive().contains(&offset) {
2581 break;
2582 }
2583
2584 range = Some(child_range);
2585 if cursor.goto_first_child_for_byte(offset).is_none() {
2586 break;
2587 }
2588 }
2589
2590 if let Some(range) = range {
2591 if smallest_range
2592 .as_ref()
2593 .map_or(true, |smallest_range| range.len() < smallest_range.len())
2594 {
2595 smallest_range = Some(range);
2596 scope = Some(LanguageScope {
2597 language: layer.language.clone(),
2598 override_id: layer.override_id(offset, &self.text),
2599 });
2600 }
2601 }
2602 }
2603
2604 scope.or_else(|| {
2605 self.language.clone().map(|language| LanguageScope {
2606 language,
2607 override_id: None,
2608 })
2609 })
2610 }
2611
2612 /// Returns a tuple of the range and character kind of the word
2613 /// surrounding the given position.
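    ///
    /// # Example
    ///
    /// A minimal sketch, assuming the buffer contains the text `hello world`; not a doctest:
    ///
    /// ```ignore
    /// // Offset 8 falls inside "world", which spans bytes 6..11.
    /// let (range, kind) = snapshot.surrounding_word(8);
    /// assert_eq!(range, 6..11);
    /// ```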
2614 pub fn surrounding_word<T: ToOffset>(&self, start: T) -> (Range<usize>, Option<CharKind>) {
2615 let mut start = start.to_offset(self);
2616 let mut end = start;
2617 let mut next_chars = self.chars_at(start).peekable();
2618 let mut prev_chars = self.reversed_chars_at(start).peekable();
2619
2620 let scope = self.language_scope_at(start);
2621 let kind = |c| char_kind(&scope, c);
2622 let word_kind = cmp::max(
2623 prev_chars.peek().copied().map(kind),
2624 next_chars.peek().copied().map(kind),
2625 );
2626
2627 for ch in prev_chars {
2628 if Some(kind(ch)) == word_kind && ch != '\n' {
2629 start -= ch.len_utf8();
2630 } else {
2631 break;
2632 }
2633 }
2634
2635 for ch in next_chars {
2636 if Some(kind(ch)) == word_kind && ch != '\n' {
2637 end += ch.len_utf8();
2638 } else {
2639 break;
2640 }
2641 }
2642
2643 (start..end, word_kind)
2644 }
2645
    /// Returns the range of the closest syntax node that encloses the given range.
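    ///
    /// # Example
    ///
    /// A minimal sketch of expanding an empty selection to the node under the cursor. The
    /// `offset` is hypothetical; not a doctest:
    ///
    /// ```ignore
    /// if let Some(node_range) = snapshot.range_for_syntax_ancestor(offset..offset) {
    ///     // `node_range` covers the innermost node or token found at `offset`.
    /// }
    /// ```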
2647 pub fn range_for_syntax_ancestor<T: ToOffset>(&self, range: Range<T>) -> Option<Range<usize>> {
2648 let range = range.start.to_offset(self)..range.end.to_offset(self);
2649 let mut result: Option<Range<usize>> = None;
2650 'outer: for layer in self.syntax.layers_for_range(range.clone(), &self.text) {
2651 let mut cursor = layer.node().walk();
2652
2653 // Descend to the first leaf that touches the start of the range,
2654 // and if the range is non-empty, extends beyond the start.
2655 while cursor.goto_first_child_for_byte(range.start).is_some() {
2656 if !range.is_empty() && cursor.node().end_byte() == range.start {
2657 cursor.goto_next_sibling();
2658 }
2659 }
2660
2661 // Ascend to the smallest ancestor that strictly contains the range.
2662 loop {
2663 let node_range = cursor.node().byte_range();
2664 if node_range.start <= range.start
2665 && node_range.end >= range.end
2666 && node_range.len() > range.len()
2667 {
2668 break;
2669 }
2670 if !cursor.goto_parent() {
2671 continue 'outer;
2672 }
2673 }
2674
2675 let left_node = cursor.node();
2676 let mut layer_result = left_node.byte_range();
2677
2678 // For an empty range, try to find another node immediately to the right of the range.
2679 if left_node.end_byte() == range.start {
2680 let mut right_node = None;
2681 while !cursor.goto_next_sibling() {
2682 if !cursor.goto_parent() {
2683 break;
2684 }
2685 }
2686
2687 while cursor.node().start_byte() == range.start {
2688 right_node = Some(cursor.node());
2689 if !cursor.goto_first_child() {
2690 break;
2691 }
2692 }
2693
2694 // If there is a candidate node on both sides of the (empty) range, then
2695 // decide between the two by favoring a named node over an anonymous token.
2696 // If both nodes are the same in that regard, favor the right one.
2697 if let Some(right_node) = right_node {
2698 if right_node.is_named() || !left_node.is_named() {
2699 layer_result = right_node.byte_range();
2700 }
2701 }
2702 }
2703
2704 if let Some(previous_result) = &result {
2705 if previous_result.len() < layer_result.len() {
2706 continue;
2707 }
2708 }
2709 result = Some(layer_result);
2710 }
2711
2712 result
2713 }
2714
2715 /// Returns the outline for the buffer.
2716 ///
2717 /// This method allows passing an optional [SyntaxTheme] to
2718 /// syntax-highlight the returned symbols.
2719 pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Option<Outline<Anchor>> {
2720 self.outline_items_containing(0..self.len(), true, theme)
2721 .map(Outline::new)
2722 }
2723
2724 /// Returns all the symbols that contain the given position.
2725 ///
2726 /// This method allows passing an optional [SyntaxTheme] to
2727 /// syntax-highlight the returned symbols.
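    ///
    /// # Example
    ///
    /// A minimal sketch that prints the symbol path enclosing a position, without highlighting.
    /// The offset is hypothetical; not a doctest:
    ///
    /// ```ignore
    /// if let Some(items) = snapshot.symbols_containing(1024, None) {
    ///     for item in items {
    ///         println!("{}{}", "  ".repeat(item.depth), item.text);
    ///     }
    /// }
    /// ```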
2728 pub fn symbols_containing<T: ToOffset>(
2729 &self,
2730 position: T,
2731 theme: Option<&SyntaxTheme>,
2732 ) -> Option<Vec<OutlineItem<Anchor>>> {
2733 let position = position.to_offset(self);
2734 let mut items = self.outline_items_containing(
2735 position.saturating_sub(1)..self.len().min(position + 1),
2736 false,
2737 theme,
2738 )?;
2739 let mut prev_depth = None;
2740 items.retain(|item| {
2741 let result = prev_depth.map_or(true, |prev_depth| item.depth > prev_depth);
2742 prev_depth = Some(item.depth);
2743 result
2744 });
2745 Some(items)
2746 }
2747
2748 fn outline_items_containing(
2749 &self,
2750 range: Range<usize>,
2751 include_extra_context: bool,
2752 theme: Option<&SyntaxTheme>,
2753 ) -> Option<Vec<OutlineItem<Anchor>>> {
2754 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2755 grammar.outline_config.as_ref().map(|c| &c.query)
2756 });
2757 let configs = matches
2758 .grammars()
2759 .iter()
2760 .map(|g| g.outline_config.as_ref().unwrap())
2761 .collect::<Vec<_>>();
2762
2763 let mut stack = Vec::<Range<usize>>::new();
2764 let mut items = Vec::new();
2765 while let Some(mat) = matches.peek() {
2766 let config = &configs[mat.grammar_index];
2767 let item_node = mat.captures.iter().find_map(|cap| {
2768 if cap.index == config.item_capture_ix {
2769 Some(cap.node)
2770 } else {
2771 None
2772 }
2773 })?;
2774
2775 let item_range = item_node.byte_range();
2776 if item_range.end < range.start || item_range.start > range.end {
2777 matches.advance();
2778 continue;
2779 }
2780
2781 let mut buffer_ranges = Vec::new();
2782 for capture in mat.captures {
2783 let node_is_name;
2784 if capture.index == config.name_capture_ix {
2785 node_is_name = true;
2786 } else if Some(capture.index) == config.context_capture_ix
2787 || (Some(capture.index) == config.extra_context_capture_ix
2788 && include_extra_context)
2789 {
2790 node_is_name = false;
2791 } else {
2792 continue;
2793 }
2794
2795 let mut range = capture.node.start_byte()..capture.node.end_byte();
2796 let start = capture.node.start_position();
2797 if capture.node.end_position().row > start.row {
2798 range.end =
2799 range.start + self.line_len(start.row as u32) as usize - start.column;
2800 }
2801
2802 if !range.is_empty() {
2803 buffer_ranges.push((range, node_is_name));
2804 }
2805 }
2806
2807 if buffer_ranges.is_empty() {
2808 matches.advance();
2809 continue;
2810 }
2811
2812 let mut text = String::new();
2813 let mut highlight_ranges = Vec::new();
2814 let mut name_ranges = Vec::new();
2815 let mut chunks = self.chunks(
2816 buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end,
2817 true,
2818 );
2819 let mut last_buffer_range_end = 0;
2820 for (buffer_range, is_name) in buffer_ranges {
2821 if !text.is_empty() && buffer_range.start > last_buffer_range_end {
2822 text.push(' ');
2823 }
2824 last_buffer_range_end = buffer_range.end;
2825 if is_name {
2826 let mut start = text.len();
2827 let end = start + buffer_range.len();
2828
                    // When multiple names are captured, the matchable text
                    // includes the whitespace in between the names.
2831 if !name_ranges.is_empty() {
2832 start -= 1;
2833 }
2834
2835 name_ranges.push(start..end);
2836 }
2837
2838 let mut offset = buffer_range.start;
2839 chunks.seek(offset);
2840 for mut chunk in chunks.by_ref() {
2841 if chunk.text.len() > buffer_range.end - offset {
2842 chunk.text = &chunk.text[0..(buffer_range.end - offset)];
2843 offset = buffer_range.end;
2844 } else {
2845 offset += chunk.text.len();
2846 }
2847 let style = chunk
2848 .syntax_highlight_id
2849 .zip(theme)
2850 .and_then(|(highlight, theme)| highlight.style(theme));
2851 if let Some(style) = style {
2852 let start = text.len();
2853 let end = start + chunk.text.len();
2854 highlight_ranges.push((start..end, style));
2855 }
2856 text.push_str(chunk.text);
2857 if offset >= buffer_range.end {
2858 break;
2859 }
2860 }
2861 }
2862
2863 matches.advance();
2864 while stack.last().map_or(false, |prev_range| {
2865 prev_range.start > item_range.start || prev_range.end < item_range.end
2866 }) {
2867 stack.pop();
2868 }
2869 stack.push(item_range.clone());
2870
2871 items.push(OutlineItem {
2872 depth: stack.len() - 1,
2873 range: self.anchor_after(item_range.start)..self.anchor_before(item_range.end),
2874 text,
2875 highlight_ranges,
2876 name_ranges,
2877 })
2878 }
2879 Some(items)
2880 }
2881
2882 /// For each grammar in the language, runs the provided
2883 /// [tree_sitter::Query] against the given range.
2884 pub fn matches(
2885 &self,
2886 range: Range<usize>,
2887 query: fn(&Grammar) -> Option<&tree_sitter::Query>,
2888 ) -> SyntaxMapMatches {
2889 self.syntax.matches(range, self, query)
2890 }
2891
    /// Returns bracket range pairs overlapping or adjacent to `range`.
2893 pub fn bracket_ranges<T: ToOffset>(
2894 &self,
2895 range: Range<T>,
2896 ) -> impl Iterator<Item = (Range<usize>, Range<usize>)> + '_ {
2897 // Find bracket pairs that *inclusively* contain the given range.
2898 let range = range.start.to_offset(self).saturating_sub(1)
2899 ..self.len().min(range.end.to_offset(self) + 1);
2900
2901 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2902 grammar.brackets_config.as_ref().map(|c| &c.query)
2903 });
2904 let configs = matches
2905 .grammars()
2906 .iter()
2907 .map(|grammar| grammar.brackets_config.as_ref().unwrap())
2908 .collect::<Vec<_>>();
2909
2910 iter::from_fn(move || {
2911 while let Some(mat) = matches.peek() {
2912 let mut open = None;
2913 let mut close = None;
2914 let config = &configs[mat.grammar_index];
2915 for capture in mat.captures {
2916 if capture.index == config.open_capture_ix {
2917 open = Some(capture.node.byte_range());
2918 } else if capture.index == config.close_capture_ix {
2919 close = Some(capture.node.byte_range());
2920 }
2921 }
2922
2923 matches.advance();
2924
2925 let Some((open, close)) = open.zip(close) else {
2926 continue;
2927 };
2928
2929 let bracket_range = open.start..=close.end;
2930 if !bracket_range.overlaps(&range) {
2931 continue;
2932 }
2933
2934 return Some((open, close));
2935 }
2936 None
2937 })
2938 }
2939
    /// Returns enclosing bracket ranges containing the given range.
2941 pub fn enclosing_bracket_ranges<T: ToOffset>(
2942 &self,
2943 range: Range<T>,
2944 ) -> impl Iterator<Item = (Range<usize>, Range<usize>)> + '_ {
2945 let range = range.start.to_offset(self)..range.end.to_offset(self);
2946
2947 self.bracket_ranges(range.clone())
2948 .filter(move |(open, close)| open.start <= range.start && close.end >= range.end)
2949 }
2950
    /// Returns the smallest enclosing bracket ranges containing the given range,
    /// or `None` if no brackets contain the range.
    ///
    /// Can optionally pass a `range_filter` to filter the ranges of brackets to consider.
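    ///
    /// # Example
    ///
    /// A minimal sketch that skips empty pairs such as `()`. The range is hypothetical; not a
    /// doctest:
    ///
    /// ```ignore
    /// let filter = |open: Range<usize>, close: Range<usize>| open.end != close.start;
    /// let pair = snapshot.innermost_enclosing_bracket_ranges(10..20, Some(&filter));
    /// ```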
2954 pub fn innermost_enclosing_bracket_ranges<T: ToOffset>(
2955 &self,
2956 range: Range<T>,
2957 range_filter: Option<&dyn Fn(Range<usize>, Range<usize>) -> bool>,
2958 ) -> Option<(Range<usize>, Range<usize>)> {
2959 let range = range.start.to_offset(self)..range.end.to_offset(self);
2960
2961 // Get the ranges of the innermost pair of brackets.
2962 let mut result: Option<(Range<usize>, Range<usize>)> = None;
2963
2964 for (open, close) in self.enclosing_bracket_ranges(range.clone()) {
2965 if let Some(range_filter) = range_filter {
2966 if !range_filter(open.clone(), close.clone()) {
2967 continue;
2968 }
2969 }
2970
2971 let len = close.end - open.start;
2972
2973 if let Some((existing_open, existing_close)) = &result {
2974 let existing_len = existing_close.end - existing_open.start;
2975 if len > existing_len {
2976 continue;
2977 }
2978 }
2979
2980 result = Some((open, close));
2981 }
2982
2983 result
2984 }
2985
2986 /// Returns anchor ranges for any matches of the redaction query.
2987 /// The buffer can be associated with multiple languages, and the redaction query associated with each
2988 /// will be run on the relevant section of the buffer.
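    ///
    /// # Example
    ///
    /// A minimal sketch that collects every redacted range in the buffer, e.g. to obscure
    /// secrets before rendering; not a doctest:
    ///
    /// ```ignore
    /// let redactions: Vec<Range<usize>> = snapshot.redacted_ranges(0..snapshot.len()).collect();
    /// ```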
2989 pub fn redacted_ranges<T: ToOffset>(
2990 &self,
2991 range: Range<T>,
2992 ) -> impl Iterator<Item = Range<usize>> + '_ {
2993 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
2994 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
2995 grammar
2996 .redactions_config
2997 .as_ref()
2998 .map(|config| &config.query)
2999 });
3000
3001 let configs = syntax_matches
3002 .grammars()
3003 .iter()
3004 .map(|grammar| grammar.redactions_config.as_ref())
3005 .collect::<Vec<_>>();
3006
3007 iter::from_fn(move || {
3008 let redacted_range = syntax_matches
3009 .peek()
3010 .and_then(|mat| {
3011 configs[mat.grammar_index].and_then(|config| {
3012 mat.captures
3013 .iter()
3014 .find(|capture| capture.index == config.redaction_capture_ix)
3015 })
3016 })
3017 .map(|mat| mat.node.byte_range());
3018 syntax_matches.advance();
3019 redacted_range
3020 })
3021 }
3022
3023 pub fn runnable_ranges(
3024 &self,
3025 range: Range<Anchor>,
3026 ) -> impl Iterator<Item = RunnableRange> + '_ {
3027 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
3028
3029 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
3030 grammar.runnable_config.as_ref().map(|config| &config.query)
3031 });
3032
3033 let test_configs = syntax_matches
3034 .grammars()
3035 .iter()
3036 .map(|grammar| grammar.runnable_config.as_ref())
3037 .collect::<Vec<_>>();
3038
3039 iter::from_fn(move || loop {
3040 let mat = syntax_matches.peek()?;
3041 let test_range = test_configs[mat.grammar_index].and_then(|test_configs| {
3042 let mut tags: SmallVec<[(Range<usize>, RunnableTag); 1]> =
3043 SmallVec::from_iter(mat.captures.iter().filter_map(|capture| {
3044 test_configs
3045 .runnable_tags
3046 .get(&capture.index)
3047 .cloned()
3048 .map(|tag_name| (capture.node.byte_range(), tag_name))
3049 }));
3050 let maximum_range = tags
3051 .iter()
3052 .max_by_key(|(byte_range, _)| byte_range.len())
3053 .map(|(range, _)| range)?
3054 .clone();
3055 tags.sort_by_key(|(range, _)| range == &maximum_range);
3056 let split_point = tags.partition_point(|(range, _)| range != &maximum_range);
3057 let (extra_captures, tags) = tags.split_at(split_point);
3058
3059 let extra_captures = extra_captures
3060 .into_iter()
3061 .map(|(range, name)| {
3062 (
3063 name.0.to_string(),
3064 self.text_for_range(range.clone()).collect::<String>(),
3065 )
3066 })
3067 .collect();
3068 Some(RunnableRange {
3069 run_range: mat
3070 .captures
3071 .iter()
3072 .find(|capture| capture.index == test_configs.run_capture_ix)
3073 .map(|mat| mat.node.byte_range())?,
3074 runnable: Runnable {
3075 tags: tags.into_iter().cloned().map(|(_, tag)| tag).collect(),
3076 language: mat.language,
3077 buffer: self.remote_id(),
3078 },
3079 extra_captures,
3080 buffer_id: self.remote_id(),
3081 })
3082 });
3083
3084 syntax_matches.advance();
3085 if test_range.is_some() {
3086 // It's fine for us to short-circuit on .peek()? returning None. We don't want to return None from this iter if we
3087 // had a capture that did not contain a run marker, hence we'll just loop around for the next capture.
3088 return test_range;
3089 }
3090 })
3091 }
3092
3093 pub fn indent_guides_in_range(
3094 &self,
3095 range: Range<Anchor>,
3096 cx: &AppContext,
3097 ) -> Vec<IndentGuide> {
3098 fn indent_size_for_row(this: &BufferSnapshot, row: BufferRow, cx: &AppContext) -> u32 {
3099 let language = this.language_at(Point::new(row, 0));
3100 language_settings(language, None, cx).tab_size.get() as u32
3101 }
3102
3103 let start_row = range.start.to_point(self).row;
3104 let end_row = range.end.to_point(self).row;
3105 let row_range = start_row..end_row + 1;
3106
3107 let mut row_indents = self.line_indents_in_row_range(row_range.clone());
3108
3109 let mut result_vec = Vec::new();
3110 let mut indent_stack = SmallVec::<[IndentGuide; 8]>::new();
3111
3112 // TODO: This should be calculated for every row but it is pretty expensive
3113 let indent_size = indent_size_for_row(self, start_row, cx);
3114
3115 while let Some((first_row, mut line_indent, empty)) = row_indents.next() {
3116 let current_depth = indent_stack.len() as u32;
3117
            // When encountering an empty line, continue until a useful line indent is found,
            // then add to the indent stack at that depth.
3120 let mut found_indent = false;
3121 let mut last_row = first_row;
3122 if empty {
3123 let mut trailing_row = end_row;
3124 while !found_indent {
3125 let (target_row, new_line_indent, empty) =
3126 if let Some(display_row) = row_indents.next() {
3127 display_row
3128 } else {
3129 // This means we reached the end of the given range and found empty lines at the end.
3130 // We need to traverse further until we find a non-empty line to know if we need to add
3131 // an indent guide for the last visible indent.
3132 trailing_row += 1;
3133
3134 const TRAILING_ROW_SEARCH_LIMIT: u32 = 25;
3135 if trailing_row > self.max_point().row
3136 || trailing_row > end_row + TRAILING_ROW_SEARCH_LIMIT
3137 {
3138 break;
3139 }
3140 let (new_line_indent, empty) = self.line_indent_for_row(trailing_row);
3141 (trailing_row, new_line_indent, empty)
3142 };
3143
3144 if empty {
3145 continue;
3146 }
3147 last_row = target_row.min(end_row);
3148 line_indent = new_line_indent;
3149 found_indent = true;
3150 break;
3151 }
3152 } else {
3153 found_indent = true
3154 }
3155
3156 let depth = if found_indent {
3157 line_indent / indent_size + ((line_indent % indent_size) > 0) as u32
3158 } else {
3159 current_depth
3160 };
3161
3162 if depth < current_depth {
3163 for _ in 0..(current_depth - depth) {
3164 let mut indent = indent_stack.pop().unwrap();
3165 if last_row != first_row {
                        // In this case, we landed on an empty row, had to seek forward, and
                        // discovered that the indent we were on is ending. This means that the
                        // last display row must be on the line that ends this indent range, so
                        // we should display the range up to the first non-empty line.
3171 indent.end_row = first_row.saturating_sub(1);
3172 }
3173
3174 result_vec.push(indent)
3175 }
3176 } else if depth > current_depth {
3177 for next_depth in current_depth..depth {
3178 indent_stack.push(IndentGuide {
3179 buffer_id: self.remote_id(),
3180 start_row: first_row,
3181 end_row: last_row,
3182 depth: next_depth,
3183 indent_size,
3184 });
3185 }
3186 }
3187
3188 for indent in indent_stack.iter_mut() {
3189 indent.end_row = last_row;
3190 }
3191 }
3192
3193 result_vec.extend(indent_stack);
3194
3195 result_vec
3196 }
3197
3198 pub async fn enclosing_indent(
3199 &self,
3200 mut buffer_row: BufferRow,
3201 ) -> Option<(Range<BufferRow>, u32)> {
3202 let max_row = self.max_point().row;
3203 if buffer_row >= max_row {
3204 return None;
3205 }
3206
3207 let (mut target_indent_size, is_blank) = self.line_indent_for_row(buffer_row);
3208
3209 // If the current row is at the start of an indented block, we want to return this
3210 // block as the enclosing indent.
3211 if !is_blank && buffer_row < max_row {
3212 let (next_line_indent, is_blank) = self.line_indent_for_row(buffer_row + 1);
3213 if !is_blank && target_indent_size < next_line_indent {
3214 target_indent_size = next_line_indent;
3215 buffer_row += 1;
3216 }
3217 }
3218
3219 const SEARCH_ROW_LIMIT: u32 = 25000;
3220 const SEARCH_WHITESPACE_ROW_LIMIT: u32 = 2500;
3221 const YIELD_INTERVAL: u32 = 100;
3222
3223 let mut accessed_row_counter = 0;
3224
        // If the current row is blank, search for the nearest non-blank lines above and below.
3226 if is_blank {
3227 let start = buffer_row.saturating_sub(SEARCH_WHITESPACE_ROW_LIMIT);
3228 let end = (max_row + 1).min(buffer_row + SEARCH_WHITESPACE_ROW_LIMIT);
3229
3230 let mut non_empty_line_above = None;
3231 for (row, indent_size, is_blank) in self
3232 .text
3233 .reversed_line_indents_in_row_range(start..buffer_row)
3234 {
3235 accessed_row_counter += 1;
3236 if accessed_row_counter == YIELD_INTERVAL {
3237 accessed_row_counter = 0;
3238 yield_now().await;
3239 }
3240 if !is_blank {
3241 non_empty_line_above = Some((row, indent_size));
3242 break;
3243 }
3244 }
3245
3246 let mut non_empty_line_below = None;
3247 for (row, indent_size, is_blank) in
3248 self.text.line_indents_in_row_range((buffer_row + 1)..end)
3249 {
3250 accessed_row_counter += 1;
3251 if accessed_row_counter == YIELD_INTERVAL {
3252 accessed_row_counter = 0;
3253 yield_now().await;
3254 }
3255 if !is_blank {
3256 non_empty_line_below = Some((row, indent_size));
3257 break;
3258 }
3259 }
3260
3261 let (row, indent_size) = match (non_empty_line_above, non_empty_line_below) {
3262 (Some((above_row, above_indent)), Some((below_row, below_indent))) => {
3263 if above_indent >= below_indent {
3264 (above_row, above_indent)
3265 } else {
3266 (below_row, below_indent)
3267 }
3268 }
3269 (Some(above), None) => above,
3270 (None, Some(below)) => below,
3271 _ => return None,
3272 };
3273
3274 target_indent_size = indent_size;
3275 buffer_row = row;
3276 }
3277
3278 let start = buffer_row.saturating_sub(SEARCH_ROW_LIMIT);
3279 let end = (max_row + 1).min(buffer_row + SEARCH_ROW_LIMIT);
3280
3281 let mut start_indent = None;
3282 for (row, indent_size, is_blank) in self
3283 .text
3284 .reversed_line_indents_in_row_range(start..buffer_row)
3285 {
3286 accessed_row_counter += 1;
3287 if accessed_row_counter == YIELD_INTERVAL {
3288 accessed_row_counter = 0;
3289 yield_now().await;
3290 }
3291 if !is_blank && indent_size < target_indent_size {
3292 start_indent = Some((row, indent_size));
3293 break;
3294 }
3295 }
3296 let (start_row, start_indent_size) = start_indent?;
3297
3298 let mut end_indent = (end, None);
3299 for (row, indent_size, is_blank) in
3300 self.text.line_indents_in_row_range((buffer_row + 1)..end)
3301 {
3302 accessed_row_counter += 1;
3303 if accessed_row_counter == YIELD_INTERVAL {
3304 accessed_row_counter = 0;
3305 yield_now().await;
3306 }
3307 if !is_blank && indent_size < target_indent_size {
3308 end_indent = (row.saturating_sub(1), Some(indent_size));
3309 break;
3310 }
3311 }
3312 let (end_row, end_indent_size) = end_indent;
3313
3314 let indent_size = if let Some(end_indent_size) = end_indent_size {
3315 start_indent_size.max(end_indent_size)
3316 } else {
3317 start_indent_size
3318 };
3319
3320 Some((start_row..end_row, indent_size))
3321 }
3322
3323 /// Returns selections for remote peers intersecting the given range.
3324 #[allow(clippy::type_complexity)]
3325 pub fn remote_selections_in_range(
3326 &self,
3327 range: Range<Anchor>,
3328 ) -> impl Iterator<
3329 Item = (
3330 ReplicaId,
3331 bool,
3332 CursorShape,
3333 impl Iterator<Item = &Selection<Anchor>> + '_,
3334 ),
3335 > + '_ {
3336 self.remote_selections
3337 .iter()
3338 .filter(|(replica_id, set)| {
3339 **replica_id != self.text.replica_id() && !set.selections.is_empty()
3340 })
3341 .map(move |(replica_id, set)| {
3342 let start_ix = match set.selections.binary_search_by(|probe| {
3343 probe.end.cmp(&range.start, self).then(Ordering::Greater)
3344 }) {
3345 Ok(ix) | Err(ix) => ix,
3346 };
3347 let end_ix = match set.selections.binary_search_by(|probe| {
3348 probe.start.cmp(&range.end, self).then(Ordering::Less)
3349 }) {
3350 Ok(ix) | Err(ix) => ix,
3351 };
3352
3353 (
3354 *replica_id,
3355 set.line_mode,
3356 set.cursor_shape,
3357 set.selections[start_ix..end_ix].iter(),
3358 )
3359 })
3360 }
3361
3362 /// Whether the buffer contains any git changes.
3363 pub fn has_git_diff(&self) -> bool {
3364 !self.git_diff.is_empty()
3365 }
3366
3367 /// Returns all the Git diff hunks intersecting the given
3368 /// row range.
3369 pub fn git_diff_hunks_in_row_range(
3370 &self,
3371 range: Range<BufferRow>,
3372 ) -> impl '_ + Iterator<Item = git::diff::DiffHunk<u32>> {
3373 self.git_diff.hunks_in_row_range(range, self)
3374 }
3375
3376 /// Returns all the Git diff hunks intersecting the given
3377 /// range.
3378 pub fn git_diff_hunks_intersecting_range(
3379 &self,
3380 range: Range<Anchor>,
3381 ) -> impl '_ + Iterator<Item = git::diff::DiffHunk<u32>> {
3382 self.git_diff.hunks_intersecting_range(range, self)
3383 }
3384
3385 /// Returns all the Git diff hunks intersecting the given
3386 /// range, in reverse order.
3387 pub fn git_diff_hunks_intersecting_range_rev(
3388 &self,
3389 range: Range<Anchor>,
3390 ) -> impl '_ + Iterator<Item = git::diff::DiffHunk<u32>> {
3391 self.git_diff.hunks_intersecting_range_rev(range, self)
3392 }
3393
    /// Returns whether the buffer contains any diagnostics.
3395 pub fn has_diagnostics(&self) -> bool {
3396 !self.diagnostics.is_empty()
3397 }
3398
3399 /// Returns all the diagnostics intersecting the given range.
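    ///
    /// # Example
    ///
    /// A minimal sketch that lists all diagnostics in the buffer as point ranges, in order;
    /// not a doctest:
    ///
    /// ```ignore
    /// for entry in snapshot.diagnostics_in_range::<_, Point>(0..snapshot.len(), false) {
    ///     println!("{:?}: {:?}..{:?}", entry.diagnostic.severity, entry.range.start, entry.range.end);
    /// }
    /// ```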
3400 pub fn diagnostics_in_range<'a, T, O>(
3401 &'a self,
3402 search_range: Range<T>,
3403 reversed: bool,
3404 ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
3405 where
3406 T: 'a + Clone + ToOffset,
3407 O: 'a + FromAnchor + Ord,
3408 {
3409 let mut iterators: Vec<_> = self
3410 .diagnostics
3411 .iter()
3412 .map(|(_, collection)| {
3413 collection
3414 .range::<T, O>(search_range.clone(), self, true, reversed)
3415 .peekable()
3416 })
3417 .collect();
3418
3419 std::iter::from_fn(move || {
3420 let (next_ix, _) = iterators
3421 .iter_mut()
3422 .enumerate()
3423 .flat_map(|(ix, iter)| Some((ix, iter.peek()?)))
3424 .min_by(|(_, a), (_, b)| {
3425 let cmp = a
3426 .range
3427 .start
3428 .cmp(&b.range.start)
3429 // when range is equal, sort by diagnostic severity
3430 .then(a.diagnostic.severity.cmp(&b.diagnostic.severity))
3431 // and stabilize order with group_id
3432 .then(a.diagnostic.group_id.cmp(&b.diagnostic.group_id));
3433 if reversed {
3434 cmp.reverse()
3435 } else {
3436 cmp
3437 }
3438 })?;
3439 iterators[next_ix].next()
3440 })
3441 }
3442
3443 /// Returns all the diagnostic groups associated with the given
3444 /// language server id. If no language server id is provided,
3445 /// all diagnostics groups are returned.
3446 pub fn diagnostic_groups(
3447 &self,
3448 language_server_id: Option<LanguageServerId>,
3449 ) -> Vec<(LanguageServerId, DiagnosticGroup<Anchor>)> {
3450 let mut groups = Vec::new();
3451
3452 if let Some(language_server_id) = language_server_id {
3453 if let Ok(ix) = self
3454 .diagnostics
3455 .binary_search_by_key(&language_server_id, |e| e.0)
3456 {
3457 self.diagnostics[ix]
3458 .1
3459 .groups(language_server_id, &mut groups, self);
3460 }
3461 } else {
3462 for (language_server_id, diagnostics) in self.diagnostics.iter() {
3463 diagnostics.groups(*language_server_id, &mut groups, self);
3464 }
3465 }
3466
3467 groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
3468 let a_start = &group_a.entries[group_a.primary_ix].range.start;
3469 let b_start = &group_b.entries[group_b.primary_ix].range.start;
3470 a_start.cmp(b_start, self).then_with(|| id_a.cmp(id_b))
3471 });
3472
3473 groups
3474 }
3475
3476 /// Returns an iterator over the diagnostics for the given group.
3477 pub fn diagnostic_group<'a, O>(
3478 &'a self,
3479 group_id: usize,
3480 ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
3481 where
3482 O: 'a + FromAnchor,
3483 {
3484 self.diagnostics
3485 .iter()
3486 .flat_map(move |(_, set)| set.group(group_id, self))
3487 }
3488
3489 /// The number of times diagnostics were updated.
3490 pub fn diagnostics_update_count(&self) -> usize {
3491 self.diagnostics_update_count
3492 }
3493
3494 /// The number of times the buffer was parsed.
3495 pub fn parse_count(&self) -> usize {
3496 self.parse_count
3497 }
3498
3499 /// The number of times selections were updated.
3500 pub fn selections_update_count(&self) -> usize {
3501 self.selections_update_count
3502 }
3503
    /// Returns a snapshot of the underlying file.
3505 pub fn file(&self) -> Option<&Arc<dyn File>> {
3506 self.file.as_ref()
3507 }
3508
3509 /// Resolves the file path (relative to the worktree root) associated with the underlying file.
3510 pub fn resolve_file_path(&self, cx: &AppContext, include_root: bool) -> Option<PathBuf> {
3511 if let Some(file) = self.file() {
3512 if file.path().file_name().is_none() || include_root {
3513 Some(file.full_path(cx))
3514 } else {
3515 Some(file.path().to_path_buf())
3516 }
3517 } else {
3518 None
3519 }
3520 }
3521
3522 /// The number of times the underlying file was updated.
3523 pub fn file_update_count(&self) -> usize {
3524 self.file_update_count
3525 }
3526
3527 /// The number of times the git diff status was updated.
3528 pub fn git_diff_update_count(&self) -> usize {
3529 self.git_diff_update_count
3530 }
3531}
3532
3533fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
3534 indent_size_for_text(text.chars_at(Point::new(row, 0)))
3535}
3536
3537fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
3538 let mut result = IndentSize::spaces(0);
3539 for c in text {
3540 let kind = match c {
3541 ' ' => IndentKind::Space,
3542 '\t' => IndentKind::Tab,
3543 _ => break,
3544 };
3545 if result.len == 0 {
3546 result.kind = kind;
3547 }
3548 result.len += 1;
3549 }
3550 result
3551}
3552
3553impl Clone for BufferSnapshot {
3554 fn clone(&self) -> Self {
3555 Self {
3556 text: self.text.clone(),
3557 git_diff: self.git_diff.clone(),
3558 syntax: self.syntax.clone(),
3559 file: self.file.clone(),
3560 remote_selections: self.remote_selections.clone(),
3561 diagnostics: self.diagnostics.clone(),
3562 selections_update_count: self.selections_update_count,
3563 diagnostics_update_count: self.diagnostics_update_count,
3564 file_update_count: self.file_update_count,
3565 git_diff_update_count: self.git_diff_update_count,
3566 language: self.language.clone(),
3567 parse_count: self.parse_count,
3568 }
3569 }
3570}
3571
3572impl Deref for BufferSnapshot {
3573 type Target = text::BufferSnapshot;
3574
3575 fn deref(&self) -> &Self::Target {
3576 &self.text
3577 }
3578}
3579
3580unsafe impl<'a> Send for BufferChunks<'a> {}
3581
3582impl<'a> BufferChunks<'a> {
3583 pub(crate) fn new(
3584 text: &'a Rope,
3585 range: Range<usize>,
3586 syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
3587 diagnostic_endpoints: Vec<DiagnosticEndpoint>,
3588 ) -> Self {
3589 let mut highlights = None;
3590 if let Some((captures, highlight_maps)) = syntax {
3591 highlights = Some(BufferChunkHighlights {
3592 captures,
3593 next_capture: None,
3594 stack: Default::default(),
3595 highlight_maps,
3596 })
3597 }
3598
3599 let diagnostic_endpoints = diagnostic_endpoints.into_iter().peekable();
3600 let chunks = text.chunks_in_range(range.clone());
3601
3602 BufferChunks {
3603 range,
3604 chunks,
3605 diagnostic_endpoints,
3606 error_depth: 0,
3607 warning_depth: 0,
3608 information_depth: 0,
3609 hint_depth: 0,
3610 unnecessary_depth: 0,
3611 highlights,
3612 }
3613 }
3614
3615 /// Seeks to the given byte offset in the buffer.
3616 pub fn seek(&mut self, offset: usize) {
3617 self.range.start = offset;
3618 self.chunks.seek(self.range.start);
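        // Rebuild the highlighting state for the new position: drop scopes that end
        // at or before `offset`, fold a pending capture that spans `offset` onto the
        // stack, and re-scope the capture query to the remaining byte range.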
3619 if let Some(highlights) = self.highlights.as_mut() {
3620 highlights
3621 .stack
3622 .retain(|(end_offset, _)| *end_offset > offset);
3623 if let Some(capture) = &highlights.next_capture {
3624 if offset >= capture.node.start_byte() {
3625 let next_capture_end = capture.node.end_byte();
3626 if offset < next_capture_end {
3627 highlights.stack.push((
3628 next_capture_end,
3629 highlights.highlight_maps[capture.grammar_index].get(capture.index),
3630 ));
3631 }
3632 highlights.next_capture.take();
3633 }
3634 }
3635 highlights.captures.set_byte_range(self.range.clone());
3636 }
3637 }
3638
3639 /// The current byte offset in the buffer.
3640 pub fn offset(&self) -> usize {
3641 self.range.start
3642 }
3643
3644 fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
3645 let depth = match endpoint.severity {
3646 DiagnosticSeverity::ERROR => &mut self.error_depth,
3647 DiagnosticSeverity::WARNING => &mut self.warning_depth,
3648 DiagnosticSeverity::INFORMATION => &mut self.information_depth,
3649 DiagnosticSeverity::HINT => &mut self.hint_depth,
3650 _ => return,
3651 };
3652 if endpoint.is_start {
3653 *depth += 1;
3654 } else {
3655 *depth -= 1;
3656 }
3657
3658 if endpoint.is_unnecessary {
3659 if endpoint.is_start {
3660 self.unnecessary_depth += 1;
3661 } else {
3662 self.unnecessary_depth -= 1;
3663 }
3664 }
3665 }
3666
3667 fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
3668 if self.error_depth > 0 {
3669 Some(DiagnosticSeverity::ERROR)
3670 } else if self.warning_depth > 0 {
3671 Some(DiagnosticSeverity::WARNING)
3672 } else if self.information_depth > 0 {
3673 Some(DiagnosticSeverity::INFORMATION)
3674 } else if self.hint_depth > 0 {
3675 Some(DiagnosticSeverity::HINT)
3676 } else {
3677 None
3678 }
3679 }
3680
3681 fn current_code_is_unnecessary(&self) -> bool {
3682 self.unnecessary_depth > 0
3683 }
3684}
3685
3686impl<'a> Iterator for BufferChunks<'a> {
3687 type Item = Chunk<'a>;
3688
3689 fn next(&mut self) -> Option<Self::Item> {
3690 let mut next_capture_start = usize::MAX;
3691 let mut next_diagnostic_endpoint = usize::MAX;
3692
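        // Pop highlight scopes that have already ended, then push every syntax
        // capture starting at or before the current position onto the stack; the
        // first capture that starts later bounds the next chunk.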
3693 if let Some(highlights) = self.highlights.as_mut() {
3694 while let Some((parent_capture_end, _)) = highlights.stack.last() {
3695 if *parent_capture_end <= self.range.start {
3696 highlights.stack.pop();
3697 } else {
3698 break;
3699 }
3700 }
3701
3702 if highlights.next_capture.is_none() {
3703 highlights.next_capture = highlights.captures.next();
3704 }
3705
3706 while let Some(capture) = highlights.next_capture.as_ref() {
3707 if self.range.start < capture.node.start_byte() {
3708 next_capture_start = capture.node.start_byte();
3709 break;
3710 } else {
3711 let highlight_id =
3712 highlights.highlight_maps[capture.grammar_index].get(capture.index);
3713 highlights
3714 .stack
3715 .push((capture.node.end_byte(), highlight_id));
3716 highlights.next_capture = highlights.captures.next();
3717 }
3718 }
3719 }
3720
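        // Consume diagnostic endpoints at or before the current position, updating
        // the per-severity depth counters; the first endpoint past it also bounds
        // the next chunk.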
3721 while let Some(endpoint) = self.diagnostic_endpoints.peek().copied() {
3722 if endpoint.offset <= self.range.start {
3723 self.update_diagnostic_depths(endpoint);
3724 self.diagnostic_endpoints.next();
3725 } else {
3726 next_diagnostic_endpoint = endpoint.offset;
3727 break;
3728 }
3729 }
3730
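        // Emit text from the current rope chunk, cut short at the nearest of: the
        // chunk's end, the next capture start, the next diagnostic endpoint, or the
        // end of the innermost active highlight scope.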
3731 if let Some(chunk) = self.chunks.peek() {
3732 let chunk_start = self.range.start;
3733 let mut chunk_end = (self.chunks.offset() + chunk.len())
3734 .min(next_capture_start)
3735 .min(next_diagnostic_endpoint);
3736 let mut highlight_id = None;
3737 if let Some(highlights) = self.highlights.as_ref() {
3738 if let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last() {
3739 chunk_end = chunk_end.min(*parent_capture_end);
3740 highlight_id = Some(*parent_highlight_id);
3741 }
3742 }
3743
3744 let slice =
3745 &chunk[chunk_start - self.chunks.offset()..chunk_end - self.chunks.offset()];
3746 self.range.start = chunk_end;
3747 if self.range.start == self.chunks.offset() + chunk.len() {
3748 self.chunks.next().unwrap();
3749 }
3750
3751 Some(Chunk {
3752 text: slice,
3753 syntax_highlight_id: highlight_id,
3754 diagnostic_severity: self.current_diagnostic_severity(),
3755 is_unnecessary: self.current_code_is_unnecessary(),
3756 ..Default::default()
3757 })
3758 } else {
3759 None
3760 }
3761 }
3762}
3763
3764impl operation_queue::Operation for Operation {
3765 fn lamport_timestamp(&self) -> clock::Lamport {
3766 match self {
3767 Operation::Buffer(_) => {
3768 unreachable!("buffer operations should never be deferred at this layer")
3769 }
3770 Operation::UpdateDiagnostics {
3771 lamport_timestamp, ..
3772 }
3773 | Operation::UpdateSelections {
3774 lamport_timestamp, ..
3775 }
3776 | Operation::UpdateCompletionTriggers {
3777 lamport_timestamp, ..
3778 } => *lamport_timestamp,
3779 }
3780 }
3781}
3782
3783impl Default for Diagnostic {
3784 fn default() -> Self {
3785 Self {
3786 source: Default::default(),
3787 code: None,
3788 severity: DiagnosticSeverity::ERROR,
3789 message: Default::default(),
3790 group_id: 0,
3791 is_primary: false,
3792 is_disk_based: false,
3793 is_unnecessary: false,
3794 }
3795 }
3796}
3797
3798impl IndentSize {
    /// Returns an [IndentSize] representing the given number of spaces.
3800 pub fn spaces(len: u32) -> Self {
3801 Self {
3802 len,
3803 kind: IndentKind::Space,
3804 }
3805 }
3806
3807 /// Returns an [IndentSize] representing a tab.
3808 pub fn tab() -> Self {
3809 Self {
3810 len: 1,
3811 kind: IndentKind::Tab,
3812 }
3813 }
3814
    /// Returns an iterator over the characters represented by this [IndentSize].
3816 pub fn chars(&self) -> impl Iterator<Item = char> {
3817 iter::repeat(self.char()).take(self.len as usize)
3818 }
3819
3820 /// The character representation of this [IndentSize].
3821 pub fn char(&self) -> char {
3822 match self.kind {
3823 IndentKind::Space => ' ',
3824 IndentKind::Tab => '\t',
3825 }
3826 }
3827
    /// Consumes this [IndentSize] and returns one shrunk (`Ordering::Less`) or
    /// enlarged (`Ordering::Greater`) by the given size. The adjustment only
    /// applies when the indent kinds match (shrinking is skipped if it would
    /// underflow); an empty indent that is enlarged adopts the given size wholesale.
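    ///
    /// Illustrative sketch (ignored doctest):
    ///
    /// ```ignore
    /// let grown = IndentSize::spaces(4).with_delta(Ordering::Greater, IndentSize::spaces(2)); // 6 spaces
    /// let shrunk = IndentSize::spaces(4).with_delta(Ordering::Less, IndentSize::spaces(2)); // 2 spaces
    /// ```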
3830 pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
3831 match direction {
3832 Ordering::Less => {
3833 if self.kind == size.kind && self.len >= size.len {
3834 self.len -= size.len;
3835 }
3836 }
3837 Ordering::Equal => {}
3838 Ordering::Greater => {
3839 if self.len == 0 {
3840 self = size;
3841 } else if self.kind == size.kind {
3842 self.len += size.len;
3843 }
3844 }
3845 }
3846 self
3847 }
3848}
3849
3850#[cfg(any(test, feature = "test-support"))]
3851pub struct TestFile {
3852 pub path: Arc<Path>,
3853 pub root_name: String,
3854}
3855
3856#[cfg(any(test, feature = "test-support"))]
3857impl File for TestFile {
3858 fn path(&self) -> &Arc<Path> {
3859 &self.path
3860 }
3861
3862 fn full_path(&self, _: &gpui::AppContext) -> PathBuf {
3863 PathBuf::from(&self.root_name).join(self.path.as_ref())
3864 }
3865
3866 fn as_local(&self) -> Option<&dyn LocalFile> {
3867 None
3868 }
3869
3870 fn mtime(&self) -> Option<SystemTime> {
3871 unimplemented!()
3872 }
3873
3874 fn file_name<'a>(&'a self, _: &'a gpui::AppContext) -> &'a std::ffi::OsStr {
3875 self.path().file_name().unwrap_or(self.root_name.as_ref())
3876 }
3877
3878 fn worktree_id(&self) -> usize {
3879 0
3880 }
3881
3882 fn is_deleted(&self) -> bool {
3883 unimplemented!()
3884 }
3885
3886 fn as_any(&self) -> &dyn std::any::Any {
3887 unimplemented!()
3888 }
3889
3890 fn to_proto(&self) -> rpc::proto::File {
3891 unimplemented!()
3892 }
3893
3894 fn is_private(&self) -> bool {
3895 false
3896 }
3897}
3898
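/// Groups a sequence of `u32` values into contiguous half-open ranges, starting a
/// new range whenever a value is not adjacent to the previous one or the current
/// range has reached `max_len`. For example (illustrative), `[1, 2, 3, 5, 6, 9]`
/// with a `max_len` of 2 yields `1..3`, `3..4`, `5..7`, and `9..10`.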
3899pub(crate) fn contiguous_ranges(
3900 values: impl Iterator<Item = u32>,
3901 max_len: usize,
3902) -> impl Iterator<Item = Range<u32>> {
3903 let mut values = values;
3904 let mut current_range: Option<Range<u32>> = None;
3905 std::iter::from_fn(move || loop {
3906 if let Some(value) = values.next() {
3907 if let Some(range) = &mut current_range {
3908 if value == range.end && range.len() < max_len {
3909 range.end += 1;
3910 continue;
3911 }
3912 }
3913
3914 let prev_range = current_range.clone();
3915 current_range = Some(value..(value + 1));
3916 if prev_range.is_some() {
3917 return prev_range;
3918 }
3919 } else {
3920 return current_range.take();
3921 }
3922 })
3923}
3924
3925/// Returns the [CharKind] for the given character. When a scope is provided,
3926/// the function checks if the character is considered a word character
3927/// based on the language scope's word character settings.
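///
/// Illustrative sketch: with no scope, `char_kind(&None, 'a')` is `CharKind::Word`
/// and `char_kind(&None, '-')` is `CharKind::Punctuation`, whereas a scope whose
/// word characters include `'-'` reports `'-'` as `CharKind::Word`.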
3928pub fn char_kind(scope: &Option<LanguageScope>, c: char) -> CharKind {
3929 if c.is_whitespace() {
3930 return CharKind::Whitespace;
3931 } else if c.is_alphanumeric() || c == '_' {
3932 return CharKind::Word;
3933 }
3934
3935 if let Some(scope) = scope {
3936 if let Some(characters) = scope.word_characters() {
3937 if characters.contains(&c) {
3938 return CharKind::Word;
3939 }
3940 }
3941 }
3942
3943 CharKind::Punctuation
3944}
3945
3946/// Find all of the ranges of whitespace that occur at the ends of lines
3947/// in the given rope.
3948///
3949/// This could also be done with a regex search, but this implementation
3950/// avoids copying text.
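///
/// Illustrative sketch (assumes a [Rope] can be built directly from a `&str`):
///
/// ```ignore
/// let rope = Rope::from("a  \nb\t\nc");
/// assert_eq!(trailing_whitespace_ranges(&rope), vec![1..3, 5..6]);
/// ```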
3951pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
3952 let mut ranges = Vec::new();
3953
3954 let mut offset = 0;
3955 let mut prev_chunk_trailing_whitespace_range = 0..0;
3956 for chunk in rope.chunks() {
3957 let mut prev_line_trailing_whitespace_range = 0..0;
3958 for (i, line) in chunk.split('\n').enumerate() {
3959 let line_end_offset = offset + line.len();
3960 let trimmed_line_len = line.trim_end_matches(|c| matches!(c, ' ' | '\t')).len();
3961 let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;
3962
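            // If this chunk's first segment continues a line from the previous chunk
            // and is entirely whitespace, extend the whitespace run that began there.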
3963 if i == 0 && trimmed_line_len == 0 {
3964 trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
3965 }
3966 if !prev_line_trailing_whitespace_range.is_empty() {
3967 ranges.push(prev_line_trailing_whitespace_range);
3968 }
3969
3970 offset = line_end_offset + 1;
3971 prev_line_trailing_whitespace_range = trailing_whitespace_range;
3972 }
3973
3974 offset -= 1;
3975 prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
3976 }
3977
3978 if !prev_chunk_trailing_whitespace_range.is_empty() {
3979 ranges.push(prev_chunk_trailing_whitespace_range);
3980 }
3981
3982 ranges
3983}