1pub use crate::{
2 diagnostic_set::DiagnosticSet,
3 highlight_map::{HighlightId, HighlightMap},
4 markdown::ParsedMarkdown,
5 proto, Grammar, Language, LanguageRegistry,
6};
7use crate::{
8 diagnostic_set::{DiagnosticEntry, DiagnosticGroup},
9 language_settings::{language_settings, LanguageSettings},
10 markdown::parse_markdown,
11 outline::OutlineItem,
12 syntax_map::{
13 SyntaxLayer, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures, SyntaxMapMatches,
14 SyntaxSnapshot, ToTreeSitterPoint,
15 },
16 task_context::RunnableRange,
17 LanguageScope, Outline, RunnableCapture, RunnableTag,
18};
19use anyhow::{anyhow, Context, Result};
20pub use clock::ReplicaId;
21use futures::channel::oneshot;
22use gpui::{
23 AnyElement, AppContext, EventEmitter, HighlightStyle, ModelContext, Task, TaskLabel,
24 WindowContext,
25};
26use lazy_static::lazy_static;
27use lsp::LanguageServerId;
28use parking_lot::Mutex;
29use similar::{ChangeTag, TextDiff};
30use smallvec::SmallVec;
31use smol::future::yield_now;
32use std::{
33 any::Any,
34 cmp::{self, Ordering},
35 collections::BTreeMap,
36 ffi::OsStr,
37 fmt,
38 future::Future,
39 iter::{self, Iterator, Peekable},
40 mem,
41 ops::{Deref, Range},
42 path::{Path, PathBuf},
43 str,
44 sync::Arc,
45 time::{Duration, Instant, SystemTime},
46 vec,
47};
48use sum_tree::TreeMap;
49use text::operation_queue::OperationQueue;
50use text::*;
51pub use text::{
52 Anchor, Bias, Buffer as TextBuffer, BufferId, BufferSnapshot as TextBufferSnapshot, Edit,
53 OffsetRangeExt, OffsetUtf16, Patch, Point, PointUtf16, Rope, Selection, SelectionGoal,
54 Subscription, TextDimension, TextSummary, ToOffset, ToOffsetUtf16, ToPoint, ToPointUtf16,
55 Transaction, TransactionId, Unclipped,
56};
57use theme::SyntaxTheme;
58#[cfg(any(test, feature = "test-support"))]
59use util::RandomCharIter;
60use util::RangeExt;
61
62#[cfg(any(test, feature = "test-support"))]
63pub use {tree_sitter_rust, tree_sitter_typescript};
64
65pub use lsp::DiagnosticSeverity;
66
67lazy_static! {
68 /// A label for the background task spawned by the buffer to compute
69 /// a diff against the contents of its file.
70 pub static ref BUFFER_DIFF_TASK: TaskLabel = TaskLabel::new();
71}
72
73/// Indicate whether a [Buffer] has permissions to edit.
74#[derive(PartialEq, Clone, Copy, Debug)]
75pub enum Capability {
76 /// The buffer is a mutable replica.
77 ReadWrite,
78 /// The buffer is a read-only replica.
79 ReadOnly,
80}
81
82pub type BufferRow = u32;
83
84/// An in-memory representation of a source code file, including its text,
85/// syntax trees, git status, and diagnostics.
86pub struct Buffer {
87 text: TextBuffer,
88 diff_base: Option<Rope>,
89 git_diff: git::diff::BufferDiff,
90 file: Option<Arc<dyn File>>,
91 /// The mtime of the file when this buffer was last loaded from
92 /// or saved to disk.
93 saved_mtime: Option<SystemTime>,
94 /// The version vector when this buffer was last loaded from
95 /// or saved to disk.
96 saved_version: clock::Global,
97 transaction_depth: usize,
98 was_dirty_before_starting_transaction: Option<bool>,
99 reload_task: Option<Task<Result<()>>>,
100 language: Option<Arc<Language>>,
101 autoindent_requests: Vec<Arc<AutoindentRequest>>,
102 pending_autoindent: Option<Task<()>>,
103 sync_parse_timeout: Duration,
104 syntax_map: Mutex<SyntaxMap>,
105 parsing_in_background: bool,
106 parse_count: usize,
107 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
108 remote_selections: TreeMap<ReplicaId, SelectionSet>,
109 selections_update_count: usize,
110 diagnostics_update_count: usize,
111 diagnostics_timestamp: clock::Lamport,
112 file_update_count: usize,
113 git_diff_update_count: usize,
114 completion_triggers: Vec<String>,
115 completion_triggers_timestamp: clock::Lamport,
116 deferred_ops: OperationQueue<Operation>,
117 capability: Capability,
118 has_conflict: bool,
119 diff_base_version: usize,
120}
121
122/// An immutable, cheaply cloneable representation of a fixed
123/// state of a buffer.
124pub struct BufferSnapshot {
125 text: text::BufferSnapshot,
126 git_diff: git::diff::BufferDiff,
127 pub(crate) syntax: SyntaxSnapshot,
128 file: Option<Arc<dyn File>>,
129 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
130 diagnostics_update_count: usize,
131 file_update_count: usize,
132 git_diff_update_count: usize,
133 remote_selections: TreeMap<ReplicaId, SelectionSet>,
134 selections_update_count: usize,
135 language: Option<Arc<Language>>,
136 parse_count: usize,
137}
138
139/// The kind and amount of indentation in a particular line. For now,
140/// assumes that indentation is all the same character.
141#[derive(Clone, Copy, Debug, PartialEq, Eq, Default)]
142pub struct IndentSize {
143 /// The number of bytes that comprise the indentation.
144 pub len: u32,
145 /// The kind of whitespace used for indentation.
146 pub kind: IndentKind,
147}
148
149/// A whitespace character that's used for indentation.
150#[derive(Clone, Copy, Debug, PartialEq, Eq, Default)]
151pub enum IndentKind {
152 /// An ASCII space character.
153 #[default]
154 Space,
155 /// An ASCII tab character.
156 Tab,
157}
158
159/// The shape of a selection cursor.
160#[derive(Copy, Clone, PartialEq, Eq, Debug, Default)]
161pub enum CursorShape {
162 /// A vertical bar
163 #[default]
164 Bar,
165 /// A block that surrounds the following character
166 Block,
167 /// An underline that runs along the following character
168 Underscore,
169 /// A box drawn around the following character
170 Hollow,
171}
172
173#[derive(Clone, Debug)]
174struct SelectionSet {
175 line_mode: bool,
176 cursor_shape: CursorShape,
177 selections: Arc<[Selection<Anchor>]>,
178 lamport_timestamp: clock::Lamport,
179}
180
181/// A diagnostic associated with a certain range of a buffer.
182#[derive(Clone, Debug, PartialEq, Eq)]
183pub struct Diagnostic {
184 /// The name of the service that produced this diagnostic.
185 pub source: Option<String>,
186 /// A machine-readable code that identifies this diagnostic.
187 pub code: Option<String>,
188 /// Whether this diagnostic is a hint, warning, or error.
189 pub severity: DiagnosticSeverity,
190 /// The human-readable message associated with this diagnostic.
191 pub message: String,
192 /// An id that identifies the group to which this diagnostic belongs.
193 ///
194 /// When a language server produces a diagnostic with
195 /// one or more associated diagnostics, those diagnostics are all
196 /// assigned a single group id.
197 pub group_id: usize,
198 /// Whether this diagnostic is the primary diagnostic for its group.
199 ///
200 /// In a given group, the primary diagnostic is the top-level diagnostic
201 /// returned by the language server. The non-primary diagnostics are the
202 /// associated diagnostics.
203 pub is_primary: bool,
204 /// Whether this diagnostic is considered to originate from an analysis of
205 /// files on disk, as opposed to any unsaved buffer contents. This is a
206 /// property of a given diagnostic source, and is configured for a given
207 /// language server via the [`LspAdapter::disk_based_diagnostic_sources`](crate::LspAdapter::disk_based_diagnostic_sources) method
208 /// for the language server.
209 pub is_disk_based: bool,
210 /// Whether this diagnostic marks unnecessary code.
211 pub is_unnecessary: bool,
212}
213
214/// TODO - move this into the `project` crate and make it private.
215pub async fn prepare_completion_documentation(
216 documentation: &lsp::Documentation,
217 language_registry: &Arc<LanguageRegistry>,
218 language: Option<Arc<Language>>,
219) -> Documentation {
220 match documentation {
221 lsp::Documentation::String(text) => {
222 if text.lines().count() <= 1 {
223 Documentation::SingleLine(text.clone())
224 } else {
225 Documentation::MultiLinePlainText(text.clone())
226 }
227 }
228
229 lsp::Documentation::MarkupContent(lsp::MarkupContent { kind, value }) => match kind {
230 lsp::MarkupKind::PlainText => {
231 if value.lines().count() <= 1 {
232 Documentation::SingleLine(value.clone())
233 } else {
234 Documentation::MultiLinePlainText(value.clone())
235 }
236 }
237
238 lsp::MarkupKind::Markdown => {
239 let parsed = parse_markdown(value, language_registry, language).await;
240 Documentation::MultiLineMarkdown(parsed)
241 }
242 },
243 }
244}
245
246/// Documentation associated with a [`Completion`].
247#[derive(Clone, Debug)]
248pub enum Documentation {
249 /// There is no documentation for this completion.
250 Undocumented,
251 /// A single line of documentation.
252 SingleLine(String),
253 /// Multiple lines of plain text documentation.
254 MultiLinePlainText(String),
255 /// Markdown documentation.
256 MultiLineMarkdown(ParsedMarkdown),
257}
258
259/// An operation used to synchronize this buffer with its other replicas.
260#[derive(Clone, Debug, PartialEq)]
261pub enum Operation {
262 /// A text operation.
263 Buffer(text::Operation),
264
265 /// An update to the buffer's diagnostics.
266 UpdateDiagnostics {
267 /// The id of the language server that produced the new diagnostics.
268 server_id: LanguageServerId,
269 /// The diagnostics.
270 diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
271 /// The buffer's lamport timestamp.
272 lamport_timestamp: clock::Lamport,
273 },
274
275 /// An update to the most recent selections in this buffer.
276 UpdateSelections {
277 /// The selections.
278 selections: Arc<[Selection<Anchor>]>,
279 /// The buffer's lamport timestamp.
280 lamport_timestamp: clock::Lamport,
281 /// Whether the selections are in 'line mode'.
282 line_mode: bool,
283 /// The [`CursorShape`] associated with these selections.
284 cursor_shape: CursorShape,
285 },
286
287 /// An update to the characters that should trigger autocompletion
288 /// for this buffer.
289 UpdateCompletionTriggers {
290 /// The characters that trigger autocompletion.
291 triggers: Vec<String>,
292 /// The buffer's lamport timestamp.
293 lamport_timestamp: clock::Lamport,
294 },
295}
296
297/// An event that occurs in a buffer.
298#[derive(Clone, Debug, PartialEq)]
299pub enum Event {
300 /// The buffer was changed in a way that must be
301 /// propagated to its other replicas.
302 Operation(Operation),
303 /// The buffer was edited.
304 Edited,
305 /// The buffer's `dirty` bit changed.
306 DirtyChanged,
307 /// The buffer was saved.
308 Saved,
309 /// The buffer's file was changed on disk.
310 FileHandleChanged,
311 /// The buffer was reloaded.
312 Reloaded,
313 /// The buffer's diff_base changed.
314 DiffBaseChanged,
315 /// Buffer's excerpts for a certain diff base were recalculated.
316 DiffUpdated,
317 /// The buffer's language was changed.
318 LanguageChanged,
319 /// The buffer's syntax trees were updated.
320 Reparsed,
321 /// The buffer's diagnostics were updated.
322 DiagnosticsUpdated,
323 /// The buffer gained or lost editing capabilities.
324 CapabilityChanged,
325 /// The buffer was explicitly requested to close.
326 Closed,
327}
328
329/// The file associated with a buffer.
330pub trait File: Send + Sync {
331 /// Returns the [`LocalFile`] associated with this file, if the
332 /// file is local.
333 fn as_local(&self) -> Option<&dyn LocalFile>;
334
335 /// Returns whether this file is local.
336 fn is_local(&self) -> bool {
337 self.as_local().is_some()
338 }
339
340 /// Returns the file's mtime.
341 fn mtime(&self) -> Option<SystemTime>;
342
343 /// Returns the path of this file relative to the worktree's root directory.
344 fn path(&self) -> &Arc<Path>;
345
346 /// Returns the path of this file relative to the worktree's parent directory (this means it
347 /// includes the name of the worktree's root folder).
348 fn full_path(&self, cx: &AppContext) -> PathBuf;
349
350 /// Returns the last component of this handle's absolute path. If this handle refers to the root
351 /// of its worktree, then this method will return the name of the worktree itself.
352 fn file_name<'a>(&'a self, cx: &'a AppContext) -> &'a OsStr;
353
354 /// Returns the id of the worktree to which this file belongs.
355 ///
356 /// This is needed for looking up project-specific settings.
357 fn worktree_id(&self) -> usize;
358
359 /// Returns whether the file has been deleted.
360 fn is_deleted(&self) -> bool;
361
362 /// Returns whether the file existed on disk at one point
363 fn is_created(&self) -> bool {
364 self.mtime().is_some()
365 }
366
367 /// Converts this file into an [`Any`] trait object.
368 fn as_any(&self) -> &dyn Any;
369
370 /// Converts this file into a protobuf message.
371 fn to_proto(&self) -> rpc::proto::File;
372
373 /// Return whether Zed considers this to be a private file.
374 fn is_private(&self) -> bool;
375}
376
377/// The file associated with a buffer, in the case where the file is on the local disk.
378pub trait LocalFile: File {
379 /// Returns the absolute path of this file.
380 fn abs_path(&self, cx: &AppContext) -> PathBuf;
381
382 /// Loads the file's contents from disk.
383 fn load(&self, cx: &AppContext) -> Task<Result<String>>;
384
385 /// Called when the buffer is reloaded from disk.
386 fn buffer_reloaded(
387 &self,
388 buffer_id: BufferId,
389 version: &clock::Global,
390 line_ending: LineEnding,
391 mtime: Option<SystemTime>,
392 cx: &mut AppContext,
393 );
394
395 /// Returns true if the file should not be shared with collaborators.
396 fn is_private(&self, _: &AppContext) -> bool {
397 false
398 }
399}
400
401/// The auto-indent behavior associated with an editing operation.
402/// For some editing operations, each affected line of text has its
403/// indentation recomputed. For other operations, the entire block
404/// of edited text is adjusted uniformly.
405#[derive(Clone, Debug)]
406pub enum AutoindentMode {
407 /// Indent each line of inserted text.
408 EachLine,
409 /// Apply the same indentation adjustment to all of the lines
410 /// in a given insertion.
411 Block {
412 /// The original indentation level of the first line of each
413 /// insertion, if it has been copied.
414 original_indent_columns: Vec<u32>,
415 },
416}
417
418#[derive(Clone)]
419struct AutoindentRequest {
420 before_edit: BufferSnapshot,
421 entries: Vec<AutoindentRequestEntry>,
422 is_block_mode: bool,
423}
424
425#[derive(Clone)]
426struct AutoindentRequestEntry {
427 /// A range of the buffer whose indentation should be adjusted.
428 range: Range<Anchor>,
429 /// Whether or not these lines should be considered brand new, for the
430 /// purpose of auto-indent. When text is not new, its indentation will
431 /// only be adjusted if the suggested indentation level has *changed*
432 /// since the edit was made.
433 first_line_is_new: bool,
434 indent_size: IndentSize,
435 original_indent_column: Option<u32>,
436}
437
438#[derive(Debug)]
439struct IndentSuggestion {
440 basis_row: u32,
441 delta: Ordering,
442 within_error: bool,
443}
444
445struct BufferChunkHighlights<'a> {
446 captures: SyntaxMapCaptures<'a>,
447 next_capture: Option<SyntaxMapCapture<'a>>,
448 stack: Vec<(usize, HighlightId)>,
449 highlight_maps: Vec<HighlightMap>,
450}
451
452/// An iterator that yields chunks of a buffer's text, along with their
453/// syntax highlights and diagnostic status.
454pub struct BufferChunks<'a> {
455 range: Range<usize>,
456 chunks: text::Chunks<'a>,
457 diagnostic_endpoints: Peekable<vec::IntoIter<DiagnosticEndpoint>>,
458 error_depth: usize,
459 warning_depth: usize,
460 information_depth: usize,
461 hint_depth: usize,
462 unnecessary_depth: usize,
463 highlights: Option<BufferChunkHighlights<'a>>,
464}
465
466/// A chunk of a buffer's text, along with its syntax highlight and
467/// diagnostic status.
468#[derive(Clone, Debug, Default)]
469pub struct Chunk<'a> {
470 /// The text of the chunk.
471 pub text: &'a str,
472 /// The syntax highlighting style of the chunk.
473 pub syntax_highlight_id: Option<HighlightId>,
474 /// The highlight style that has been applied to this chunk in
475 /// the editor.
476 pub highlight_style: Option<HighlightStyle>,
477 /// The severity of diagnostic associated with this chunk, if any.
478 pub diagnostic_severity: Option<DiagnosticSeverity>,
479 /// Whether this chunk of text is marked as unnecessary.
480 pub is_unnecessary: bool,
481 /// Whether this chunk of text was originally a tab character.
482 pub is_tab: bool,
483 /// An optional recipe for how the chunk should be presented.
484 pub renderer: Option<ChunkRenderer>,
485}
486
487/// A recipe for how the chunk should be presented.
488#[derive(Clone)]
489pub struct ChunkRenderer {
490 /// creates a custom element to represent this chunk.
491 pub render: Arc<dyn Send + Sync + Fn(&mut WindowContext) -> AnyElement>,
492 /// If true, the element is constrained to the shaped width of the text.
493 pub constrain_width: bool,
494}
495
496impl fmt::Debug for ChunkRenderer {
497 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
498 f.debug_struct("ChunkRenderer")
499 .field("constrain_width", &self.constrain_width)
500 .finish()
501 }
502}
503
504/// A set of edits to a given version of a buffer, computed asynchronously.
505#[derive(Debug)]
506pub struct Diff {
507 pub(crate) base_version: clock::Global,
508 line_ending: LineEnding,
509 edits: Vec<(Range<usize>, Arc<str>)>,
510}
511
512#[derive(Clone, Copy)]
513pub(crate) struct DiagnosticEndpoint {
514 offset: usize,
515 is_start: bool,
516 severity: DiagnosticSeverity,
517 is_unnecessary: bool,
518}
519
520/// A class of characters, used for characterizing a run of text.
521#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
522pub enum CharKind {
523 /// Whitespace.
524 Whitespace,
525 /// Punctuation.
526 Punctuation,
527 /// Word.
528 Word,
529}
530
531/// A runnable is a set of data about a region that could be resolved into a task
532pub struct Runnable {
533 pub tags: SmallVec<[RunnableTag; 1]>,
534 pub language: Arc<Language>,
535 pub buffer: BufferId,
536}
537
538#[derive(Clone, Debug, PartialEq)]
539pub struct IndentGuide {
540 pub buffer_id: BufferId,
541 pub start_row: BufferRow,
542 pub end_row: BufferRow,
543 pub depth: u32,
544 pub tab_size: u32,
545}
546
547impl IndentGuide {
548 pub fn new(
549 buffer_id: BufferId,
550 start_row: BufferRow,
551 end_row: BufferRow,
552 depth: u32,
553 tab_size: u32,
554 ) -> Self {
555 Self {
556 buffer_id,
557 start_row,
558 end_row,
559 depth,
560 tab_size,
561 }
562 }
563
564 pub fn indent_level(&self) -> u32 {
565 self.depth * self.tab_size
566 }
567}
568
569impl Buffer {
570 /// Create a new buffer with the given base text.
571 pub fn local<T: Into<String>>(base_text: T, cx: &mut ModelContext<Self>) -> Self {
572 Self::build(
573 TextBuffer::new(0, cx.entity_id().as_non_zero_u64().into(), base_text.into()),
574 None,
575 None,
576 Capability::ReadWrite,
577 )
578 }
579
580 /// Create a new buffer with the given base text that has proper line endings and other normalization applied.
581 pub fn local_normalized(
582 base_text_normalized: Rope,
583 line_ending: LineEnding,
584 cx: &mut ModelContext<Self>,
585 ) -> Self {
586 Self::build(
587 TextBuffer::new_normalized(
588 0,
589 cx.entity_id().as_non_zero_u64().into(),
590 line_ending,
591 base_text_normalized,
592 ),
593 None,
594 None,
595 Capability::ReadWrite,
596 )
597 }
598
599 /// Create a new buffer that is a replica of a remote buffer.
600 pub fn remote(
601 remote_id: BufferId,
602 replica_id: ReplicaId,
603 capability: Capability,
604 base_text: impl Into<String>,
605 ) -> Self {
606 Self::build(
607 TextBuffer::new(replica_id, remote_id, base_text.into()),
608 None,
609 None,
610 capability,
611 )
612 }
613
614 /// Create a new buffer that is a replica of a remote buffer, populating its
615 /// state from the given protobuf message.
616 pub fn from_proto(
617 replica_id: ReplicaId,
618 capability: Capability,
619 message: proto::BufferState,
620 file: Option<Arc<dyn File>>,
621 ) -> Result<Self> {
622 let buffer_id = BufferId::new(message.id)
623 .with_context(|| anyhow!("Could not deserialize buffer_id"))?;
624 let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text);
625 let mut this = Self::build(buffer, message.diff_base, file, capability);
626 this.text.set_line_ending(proto::deserialize_line_ending(
627 rpc::proto::LineEnding::from_i32(message.line_ending)
628 .ok_or_else(|| anyhow!("missing line_ending"))?,
629 ));
630 this.saved_version = proto::deserialize_version(&message.saved_version);
631 this.saved_mtime = message.saved_mtime.map(|time| time.into());
632 Ok(this)
633 }
634
635 /// Serialize the buffer's state to a protobuf message.
636 pub fn to_proto(&self) -> proto::BufferState {
637 proto::BufferState {
638 id: self.remote_id().into(),
639 file: self.file.as_ref().map(|f| f.to_proto()),
640 base_text: self.base_text().to_string(),
641 diff_base: self.diff_base.as_ref().map(|h| h.to_string()),
642 line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
643 saved_version: proto::serialize_version(&self.saved_version),
644 saved_mtime: self.saved_mtime.map(|time| time.into()),
645 }
646 }
647
648 /// Serialize as protobufs all of the changes to the buffer since the given version.
649 pub fn serialize_ops(
650 &self,
651 since: Option<clock::Global>,
652 cx: &AppContext,
653 ) -> Task<Vec<proto::Operation>> {
654 let mut operations = Vec::new();
655 operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));
656
657 operations.extend(self.remote_selections.iter().map(|(_, set)| {
658 proto::serialize_operation(&Operation::UpdateSelections {
659 selections: set.selections.clone(),
660 lamport_timestamp: set.lamport_timestamp,
661 line_mode: set.line_mode,
662 cursor_shape: set.cursor_shape,
663 })
664 }));
665
666 for (server_id, diagnostics) in &self.diagnostics {
667 operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
668 lamport_timestamp: self.diagnostics_timestamp,
669 server_id: *server_id,
670 diagnostics: diagnostics.iter().cloned().collect(),
671 }));
672 }
673
674 operations.push(proto::serialize_operation(
675 &Operation::UpdateCompletionTriggers {
676 triggers: self.completion_triggers.clone(),
677 lamport_timestamp: self.completion_triggers_timestamp,
678 },
679 ));
680
681 let text_operations = self.text.operations().clone();
682 cx.background_executor().spawn(async move {
683 let since = since.unwrap_or_default();
684 operations.extend(
685 text_operations
686 .iter()
687 .filter(|(_, op)| !since.observed(op.timestamp()))
688 .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
689 );
690 operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
691 operations
692 })
693 }
694
695 /// Assign a language to the buffer, returning the buffer.
696 pub fn with_language(mut self, language: Arc<Language>, cx: &mut ModelContext<Self>) -> Self {
697 self.set_language(Some(language), cx);
698 self
699 }
700
701 /// Returns the [Capability] of this buffer.
702 pub fn capability(&self) -> Capability {
703 self.capability
704 }
705
706 /// Whether this buffer can only be read.
707 pub fn read_only(&self) -> bool {
708 self.capability == Capability::ReadOnly
709 }
710
711 /// Builds a [Buffer] with the given underlying [TextBuffer], diff base, [File] and [Capability].
712 pub fn build(
713 buffer: TextBuffer,
714 diff_base: Option<String>,
715 file: Option<Arc<dyn File>>,
716 capability: Capability,
717 ) -> Self {
718 let saved_mtime = file.as_ref().and_then(|file| file.mtime());
719
720 Self {
721 saved_mtime,
722 saved_version: buffer.version(),
723 reload_task: None,
724 transaction_depth: 0,
725 was_dirty_before_starting_transaction: None,
726 text: buffer,
727 diff_base: diff_base
728 .map(|mut raw_diff_base| {
729 LineEnding::normalize(&mut raw_diff_base);
730 raw_diff_base
731 })
732 .map(Rope::from),
733 diff_base_version: 0,
734 git_diff: git::diff::BufferDiff::new(),
735 file,
736 capability,
737 syntax_map: Mutex::new(SyntaxMap::new()),
738 parsing_in_background: false,
739 parse_count: 0,
740 sync_parse_timeout: Duration::from_millis(1),
741 autoindent_requests: Default::default(),
742 pending_autoindent: Default::default(),
743 language: None,
744 remote_selections: Default::default(),
745 selections_update_count: 0,
746 diagnostics: Default::default(),
747 diagnostics_update_count: 0,
748 diagnostics_timestamp: Default::default(),
749 file_update_count: 0,
750 git_diff_update_count: 0,
751 completion_triggers: Default::default(),
752 completion_triggers_timestamp: Default::default(),
753 deferred_ops: OperationQueue::new(),
754 has_conflict: false,
755 }
756 }
757
758 /// Retrieve a snapshot of the buffer's current state. This is computationally
759 /// cheap, and allows reading from the buffer on a background thread.
760 pub fn snapshot(&self) -> BufferSnapshot {
761 let text = self.text.snapshot();
762 let mut syntax_map = self.syntax_map.lock();
763 syntax_map.interpolate(&text);
764 let syntax = syntax_map.snapshot();
765
766 BufferSnapshot {
767 text,
768 syntax,
769 git_diff: self.git_diff.clone(),
770 file: self.file.clone(),
771 remote_selections: self.remote_selections.clone(),
772 diagnostics: self.diagnostics.clone(),
773 diagnostics_update_count: self.diagnostics_update_count,
774 file_update_count: self.file_update_count,
775 git_diff_update_count: self.git_diff_update_count,
776 language: self.language.clone(),
777 parse_count: self.parse_count,
778 selections_update_count: self.selections_update_count,
779 }
780 }
781
782 #[cfg(test)]
783 pub(crate) fn as_text_snapshot(&self) -> &text::BufferSnapshot {
784 &self.text
785 }
786
787 /// Retrieve a snapshot of the buffer's raw text, without any
788 /// language-related state like the syntax tree or diagnostics.
789 pub fn text_snapshot(&self) -> text::BufferSnapshot {
790 self.text.snapshot()
791 }
792
793 /// The file associated with the buffer, if any.
794 pub fn file(&self) -> Option<&Arc<dyn File>> {
795 self.file.as_ref()
796 }
797
798 /// The version of the buffer that was last saved or reloaded from disk.
799 pub fn saved_version(&self) -> &clock::Global {
800 &self.saved_version
801 }
802
803 /// The mtime of the buffer's file when the buffer was last saved or reloaded from disk.
804 pub fn saved_mtime(&self) -> Option<SystemTime> {
805 self.saved_mtime
806 }
807
808 /// Assign a language to the buffer.
809 pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut ModelContext<Self>) {
810 self.parse_count += 1;
811 self.syntax_map.lock().clear();
812 self.language = language;
813 self.reparse(cx);
814 cx.emit(Event::LanguageChanged);
815 }
816
817 /// Assign a language registry to the buffer. This allows the buffer to retrieve
818 /// other languages if parts of the buffer are written in different languages.
819 pub fn set_language_registry(&mut self, language_registry: Arc<LanguageRegistry>) {
820 self.syntax_map
821 .lock()
822 .set_language_registry(language_registry);
823 }
824
825 /// Assign the buffer a new [Capability].
826 pub fn set_capability(&mut self, capability: Capability, cx: &mut ModelContext<Self>) {
827 self.capability = capability;
828 cx.emit(Event::CapabilityChanged)
829 }
830
831 /// This method is called to signal that the buffer has been saved.
832 pub fn did_save(
833 &mut self,
834 version: clock::Global,
835 mtime: Option<SystemTime>,
836 cx: &mut ModelContext<Self>,
837 ) {
838 self.saved_version = version;
839 self.has_conflict = false;
840 self.saved_mtime = mtime;
841 cx.emit(Event::Saved);
842 cx.notify();
843 }
844
845 /// Reloads the contents of the buffer from disk.
846 pub fn reload(
847 &mut self,
848 cx: &mut ModelContext<Self>,
849 ) -> oneshot::Receiver<Option<Transaction>> {
850 let (tx, rx) = futures::channel::oneshot::channel();
851 let prev_version = self.text.version();
852 self.reload_task = Some(cx.spawn(|this, mut cx| async move {
853 let Some((new_mtime, new_text)) = this.update(&mut cx, |this, cx| {
854 let file = this.file.as_ref()?.as_local()?;
855 Some((file.mtime(), file.load(cx)))
856 })?
857 else {
858 return Ok(());
859 };
860
861 let new_text = new_text.await?;
862 let diff = this
863 .update(&mut cx, |this, cx| this.diff(new_text.clone(), cx))?
864 .await;
865 this.update(&mut cx, |this, cx| {
866 if this.version() == diff.base_version {
867 this.finalize_last_transaction();
868 this.apply_diff(diff, cx);
869 tx.send(this.finalize_last_transaction().cloned()).ok();
870 this.has_conflict = false;
871 this.did_reload(this.version(), this.line_ending(), new_mtime, cx);
872 } else {
873 if !diff.edits.is_empty()
874 || this
875 .edits_since::<usize>(&diff.base_version)
876 .next()
877 .is_some()
878 {
879 this.has_conflict = true;
880 }
881
882 this.did_reload(prev_version, this.line_ending(), this.saved_mtime, cx);
883 }
884
885 this.reload_task.take();
886 })
887 }));
888 rx
889 }
890
891 /// This method is called to signal that the buffer has been reloaded.
892 pub fn did_reload(
893 &mut self,
894 version: clock::Global,
895 line_ending: LineEnding,
896 mtime: Option<SystemTime>,
897 cx: &mut ModelContext<Self>,
898 ) {
899 self.saved_version = version;
900 self.text.set_line_ending(line_ending);
901 self.saved_mtime = mtime;
902 if let Some(file) = self.file.as_ref().and_then(|f| f.as_local()) {
903 file.buffer_reloaded(
904 self.remote_id(),
905 &self.saved_version,
906 self.line_ending(),
907 self.saved_mtime,
908 cx,
909 );
910 }
911 cx.emit(Event::Reloaded);
912 cx.notify();
913 }
914
915 /// Updates the [File] backing this buffer. This should be called when
916 /// the file has changed or has been deleted.
917 pub fn file_updated(&mut self, new_file: Arc<dyn File>, cx: &mut ModelContext<Self>) {
918 let mut file_changed = false;
919
920 if let Some(old_file) = self.file.as_ref() {
921 if new_file.path() != old_file.path() {
922 file_changed = true;
923 }
924
925 if new_file.is_deleted() {
926 if !old_file.is_deleted() {
927 file_changed = true;
928 if !self.is_dirty() {
929 cx.emit(Event::DirtyChanged);
930 }
931 }
932 } else {
933 let new_mtime = new_file.mtime();
934 if new_mtime != old_file.mtime() {
935 file_changed = true;
936
937 if !self.is_dirty() {
938 self.reload(cx).close();
939 }
940 }
941 }
942 } else {
943 file_changed = true;
944 };
945
946 self.file = Some(new_file);
947 if file_changed {
948 self.file_update_count += 1;
949 cx.emit(Event::FileHandleChanged);
950 cx.notify();
951 }
952 }
953
954 /// Returns the current diff base, see [Buffer::set_diff_base].
955 pub fn diff_base(&self) -> Option<&Rope> {
956 self.diff_base.as_ref()
957 }
958
959 /// Sets the text that will be used to compute a Git diff
960 /// against the buffer text.
961 pub fn set_diff_base(&mut self, diff_base: Option<String>, cx: &mut ModelContext<Self>) {
962 self.diff_base = diff_base
963 .map(|mut raw_diff_base| {
964 LineEnding::normalize(&mut raw_diff_base);
965 raw_diff_base
966 })
967 .map(Rope::from);
968 self.diff_base_version += 1;
969 if let Some(recalc_task) = self.git_diff_recalc(cx) {
970 cx.spawn(|buffer, mut cx| async move {
971 recalc_task.await;
972 buffer
973 .update(&mut cx, |_, cx| {
974 cx.emit(Event::DiffBaseChanged);
975 })
976 .ok();
977 })
978 .detach();
979 }
980 }
981
982 /// Returns a number, unique per diff base set to the buffer.
983 pub fn diff_base_version(&self) -> usize {
984 self.diff_base_version
985 }
986
987 /// Recomputes the Git diff status.
988 pub fn git_diff_recalc(&mut self, cx: &mut ModelContext<Self>) -> Option<Task<()>> {
989 let diff_base = self.diff_base.clone()?;
990 let snapshot = self.snapshot();
991
992 let mut diff = self.git_diff.clone();
993 let diff = cx.background_executor().spawn(async move {
994 diff.update(&diff_base, &snapshot).await;
995 diff
996 });
997
998 Some(cx.spawn(|this, mut cx| async move {
999 let buffer_diff = diff.await;
1000 this.update(&mut cx, |this, cx| {
1001 this.git_diff = buffer_diff;
1002 this.git_diff_update_count += 1;
1003 cx.emit(Event::DiffUpdated);
1004 })
1005 .ok();
1006 }))
1007 }
1008
1009 /// Returns the primary [Language] assigned to this [Buffer].
1010 pub fn language(&self) -> Option<&Arc<Language>> {
1011 self.language.as_ref()
1012 }
1013
1014 /// Returns the [Language] at the given location.
1015 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
1016 let offset = position.to_offset(self);
1017 self.syntax_map
1018 .lock()
1019 .layers_for_range(offset..offset, &self.text)
1020 .last()
1021 .map(|info| info.language.clone())
1022 .or_else(|| self.language.clone())
1023 }
1024
1025 /// The number of times the buffer was parsed.
1026 pub fn parse_count(&self) -> usize {
1027 self.parse_count
1028 }
1029
1030 /// The number of times selections were updated.
1031 pub fn selections_update_count(&self) -> usize {
1032 self.selections_update_count
1033 }
1034
1035 /// The number of times diagnostics were updated.
1036 pub fn diagnostics_update_count(&self) -> usize {
1037 self.diagnostics_update_count
1038 }
1039
1040 /// The number of times the underlying file was updated.
1041 pub fn file_update_count(&self) -> usize {
1042 self.file_update_count
1043 }
1044
1045 /// The number of times the git diff status was updated.
1046 pub fn git_diff_update_count(&self) -> usize {
1047 self.git_diff_update_count
1048 }
1049
1050 /// Whether the buffer is being parsed in the background.
1051 #[cfg(any(test, feature = "test-support"))]
1052 pub fn is_parsing(&self) -> bool {
1053 self.parsing_in_background
1054 }
1055
1056 /// Indicates whether the buffer contains any regions that may be
1057 /// written in a language that hasn't been loaded yet.
1058 pub fn contains_unknown_injections(&self) -> bool {
1059 self.syntax_map.lock().contains_unknown_injections()
1060 }
1061
1062 #[cfg(test)]
1063 pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
1064 self.sync_parse_timeout = timeout;
1065 }
1066
1067 /// Called after an edit to synchronize the buffer's main parse tree with
1068 /// the buffer's new underlying state.
1069 ///
1070 /// Locks the syntax map and interpolates the edits since the last reparse
1071 /// into the foreground syntax tree.
1072 ///
1073 /// Then takes a stable snapshot of the syntax map before unlocking it.
1074 /// The snapshot with the interpolated edits is sent to a background thread,
1075 /// where we ask Tree-sitter to perform an incremental parse.
1076 ///
1077 /// Meanwhile, in the foreground, we block the main thread for up to 1ms
1078 /// waiting on the parse to complete. As soon as it completes, we proceed
1079 /// synchronously, unless a 1ms timeout elapses.
1080 ///
1081 /// If we time out waiting on the parse, we spawn a second task waiting
1082 /// until the parse does complete and return with the interpolated tree still
1083 /// in the foreground. When the background parse completes, call back into
1084 /// the main thread and assign the foreground parse state.
1085 ///
1086 /// If the buffer or grammar changed since the start of the background parse,
1087 /// initiate an additional reparse recursively. To avoid concurrent parses
1088 /// for the same buffer, we only initiate a new parse if we are not already
1089 /// parsing in the background.
1090 pub fn reparse(&mut self, cx: &mut ModelContext<Self>) {
1091 if self.parsing_in_background {
1092 return;
1093 }
1094 let language = if let Some(language) = self.language.clone() {
1095 language
1096 } else {
1097 return;
1098 };
1099
1100 let text = self.text_snapshot();
1101 let parsed_version = self.version();
1102
1103 let mut syntax_map = self.syntax_map.lock();
1104 syntax_map.interpolate(&text);
1105 let language_registry = syntax_map.language_registry();
1106 let mut syntax_snapshot = syntax_map.snapshot();
1107 drop(syntax_map);
1108
1109 let parse_task = cx.background_executor().spawn({
1110 let language = language.clone();
1111 let language_registry = language_registry.clone();
1112 async move {
1113 syntax_snapshot.reparse(&text, language_registry, language);
1114 syntax_snapshot
1115 }
1116 });
1117
1118 match cx
1119 .background_executor()
1120 .block_with_timeout(self.sync_parse_timeout, parse_task)
1121 {
1122 Ok(new_syntax_snapshot) => {
1123 self.did_finish_parsing(new_syntax_snapshot, cx);
1124 return;
1125 }
1126 Err(parse_task) => {
1127 self.parsing_in_background = true;
1128 cx.spawn(move |this, mut cx| async move {
1129 let new_syntax_map = parse_task.await;
1130 this.update(&mut cx, move |this, cx| {
1131 let grammar_changed =
1132 this.language.as_ref().map_or(true, |current_language| {
1133 !Arc::ptr_eq(&language, current_language)
1134 });
1135 let language_registry_changed = new_syntax_map
1136 .contains_unknown_injections()
1137 && language_registry.map_or(false, |registry| {
1138 registry.version() != new_syntax_map.language_registry_version()
1139 });
1140 let parse_again = language_registry_changed
1141 || grammar_changed
1142 || this.version.changed_since(&parsed_version);
1143 this.did_finish_parsing(new_syntax_map, cx);
1144 this.parsing_in_background = false;
1145 if parse_again {
1146 this.reparse(cx);
1147 }
1148 })
1149 .ok();
1150 })
1151 .detach();
1152 }
1153 }
1154 }
1155
1156 fn did_finish_parsing(&mut self, syntax_snapshot: SyntaxSnapshot, cx: &mut ModelContext<Self>) {
1157 self.parse_count += 1;
1158 self.syntax_map.lock().did_parse(syntax_snapshot);
1159 self.request_autoindent(cx);
1160 cx.emit(Event::Reparsed);
1161 cx.notify();
1162 }
1163
1164 /// Assign to the buffer a set of diagnostics created by a given language server.
1165 pub fn update_diagnostics(
1166 &mut self,
1167 server_id: LanguageServerId,
1168 diagnostics: DiagnosticSet,
1169 cx: &mut ModelContext<Self>,
1170 ) {
1171 let lamport_timestamp = self.text.lamport_clock.tick();
1172 let op = Operation::UpdateDiagnostics {
1173 server_id,
1174 diagnostics: diagnostics.iter().cloned().collect(),
1175 lamport_timestamp,
1176 };
1177 self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
1178 self.send_operation(op, cx);
1179 }
1180
1181 fn request_autoindent(&mut self, cx: &mut ModelContext<Self>) {
1182 if let Some(indent_sizes) = self.compute_autoindents() {
1183 let indent_sizes = cx.background_executor().spawn(indent_sizes);
1184 match cx
1185 .background_executor()
1186 .block_with_timeout(Duration::from_micros(500), indent_sizes)
1187 {
1188 Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
1189 Err(indent_sizes) => {
1190 self.pending_autoindent = Some(cx.spawn(|this, mut cx| async move {
1191 let indent_sizes = indent_sizes.await;
1192 this.update(&mut cx, |this, cx| {
1193 this.apply_autoindents(indent_sizes, cx);
1194 })
1195 .ok();
1196 }));
1197 }
1198 }
1199 } else {
1200 self.autoindent_requests.clear();
1201 }
1202 }
1203
1204 fn compute_autoindents(&self) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>>> {
1205 let max_rows_between_yields = 100;
1206 let snapshot = self.snapshot();
1207 if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
1208 return None;
1209 }
1210
1211 let autoindent_requests = self.autoindent_requests.clone();
1212 Some(async move {
1213 let mut indent_sizes = BTreeMap::new();
1214 for request in autoindent_requests {
1215 // Resolve each edited range to its row in the current buffer and in the
1216 // buffer before this batch of edits.
1217 let mut row_ranges = Vec::new();
1218 let mut old_to_new_rows = BTreeMap::new();
1219 let mut language_indent_sizes_by_new_row = Vec::new();
1220 for entry in &request.entries {
1221 let position = entry.range.start;
1222 let new_row = position.to_point(&snapshot).row;
1223 let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
1224 language_indent_sizes_by_new_row.push((new_row, entry.indent_size));
1225
1226 if !entry.first_line_is_new {
1227 let old_row = position.to_point(&request.before_edit).row;
1228 old_to_new_rows.insert(old_row, new_row);
1229 }
1230 row_ranges.push((new_row..new_end_row, entry.original_indent_column));
1231 }
1232
1233 // Build a map containing the suggested indentation for each of the edited lines
1234 // with respect to the state of the buffer before these edits. This map is keyed
1235 // by the rows for these lines in the current state of the buffer.
1236 let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
1237 let old_edited_ranges =
1238 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
1239 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1240 let mut language_indent_size = IndentSize::default();
1241 for old_edited_range in old_edited_ranges {
1242 let suggestions = request
1243 .before_edit
1244 .suggest_autoindents(old_edited_range.clone())
1245 .into_iter()
1246 .flatten();
1247 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
1248 if let Some(suggestion) = suggestion {
1249 let new_row = *old_to_new_rows.get(&old_row).unwrap();
1250
1251 // Find the indent size based on the language for this row.
1252 while let Some((row, size)) = language_indent_sizes.peek() {
1253 if *row > new_row {
1254 break;
1255 }
1256 language_indent_size = *size;
1257 language_indent_sizes.next();
1258 }
1259
1260 let suggested_indent = old_to_new_rows
1261 .get(&suggestion.basis_row)
1262 .and_then(|from_row| {
1263 Some(old_suggestions.get(from_row).copied()?.0)
1264 })
1265 .unwrap_or_else(|| {
1266 request
1267 .before_edit
1268 .indent_size_for_line(suggestion.basis_row)
1269 })
1270 .with_delta(suggestion.delta, language_indent_size);
1271 old_suggestions
1272 .insert(new_row, (suggested_indent, suggestion.within_error));
1273 }
1274 }
1275 yield_now().await;
1276 }
1277
1278 // In block mode, only compute indentation suggestions for the first line
1279 // of each insertion. Otherwise, compute suggestions for every inserted line.
1280 let new_edited_row_ranges = contiguous_ranges(
1281 row_ranges.iter().flat_map(|(range, _)| {
1282 if request.is_block_mode {
1283 range.start..range.start + 1
1284 } else {
1285 range.clone()
1286 }
1287 }),
1288 max_rows_between_yields,
1289 );
1290
1291 // Compute new suggestions for each line, but only include them in the result
1292 // if they differ from the old suggestion for that line.
1293 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1294 let mut language_indent_size = IndentSize::default();
1295 for new_edited_row_range in new_edited_row_ranges {
1296 let suggestions = snapshot
1297 .suggest_autoindents(new_edited_row_range.clone())
1298 .into_iter()
1299 .flatten();
1300 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
1301 if let Some(suggestion) = suggestion {
1302 // Find the indent size based on the language for this row.
1303 while let Some((row, size)) = language_indent_sizes.peek() {
1304 if *row > new_row {
1305 break;
1306 }
1307 language_indent_size = *size;
1308 language_indent_sizes.next();
1309 }
1310
1311 let suggested_indent = indent_sizes
1312 .get(&suggestion.basis_row)
1313 .copied()
1314 .unwrap_or_else(|| {
1315 snapshot.indent_size_for_line(suggestion.basis_row)
1316 })
1317 .with_delta(suggestion.delta, language_indent_size);
1318 if old_suggestions.get(&new_row).map_or(
1319 true,
1320 |(old_indentation, was_within_error)| {
1321 suggested_indent != *old_indentation
1322 && (!suggestion.within_error || *was_within_error)
1323 },
1324 ) {
1325 indent_sizes.insert(new_row, suggested_indent);
1326 }
1327 }
1328 }
1329 yield_now().await;
1330 }
1331
1332 // For each block of inserted text, adjust the indentation of the remaining
1333 // lines of the block by the same amount as the first line was adjusted.
1334 if request.is_block_mode {
1335 for (row_range, original_indent_column) in
1336 row_ranges
1337 .into_iter()
1338 .filter_map(|(range, original_indent_column)| {
1339 if range.len() > 1 {
1340 Some((range, original_indent_column?))
1341 } else {
1342 None
1343 }
1344 })
1345 {
1346 let new_indent = indent_sizes
1347 .get(&row_range.start)
1348 .copied()
1349 .unwrap_or_else(|| snapshot.indent_size_for_line(row_range.start));
1350 let delta = new_indent.len as i64 - original_indent_column as i64;
1351 if delta != 0 {
1352 for row in row_range.skip(1) {
1353 indent_sizes.entry(row).or_insert_with(|| {
1354 let mut size = snapshot.indent_size_for_line(row);
1355 if size.kind == new_indent.kind {
1356 match delta.cmp(&0) {
1357 Ordering::Greater => size.len += delta as u32,
1358 Ordering::Less => {
1359 size.len = size.len.saturating_sub(-delta as u32)
1360 }
1361 Ordering::Equal => {}
1362 }
1363 }
1364 size
1365 });
1366 }
1367 }
1368 }
1369 }
1370 }
1371
1372 indent_sizes
1373 })
1374 }
1375
1376 fn apply_autoindents(
1377 &mut self,
1378 indent_sizes: BTreeMap<u32, IndentSize>,
1379 cx: &mut ModelContext<Self>,
1380 ) {
1381 self.autoindent_requests.clear();
1382
1383 let edits: Vec<_> = indent_sizes
1384 .into_iter()
1385 .filter_map(|(row, indent_size)| {
1386 let current_size = indent_size_for_line(self, row);
1387 Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
1388 })
1389 .collect();
1390
1391 self.edit(edits, None, cx);
1392 }
1393
1394 /// Create a minimal edit that will cause the given row to be indented
1395 /// with the given size. After applying this edit, the length of the line
1396 /// will always be at least `new_size.len`.
1397 pub fn edit_for_indent_size_adjustment(
1398 row: u32,
1399 current_size: IndentSize,
1400 new_size: IndentSize,
1401 ) -> Option<(Range<Point>, String)> {
1402 if new_size.kind == current_size.kind {
1403 match new_size.len.cmp(¤t_size.len) {
1404 Ordering::Greater => {
1405 let point = Point::new(row, 0);
1406 Some((
1407 point..point,
1408 iter::repeat(new_size.char())
1409 .take((new_size.len - current_size.len) as usize)
1410 .collect::<String>(),
1411 ))
1412 }
1413
1414 Ordering::Less => Some((
1415 Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
1416 String::new(),
1417 )),
1418
1419 Ordering::Equal => None,
1420 }
1421 } else {
1422 Some((
1423 Point::new(row, 0)..Point::new(row, current_size.len),
1424 iter::repeat(new_size.char())
1425 .take(new_size.len as usize)
1426 .collect::<String>(),
1427 ))
1428 }
1429 }
1430
1431 /// Spawns a background task that asynchronously computes a `Diff` between the buffer's text
1432 /// and the given new text.
1433 pub fn diff(&self, mut new_text: String, cx: &AppContext) -> Task<Diff> {
1434 let old_text = self.as_rope().clone();
1435 let base_version = self.version();
1436 cx.background_executor()
1437 .spawn_labeled(*BUFFER_DIFF_TASK, async move {
1438 let old_text = old_text.to_string();
1439 let line_ending = LineEnding::detect(&new_text);
1440 LineEnding::normalize(&mut new_text);
1441
1442 let diff = TextDiff::from_chars(old_text.as_str(), new_text.as_str());
1443 let empty: Arc<str> = "".into();
1444
1445 let mut edits = Vec::new();
1446 let mut old_offset = 0;
1447 let mut new_offset = 0;
1448 let mut last_edit: Option<(Range<usize>, Range<usize>)> = None;
1449 for change in diff.iter_all_changes().map(Some).chain([None]) {
1450 if let Some(change) = &change {
1451 let len = change.value().len();
1452 match change.tag() {
1453 ChangeTag::Equal => {
1454 old_offset += len;
1455 new_offset += len;
1456 }
1457 ChangeTag::Delete => {
1458 let old_end_offset = old_offset + len;
1459 if let Some((last_old_range, _)) = &mut last_edit {
1460 last_old_range.end = old_end_offset;
1461 } else {
1462 last_edit =
1463 Some((old_offset..old_end_offset, new_offset..new_offset));
1464 }
1465 old_offset = old_end_offset;
1466 }
1467 ChangeTag::Insert => {
1468 let new_end_offset = new_offset + len;
1469 if let Some((_, last_new_range)) = &mut last_edit {
1470 last_new_range.end = new_end_offset;
1471 } else {
1472 last_edit =
1473 Some((old_offset..old_offset, new_offset..new_end_offset));
1474 }
1475 new_offset = new_end_offset;
1476 }
1477 }
1478 }
1479
1480 if let Some((old_range, new_range)) = &last_edit {
1481 if old_offset > old_range.end
1482 || new_offset > new_range.end
1483 || change.is_none()
1484 {
1485 let text = if new_range.is_empty() {
1486 empty.clone()
1487 } else {
1488 new_text[new_range.clone()].into()
1489 };
1490 edits.push((old_range.clone(), text));
1491 last_edit.take();
1492 }
1493 }
1494 }
1495
1496 Diff {
1497 base_version,
1498 line_ending,
1499 edits,
1500 }
1501 })
1502 }
1503
1504 /// Spawns a background task that searches the buffer for any whitespace
1505 /// at the ends of a lines, and returns a `Diff` that removes that whitespace.
1506 pub fn remove_trailing_whitespace(&self, cx: &AppContext) -> Task<Diff> {
1507 let old_text = self.as_rope().clone();
1508 let line_ending = self.line_ending();
1509 let base_version = self.version();
1510 cx.background_executor().spawn(async move {
1511 let ranges = trailing_whitespace_ranges(&old_text);
1512 let empty = Arc::<str>::from("");
1513 Diff {
1514 base_version,
1515 line_ending,
1516 edits: ranges
1517 .into_iter()
1518 .map(|range| (range, empty.clone()))
1519 .collect(),
1520 }
1521 })
1522 }
1523
1524 /// Ensures that the buffer ends with a single newline character, and
1525 /// no other whitespace.
1526 pub fn ensure_final_newline(&mut self, cx: &mut ModelContext<Self>) {
1527 let len = self.len();
1528 let mut offset = len;
1529 for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
1530 let non_whitespace_len = chunk
1531 .trim_end_matches(|c: char| c.is_ascii_whitespace())
1532 .len();
1533 offset -= chunk.len();
1534 offset += non_whitespace_len;
1535 if non_whitespace_len != 0 {
1536 if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
1537 return;
1538 }
1539 break;
1540 }
1541 }
1542 self.edit([(offset..len, "\n")], None, cx);
1543 }
1544
1545 /// Applies a diff to the buffer. If the buffer has changed since the given diff was
1546 /// calculated, then adjust the diff to account for those changes, and discard any
1547 /// parts of the diff that conflict with those changes.
1548 pub fn apply_diff(&mut self, diff: Diff, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
1549 // Check for any edits to the buffer that have occurred since this diff
1550 // was computed.
1551 let snapshot = self.snapshot();
1552 let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
1553 let mut delta = 0;
1554 let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
1555 while let Some(edit_since) = edits_since.peek() {
1556 // If the edit occurs after a diff hunk, then it does not
1557 // affect that hunk.
1558 if edit_since.old.start > range.end {
1559 break;
1560 }
1561 // If the edit precedes the diff hunk, then adjust the hunk
1562 // to reflect the edit.
1563 else if edit_since.old.end < range.start {
1564 delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
1565 edits_since.next();
1566 }
1567 // If the edit intersects a diff hunk, then discard that hunk.
1568 else {
1569 return None;
1570 }
1571 }
1572
1573 let start = (range.start as i64 + delta) as usize;
1574 let end = (range.end as i64 + delta) as usize;
1575 Some((start..end, new_text))
1576 });
1577
1578 self.start_transaction();
1579 self.text.set_line_ending(diff.line_ending);
1580 self.edit(adjusted_edits, None, cx);
1581 self.end_transaction(cx)
1582 }
1583
1584 /// Checks if the buffer has unsaved changes.
1585 pub fn is_dirty(&self) -> bool {
1586 self.has_conflict
1587 || self.has_edits_since(&self.saved_version)
1588 || self
1589 .file
1590 .as_ref()
1591 .map_or(false, |file| file.is_deleted() || !file.is_created())
1592 }
1593
1594 /// Checks if the buffer and its file have both changed since the buffer
1595 /// was last saved or reloaded.
1596 pub fn has_conflict(&self) -> bool {
1597 self.has_conflict
1598 || self.file.as_ref().map_or(false, |file| {
1599 file.mtime() > self.saved_mtime && self.has_edits_since(&self.saved_version)
1600 })
1601 }
1602
1603 /// Gets a [`Subscription`] that tracks all of the changes to the buffer's text.
1604 pub fn subscribe(&mut self) -> Subscription {
1605 self.text.subscribe()
1606 }
1607
1608 /// Starts a transaction, if one is not already in-progress. When undoing or
1609 /// redoing edits, all of the edits performed within a transaction are undone
1610 /// or redone together.
1611 pub fn start_transaction(&mut self) -> Option<TransactionId> {
1612 self.start_transaction_at(Instant::now())
1613 }
1614
1615 /// Starts a transaction, providing the current time. Subsequent transactions
1616 /// that occur within a short period of time will be grouped together. This
1617 /// is controlled by the buffer's undo grouping duration.
1618 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
1619 self.transaction_depth += 1;
1620 if self.was_dirty_before_starting_transaction.is_none() {
1621 self.was_dirty_before_starting_transaction = Some(self.is_dirty());
1622 }
1623 self.text.start_transaction_at(now)
1624 }
1625
1626 /// Terminates the current transaction, if this is the outermost transaction.
1627 pub fn end_transaction(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
1628 self.end_transaction_at(Instant::now(), cx)
1629 }
1630
1631 /// Terminates the current transaction, providing the current time. Subsequent transactions
1632 /// that occur within a short period of time will be grouped together. This
1633 /// is controlled by the buffer's undo grouping duration.
1634 pub fn end_transaction_at(
1635 &mut self,
1636 now: Instant,
1637 cx: &mut ModelContext<Self>,
1638 ) -> Option<TransactionId> {
1639 assert!(self.transaction_depth > 0);
1640 self.transaction_depth -= 1;
1641 let was_dirty = if self.transaction_depth == 0 {
1642 self.was_dirty_before_starting_transaction.take().unwrap()
1643 } else {
1644 false
1645 };
1646 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
1647 self.did_edit(&start_version, was_dirty, cx);
1648 Some(transaction_id)
1649 } else {
1650 None
1651 }
1652 }
1653
1654 /// Manually add a transaction to the buffer's undo history.
1655 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
1656 self.text.push_transaction(transaction, now);
1657 }
1658
1659 /// Prevent the last transaction from being grouped with any subsequent transactions,
1660 /// even if they occur with the buffer's undo grouping duration.
1661 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
1662 self.text.finalize_last_transaction()
1663 }
1664
1665 /// Manually group all changes since a given transaction.
1666 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
1667 self.text.group_until_transaction(transaction_id);
1668 }
1669
1670 /// Manually remove a transaction from the buffer's undo history
1671 pub fn forget_transaction(&mut self, transaction_id: TransactionId) {
1672 self.text.forget_transaction(transaction_id);
1673 }
1674
1675 /// Manually merge two adjacent transactions in the buffer's undo history.
1676 pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
1677 self.text.merge_transactions(transaction, destination);
1678 }
1679
1680 /// Waits for the buffer to receive operations with the given timestamps.
1681 pub fn wait_for_edits(
1682 &mut self,
1683 edit_ids: impl IntoIterator<Item = clock::Lamport>,
1684 ) -> impl Future<Output = Result<()>> {
1685 self.text.wait_for_edits(edit_ids)
1686 }
1687
1688 /// Waits for the buffer to receive the operations necessary for resolving the given anchors.
1689 pub fn wait_for_anchors(
1690 &mut self,
1691 anchors: impl IntoIterator<Item = Anchor>,
1692 ) -> impl 'static + Future<Output = Result<()>> {
1693 self.text.wait_for_anchors(anchors)
1694 }
1695
1696 /// Waits for the buffer to receive operations up to the given version.
1697 pub fn wait_for_version(&mut self, version: clock::Global) -> impl Future<Output = Result<()>> {
1698 self.text.wait_for_version(version)
1699 }
1700
1701 /// Forces all futures returned by [`Buffer::wait_for_version`], [`Buffer::wait_for_edits`], or
1702 /// [`Buffer::wait_for_version`] to resolve with an error.
1703 pub fn give_up_waiting(&mut self) {
1704 self.text.give_up_waiting();
1705 }
1706
1707 /// Stores a set of selections that should be broadcasted to all of the buffer's replicas.
1708 pub fn set_active_selections(
1709 &mut self,
1710 selections: Arc<[Selection<Anchor>]>,
1711 line_mode: bool,
1712 cursor_shape: CursorShape,
1713 cx: &mut ModelContext<Self>,
1714 ) {
1715 let lamport_timestamp = self.text.lamport_clock.tick();
1716 self.remote_selections.insert(
1717 self.text.replica_id(),
1718 SelectionSet {
1719 selections: selections.clone(),
1720 lamport_timestamp,
1721 line_mode,
1722 cursor_shape,
1723 },
1724 );
1725 self.send_operation(
1726 Operation::UpdateSelections {
1727 selections,
1728 line_mode,
1729 lamport_timestamp,
1730 cursor_shape,
1731 },
1732 cx,
1733 );
1734 }
1735
1736 /// Clears the selections, so that other replicas of the buffer do not see any selections for
1737 /// this replica.
1738 pub fn remove_active_selections(&mut self, cx: &mut ModelContext<Self>) {
1739 if self
1740 .remote_selections
1741 .get(&self.text.replica_id())
1742 .map_or(true, |set| !set.selections.is_empty())
1743 {
1744 self.set_active_selections(Arc::from([]), false, Default::default(), cx);
1745 }
1746 }
1747
1748 /// Replaces the buffer's entire text.
1749 pub fn set_text<T>(&mut self, text: T, cx: &mut ModelContext<Self>) -> Option<clock::Lamport>
1750 where
1751 T: Into<Arc<str>>,
1752 {
1753 self.autoindent_requests.clear();
1754 self.edit([(0..self.len(), text)], None, cx)
1755 }
1756
1757 /// Applies the given edits to the buffer. Each edit is specified as a range of text to
1758 /// delete, and a string of text to insert at that location.
1759 ///
1760 /// If an [`AutoindentMode`] is provided, then the buffer will enqueue an auto-indent
1761 /// request for the edited ranges, which will be processed when the buffer finishes
1762 /// parsing.
1763 ///
1764 /// Parsing takes place at the end of a transaction, and may compute synchronously
1765 /// or asynchronously, depending on the changes.
1766 pub fn edit<I, S, T>(
1767 &mut self,
1768 edits_iter: I,
1769 autoindent_mode: Option<AutoindentMode>,
1770 cx: &mut ModelContext<Self>,
1771 ) -> Option<clock::Lamport>
1772 where
1773 I: IntoIterator<Item = (Range<S>, T)>,
1774 S: ToOffset,
1775 T: Into<Arc<str>>,
1776 {
1777 // Skip invalid edits and coalesce contiguous ones.
1778 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
1779 for (range, new_text) in edits_iter {
1780 let mut range = range.start.to_offset(self)..range.end.to_offset(self);
1781 if range.start > range.end {
1782 mem::swap(&mut range.start, &mut range.end);
1783 }
1784 let new_text = new_text.into();
1785 if !new_text.is_empty() || !range.is_empty() {
1786 if let Some((prev_range, prev_text)) = edits.last_mut() {
1787 if prev_range.end >= range.start {
1788 prev_range.end = cmp::max(prev_range.end, range.end);
1789 *prev_text = format!("{prev_text}{new_text}").into();
1790 } else {
1791 edits.push((range, new_text));
1792 }
1793 } else {
1794 edits.push((range, new_text));
1795 }
1796 }
1797 }
1798 if edits.is_empty() {
1799 return None;
1800 }
1801
1802 self.start_transaction();
1803 self.pending_autoindent.take();
1804 let autoindent_request = autoindent_mode
1805 .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
1806
1807 let edit_operation = self.text.edit(edits.iter().cloned());
1808 let edit_id = edit_operation.timestamp();
1809
1810 if let Some((before_edit, mode)) = autoindent_request {
1811 let mut delta = 0isize;
1812 let entries = edits
1813 .into_iter()
1814 .enumerate()
1815 .zip(&edit_operation.as_edit().unwrap().new_text)
1816 .map(|((ix, (range, _)), new_text)| {
1817 let new_text_length = new_text.len();
1818 let old_start = range.start.to_point(&before_edit);
1819 let new_start = (delta + range.start as isize) as usize;
1820 delta += new_text_length as isize - (range.end as isize - range.start as isize);
1821
1822 let mut range_of_insertion_to_indent = 0..new_text_length;
1823 let mut first_line_is_new = false;
1824 let mut original_indent_column = None;
1825
1826 // When inserting an entire line at the beginning of an existing line,
1827 // treat the insertion as new.
1828 if new_text.contains('\n')
1829 && old_start.column <= before_edit.indent_size_for_line(old_start.row).len
1830 {
1831 first_line_is_new = true;
1832 }
1833
1834 // When inserting text starting with a newline, avoid auto-indenting the
1835 // previous line.
1836 if new_text.starts_with('\n') {
1837 range_of_insertion_to_indent.start += 1;
1838 first_line_is_new = true;
1839 }
1840
1841 // Avoid auto-indenting after the insertion.
1842 if let AutoindentMode::Block {
1843 original_indent_columns,
1844 } = &mode
1845 {
1846 original_indent_column =
1847 Some(original_indent_columns.get(ix).copied().unwrap_or_else(|| {
1848 indent_size_for_text(
1849 new_text[range_of_insertion_to_indent.clone()].chars(),
1850 )
1851 .len
1852 }));
1853 if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
1854 range_of_insertion_to_indent.end -= 1;
1855 }
1856 }
1857
1858 AutoindentRequestEntry {
1859 first_line_is_new,
1860 original_indent_column,
1861 indent_size: before_edit.language_indent_size_at(range.start, cx),
1862 range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
1863 ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
1864 }
1865 })
1866 .collect();
1867
1868 self.autoindent_requests.push(Arc::new(AutoindentRequest {
1869 before_edit,
1870 entries,
1871 is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
1872 }));
1873 }
1874
1875 self.end_transaction(cx);
1876 self.send_operation(Operation::Buffer(edit_operation), cx);
1877 Some(edit_id)
1878 }
1879
1880 fn did_edit(
1881 &mut self,
1882 old_version: &clock::Global,
1883 was_dirty: bool,
1884 cx: &mut ModelContext<Self>,
1885 ) {
1886 if self.edits_since::<usize>(old_version).next().is_none() {
1887 return;
1888 }
1889
1890 self.reparse(cx);
1891
1892 cx.emit(Event::Edited);
1893 if was_dirty != self.is_dirty() {
1894 cx.emit(Event::DirtyChanged);
1895 }
1896 cx.notify();
1897 }
1898
1899 /// Applies the given remote operations to the buffer.
1900 pub fn apply_ops<I: IntoIterator<Item = Operation>>(
1901 &mut self,
1902 ops: I,
1903 cx: &mut ModelContext<Self>,
1904 ) -> Result<()> {
1905 self.pending_autoindent.take();
1906 let was_dirty = self.is_dirty();
1907 let old_version = self.version.clone();
1908 let mut deferred_ops = Vec::new();
1909 let buffer_ops = ops
1910 .into_iter()
1911 .filter_map(|op| match op {
1912 Operation::Buffer(op) => Some(op),
1913 _ => {
1914 if self.can_apply_op(&op) {
1915 self.apply_op(op, cx);
1916 } else {
1917 deferred_ops.push(op);
1918 }
1919 None
1920 }
1921 })
1922 .collect::<Vec<_>>();
1923 self.text.apply_ops(buffer_ops)?;
1924 self.deferred_ops.insert(deferred_ops);
1925 self.flush_deferred_ops(cx);
1926 self.did_edit(&old_version, was_dirty, cx);
1927 // Notify independently of whether the buffer was edited as the operations could include a
1928 // selection update.
1929 cx.notify();
1930 Ok(())
1931 }
1932
1933 fn flush_deferred_ops(&mut self, cx: &mut ModelContext<Self>) {
1934 let mut deferred_ops = Vec::new();
1935 for op in self.deferred_ops.drain().iter().cloned() {
1936 if self.can_apply_op(&op) {
1937 self.apply_op(op, cx);
1938 } else {
1939 deferred_ops.push(op);
1940 }
1941 }
1942 self.deferred_ops.insert(deferred_ops);
1943 }
1944
1945 fn can_apply_op(&self, operation: &Operation) -> bool {
1946 match operation {
1947 Operation::Buffer(_) => {
1948 unreachable!("buffer operations should never be applied at this layer")
1949 }
1950 Operation::UpdateDiagnostics {
1951 diagnostics: diagnostic_set,
1952 ..
1953 } => diagnostic_set.iter().all(|diagnostic| {
1954 self.text.can_resolve(&diagnostic.range.start)
1955 && self.text.can_resolve(&diagnostic.range.end)
1956 }),
1957 Operation::UpdateSelections { selections, .. } => selections
1958 .iter()
1959 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
1960 Operation::UpdateCompletionTriggers { .. } => true,
1961 }
1962 }
1963
1964 fn apply_op(&mut self, operation: Operation, cx: &mut ModelContext<Self>) {
1965 match operation {
1966 Operation::Buffer(_) => {
1967 unreachable!("buffer operations should never be applied at this layer")
1968 }
1969 Operation::UpdateDiagnostics {
1970 server_id,
1971 diagnostics: diagnostic_set,
1972 lamport_timestamp,
1973 } => {
1974 let snapshot = self.snapshot();
1975 self.apply_diagnostic_update(
1976 server_id,
1977 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
1978 lamport_timestamp,
1979 cx,
1980 );
1981 }
1982 Operation::UpdateSelections {
1983 selections,
1984 lamport_timestamp,
1985 line_mode,
1986 cursor_shape,
1987 } => {
1988 if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id) {
1989 if set.lamport_timestamp > lamport_timestamp {
1990 return;
1991 }
1992 }
1993
1994 self.remote_selections.insert(
1995 lamport_timestamp.replica_id,
1996 SelectionSet {
1997 selections,
1998 lamport_timestamp,
1999 line_mode,
2000 cursor_shape,
2001 },
2002 );
2003 self.text.lamport_clock.observe(lamport_timestamp);
2004 self.selections_update_count += 1;
2005 }
2006 Operation::UpdateCompletionTriggers {
2007 triggers,
2008 lamport_timestamp,
2009 } => {
2010 self.completion_triggers = triggers;
2011 self.text.lamport_clock.observe(lamport_timestamp);
2012 }
2013 }
2014 }
2015
2016 fn apply_diagnostic_update(
2017 &mut self,
2018 server_id: LanguageServerId,
2019 diagnostics: DiagnosticSet,
2020 lamport_timestamp: clock::Lamport,
2021 cx: &mut ModelContext<Self>,
2022 ) {
2023 if lamport_timestamp > self.diagnostics_timestamp {
2024 let ix = self.diagnostics.binary_search_by_key(&server_id, |e| e.0);
2025 if diagnostics.len() == 0 {
2026 if let Ok(ix) = ix {
2027 self.diagnostics.remove(ix);
2028 }
2029 } else {
2030 match ix {
2031 Err(ix) => self.diagnostics.insert(ix, (server_id, diagnostics)),
2032 Ok(ix) => self.diagnostics[ix].1 = diagnostics,
2033 };
2034 }
2035 self.diagnostics_timestamp = lamport_timestamp;
2036 self.diagnostics_update_count += 1;
2037 self.text.lamport_clock.observe(lamport_timestamp);
2038 cx.notify();
2039 cx.emit(Event::DiagnosticsUpdated);
2040 }
2041 }
2042
2043 fn send_operation(&mut self, operation: Operation, cx: &mut ModelContext<Self>) {
2044 cx.emit(Event::Operation(operation));
2045 }
2046
2047 /// Removes the selections for a given peer.
2048 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut ModelContext<Self>) {
2049 self.remote_selections.remove(&replica_id);
2050 cx.notify();
2051 }
2052
2053 /// Undoes the most recent transaction.
2054 pub fn undo(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
2055 let was_dirty = self.is_dirty();
2056 let old_version = self.version.clone();
2057
2058 if let Some((transaction_id, operation)) = self.text.undo() {
2059 self.send_operation(Operation::Buffer(operation), cx);
2060 self.did_edit(&old_version, was_dirty, cx);
2061 Some(transaction_id)
2062 } else {
2063 None
2064 }
2065 }
2066
2067 /// Manually undoes a specific transaction in the buffer's undo history.
2068 pub fn undo_transaction(
2069 &mut self,
2070 transaction_id: TransactionId,
2071 cx: &mut ModelContext<Self>,
2072 ) -> bool {
2073 let was_dirty = self.is_dirty();
2074 let old_version = self.version.clone();
2075 if let Some(operation) = self.text.undo_transaction(transaction_id) {
2076 self.send_operation(Operation::Buffer(operation), cx);
2077 self.did_edit(&old_version, was_dirty, cx);
2078 true
2079 } else {
2080 false
2081 }
2082 }
2083
2084 /// Manually undoes all changes after a given transaction in the buffer's undo history.
2085 pub fn undo_to_transaction(
2086 &mut self,
2087 transaction_id: TransactionId,
2088 cx: &mut ModelContext<Self>,
2089 ) -> bool {
2090 let was_dirty = self.is_dirty();
2091 let old_version = self.version.clone();
2092
2093 let operations = self.text.undo_to_transaction(transaction_id);
2094 let undone = !operations.is_empty();
2095 for operation in operations {
2096 self.send_operation(Operation::Buffer(operation), cx);
2097 }
2098 if undone {
2099 self.did_edit(&old_version, was_dirty, cx)
2100 }
2101 undone
2102 }
2103
2104 /// Manually redoes a specific transaction in the buffer's redo history.
2105 pub fn redo(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
2106 let was_dirty = self.is_dirty();
2107 let old_version = self.version.clone();
2108
2109 if let Some((transaction_id, operation)) = self.text.redo() {
2110 self.send_operation(Operation::Buffer(operation), cx);
2111 self.did_edit(&old_version, was_dirty, cx);
2112 Some(transaction_id)
2113 } else {
2114 None
2115 }
2116 }
2117
2118 /// Manually undoes all changes until a given transaction in the buffer's redo history.
2119 pub fn redo_to_transaction(
2120 &mut self,
2121 transaction_id: TransactionId,
2122 cx: &mut ModelContext<Self>,
2123 ) -> bool {
2124 let was_dirty = self.is_dirty();
2125 let old_version = self.version.clone();
2126
2127 let operations = self.text.redo_to_transaction(transaction_id);
2128 let redone = !operations.is_empty();
2129 for operation in operations {
2130 self.send_operation(Operation::Buffer(operation), cx);
2131 }
2132 if redone {
2133 self.did_edit(&old_version, was_dirty, cx)
2134 }
2135 redone
2136 }
2137
2138 /// Override current completion triggers with the user-provided completion triggers.
2139 pub fn set_completion_triggers(&mut self, triggers: Vec<String>, cx: &mut ModelContext<Self>) {
2140 self.completion_triggers.clone_from(&triggers);
2141 self.completion_triggers_timestamp = self.text.lamport_clock.tick();
2142 self.send_operation(
2143 Operation::UpdateCompletionTriggers {
2144 triggers,
2145 lamport_timestamp: self.completion_triggers_timestamp,
2146 },
2147 cx,
2148 );
2149 cx.notify();
2150 }
2151
2152 /// Returns a list of strings which trigger a completion menu for this language.
2153 /// Usually this is driven by LSP server which returns a list of trigger characters for completions.
2154 pub fn completion_triggers(&self) -> &[String] {
2155 &self.completion_triggers
2156 }
2157}
2158
2159#[doc(hidden)]
2160#[cfg(any(test, feature = "test-support"))]
2161impl Buffer {
2162 pub fn edit_via_marked_text(
2163 &mut self,
2164 marked_string: &str,
2165 autoindent_mode: Option<AutoindentMode>,
2166 cx: &mut ModelContext<Self>,
2167 ) {
2168 let edits = self.edits_for_marked_text(marked_string);
2169 self.edit(edits, autoindent_mode, cx);
2170 }
2171
2172 pub fn set_group_interval(&mut self, group_interval: Duration) {
2173 self.text.set_group_interval(group_interval);
2174 }
2175
2176 pub fn randomly_edit<T>(
2177 &mut self,
2178 rng: &mut T,
2179 old_range_count: usize,
2180 cx: &mut ModelContext<Self>,
2181 ) where
2182 T: rand::Rng,
2183 {
2184 let mut edits: Vec<(Range<usize>, String)> = Vec::new();
2185 let mut last_end = None;
2186 for _ in 0..old_range_count {
2187 if last_end.map_or(false, |last_end| last_end >= self.len()) {
2188 break;
2189 }
2190
2191 let new_start = last_end.map_or(0, |last_end| last_end + 1);
2192 let mut range = self.random_byte_range(new_start, rng);
2193 if rng.gen_bool(0.2) {
2194 mem::swap(&mut range.start, &mut range.end);
2195 }
2196 last_end = Some(range.end);
2197
2198 let new_text_len = rng.gen_range(0..10);
2199 let new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
2200
2201 edits.push((range, new_text));
2202 }
2203 log::info!("mutating buffer {} with {:?}", self.replica_id(), edits);
2204 self.edit(edits, None, cx);
2205 }
2206
2207 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut ModelContext<Self>) {
2208 let was_dirty = self.is_dirty();
2209 let old_version = self.version.clone();
2210
2211 let ops = self.text.randomly_undo_redo(rng);
2212 if !ops.is_empty() {
2213 for op in ops {
2214 self.send_operation(Operation::Buffer(op), cx);
2215 self.did_edit(&old_version, was_dirty, cx);
2216 }
2217 }
2218 }
2219}
2220
2221impl EventEmitter<Event> for Buffer {}
2222
2223impl Deref for Buffer {
2224 type Target = TextBuffer;
2225
2226 fn deref(&self) -> &Self::Target {
2227 &self.text
2228 }
2229}
2230
2231impl BufferSnapshot {
2232 /// Returns [`IndentSize`] for a given line that respects user settings and /// language preferences.
2233 pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
2234 indent_size_for_line(self, row)
2235 }
2236 /// Returns [`IndentSize`] for a given position that respects user settings
2237 /// and language preferences.
2238 pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &AppContext) -> IndentSize {
2239 let settings = language_settings(self.language_at(position), self.file(), cx);
2240 if settings.hard_tabs {
2241 IndentSize::tab()
2242 } else {
2243 IndentSize::spaces(settings.tab_size.get())
2244 }
2245 }
2246
2247 /// Retrieve the suggested indent size for all of the given rows. The unit of indentation
2248 /// is passed in as `single_indent_size`.
2249 pub fn suggested_indents(
2250 &self,
2251 rows: impl Iterator<Item = u32>,
2252 single_indent_size: IndentSize,
2253 ) -> BTreeMap<u32, IndentSize> {
2254 let mut result = BTreeMap::new();
2255
2256 for row_range in contiguous_ranges(rows, 10) {
2257 let suggestions = match self.suggest_autoindents(row_range.clone()) {
2258 Some(suggestions) => suggestions,
2259 _ => break,
2260 };
2261
2262 for (row, suggestion) in row_range.zip(suggestions) {
2263 let indent_size = if let Some(suggestion) = suggestion {
2264 result
2265 .get(&suggestion.basis_row)
2266 .copied()
2267 .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
2268 .with_delta(suggestion.delta, single_indent_size)
2269 } else {
2270 self.indent_size_for_line(row)
2271 };
2272
2273 result.insert(row, indent_size);
2274 }
2275 }
2276
2277 result
2278 }
2279
2280 fn suggest_autoindents(
2281 &self,
2282 row_range: Range<u32>,
2283 ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
2284 let config = &self.language.as_ref()?.config;
2285 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
2286
2287 // Find the suggested indentation ranges based on the syntax tree.
2288 let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
2289 let end = Point::new(row_range.end, 0);
2290 let range = (start..end).to_offset(&self.text);
2291 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2292 Some(&grammar.indents_config.as_ref()?.query)
2293 });
2294 let indent_configs = matches
2295 .grammars()
2296 .iter()
2297 .map(|grammar| grammar.indents_config.as_ref().unwrap())
2298 .collect::<Vec<_>>();
2299
2300 let mut indent_ranges = Vec::<Range<Point>>::new();
2301 let mut outdent_positions = Vec::<Point>::new();
2302 while let Some(mat) = matches.peek() {
2303 let mut start: Option<Point> = None;
2304 let mut end: Option<Point> = None;
2305
2306 let config = &indent_configs[mat.grammar_index];
2307 for capture in mat.captures {
2308 if capture.index == config.indent_capture_ix {
2309 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
2310 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
2311 } else if Some(capture.index) == config.start_capture_ix {
2312 start = Some(Point::from_ts_point(capture.node.end_position()));
2313 } else if Some(capture.index) == config.end_capture_ix {
2314 end = Some(Point::from_ts_point(capture.node.start_position()));
2315 } else if Some(capture.index) == config.outdent_capture_ix {
2316 outdent_positions.push(Point::from_ts_point(capture.node.start_position()));
2317 }
2318 }
2319
2320 matches.advance();
2321 if let Some((start, end)) = start.zip(end) {
2322 if start.row == end.row {
2323 continue;
2324 }
2325
2326 let range = start..end;
2327 match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
2328 Err(ix) => indent_ranges.insert(ix, range),
2329 Ok(ix) => {
2330 let prev_range = &mut indent_ranges[ix];
2331 prev_range.end = prev_range.end.max(range.end);
2332 }
2333 }
2334 }
2335 }
2336
2337 let mut error_ranges = Vec::<Range<Point>>::new();
2338 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2339 Some(&grammar.error_query)
2340 });
2341 while let Some(mat) = matches.peek() {
2342 let node = mat.captures[0].node;
2343 let start = Point::from_ts_point(node.start_position());
2344 let end = Point::from_ts_point(node.end_position());
2345 let range = start..end;
2346 let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
2347 Ok(ix) | Err(ix) => ix,
2348 };
2349 let mut end_ix = ix;
2350 while let Some(existing_range) = error_ranges.get(end_ix) {
2351 if existing_range.end < end {
2352 end_ix += 1;
2353 } else {
2354 break;
2355 }
2356 }
2357 error_ranges.splice(ix..end_ix, [range]);
2358 matches.advance();
2359 }
2360
2361 outdent_positions.sort();
2362 for outdent_position in outdent_positions {
2363 // find the innermost indent range containing this outdent_position
2364 // set its end to the outdent position
2365 if let Some(range_to_truncate) = indent_ranges
2366 .iter_mut()
2367 .filter(|indent_range| indent_range.contains(&outdent_position))
2368 .last()
2369 {
2370 range_to_truncate.end = outdent_position;
2371 }
2372 }
2373
2374 // Find the suggested indentation increases and decreased based on regexes.
2375 let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
2376 self.for_each_line(
2377 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
2378 ..Point::new(row_range.end, 0),
2379 |row, line| {
2380 if config
2381 .decrease_indent_pattern
2382 .as_ref()
2383 .map_or(false, |regex| regex.is_match(line))
2384 {
2385 indent_change_rows.push((row, Ordering::Less));
2386 }
2387 if config
2388 .increase_indent_pattern
2389 .as_ref()
2390 .map_or(false, |regex| regex.is_match(line))
2391 {
2392 indent_change_rows.push((row + 1, Ordering::Greater));
2393 }
2394 },
2395 );
2396
2397 let mut indent_changes = indent_change_rows.into_iter().peekable();
2398 let mut prev_row = if config.auto_indent_using_last_non_empty_line {
2399 prev_non_blank_row.unwrap_or(0)
2400 } else {
2401 row_range.start.saturating_sub(1)
2402 };
2403 let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
2404 Some(row_range.map(move |row| {
2405 let row_start = Point::new(row, self.indent_size_for_line(row).len);
2406
2407 let mut indent_from_prev_row = false;
2408 let mut outdent_from_prev_row = false;
2409 let mut outdent_to_row = u32::MAX;
2410
2411 while let Some((indent_row, delta)) = indent_changes.peek() {
2412 match indent_row.cmp(&row) {
2413 Ordering::Equal => match delta {
2414 Ordering::Less => outdent_from_prev_row = true,
2415 Ordering::Greater => indent_from_prev_row = true,
2416 _ => {}
2417 },
2418
2419 Ordering::Greater => break,
2420 Ordering::Less => {}
2421 }
2422
2423 indent_changes.next();
2424 }
2425
2426 for range in &indent_ranges {
2427 if range.start.row >= row {
2428 break;
2429 }
2430 if range.start.row == prev_row && range.end > row_start {
2431 indent_from_prev_row = true;
2432 }
2433 if range.end > prev_row_start && range.end <= row_start {
2434 outdent_to_row = outdent_to_row.min(range.start.row);
2435 }
2436 }
2437
2438 let within_error = error_ranges
2439 .iter()
2440 .any(|e| e.start.row < row && e.end > row_start);
2441
2442 let suggestion = if outdent_to_row == prev_row
2443 || (outdent_from_prev_row && indent_from_prev_row)
2444 {
2445 Some(IndentSuggestion {
2446 basis_row: prev_row,
2447 delta: Ordering::Equal,
2448 within_error,
2449 })
2450 } else if indent_from_prev_row {
2451 Some(IndentSuggestion {
2452 basis_row: prev_row,
2453 delta: Ordering::Greater,
2454 within_error,
2455 })
2456 } else if outdent_to_row < prev_row {
2457 Some(IndentSuggestion {
2458 basis_row: outdent_to_row,
2459 delta: Ordering::Equal,
2460 within_error,
2461 })
2462 } else if outdent_from_prev_row {
2463 Some(IndentSuggestion {
2464 basis_row: prev_row,
2465 delta: Ordering::Less,
2466 within_error,
2467 })
2468 } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
2469 {
2470 Some(IndentSuggestion {
2471 basis_row: prev_row,
2472 delta: Ordering::Equal,
2473 within_error,
2474 })
2475 } else {
2476 None
2477 };
2478
2479 prev_row = row;
2480 prev_row_start = row_start;
2481 suggestion
2482 }))
2483 }
2484
2485 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
2486 while row > 0 {
2487 row -= 1;
2488 if !self.is_line_blank(row) {
2489 return Some(row);
2490 }
2491 }
2492 None
2493 }
2494
2495 /// Iterates over chunks of text in the given range of the buffer. Text is chunked
2496 /// in an arbitrary way due to being stored in a [`Rope`](text::Rope). The text is also
2497 /// returned in chunks where each chunk has a single syntax highlighting style and
2498 /// diagnostic status.
2499 pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks {
2500 let range = range.start.to_offset(self)..range.end.to_offset(self);
2501
2502 let mut syntax = None;
2503 let mut diagnostic_endpoints = Vec::new();
2504 if language_aware {
2505 let captures = self.syntax.captures(range.clone(), &self.text, |grammar| {
2506 grammar.highlights_query.as_ref()
2507 });
2508 let highlight_maps = captures
2509 .grammars()
2510 .into_iter()
2511 .map(|grammar| grammar.highlight_map())
2512 .collect();
2513 syntax = Some((captures, highlight_maps));
2514 for entry in self.diagnostics_in_range::<_, usize>(range.clone(), false) {
2515 diagnostic_endpoints.push(DiagnosticEndpoint {
2516 offset: entry.range.start,
2517 is_start: true,
2518 severity: entry.diagnostic.severity,
2519 is_unnecessary: entry.diagnostic.is_unnecessary,
2520 });
2521 diagnostic_endpoints.push(DiagnosticEndpoint {
2522 offset: entry.range.end,
2523 is_start: false,
2524 severity: entry.diagnostic.severity,
2525 is_unnecessary: entry.diagnostic.is_unnecessary,
2526 });
2527 }
2528 diagnostic_endpoints
2529 .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
2530 }
2531
2532 BufferChunks::new(self.text.as_rope(), range, syntax, diagnostic_endpoints)
2533 }
2534
2535 /// Invokes the given callback for each line of text in the given range of the buffer.
2536 /// Uses callback to avoid allocating a string for each line.
2537 fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
2538 let mut line = String::new();
2539 let mut row = range.start.row;
2540 for chunk in self
2541 .as_rope()
2542 .chunks_in_range(range.to_offset(self))
2543 .chain(["\n"])
2544 {
2545 for (newline_ix, text) in chunk.split('\n').enumerate() {
2546 if newline_ix > 0 {
2547 callback(row, &line);
2548 row += 1;
2549 line.clear();
2550 }
2551 line.push_str(text);
2552 }
2553 }
2554 }
2555
2556 /// Iterates over every [`SyntaxLayer`] in the buffer.
2557 pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayer> + '_ {
2558 self.syntax.layers_for_range(0..self.len(), &self.text)
2559 }
2560
2561 pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayer> {
2562 let offset = position.to_offset(self);
2563 self.syntax
2564 .layers_for_range(offset..offset, &self.text)
2565 .filter(|l| l.node().end_byte() > offset)
2566 .last()
2567 }
2568
2569 /// Returns the main [Language]
2570 pub fn language(&self) -> Option<&Arc<Language>> {
2571 self.language.as_ref()
2572 }
2573
2574 /// Returns the [Language] at the given location.
2575 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
2576 self.syntax_layer_at(position)
2577 .map(|info| info.language)
2578 .or(self.language.as_ref())
2579 }
2580
2581 /// Returns the settings for the language at the given location.
2582 pub fn settings_at<'a, D: ToOffset>(
2583 &self,
2584 position: D,
2585 cx: &'a AppContext,
2586 ) -> &'a LanguageSettings {
2587 language_settings(self.language_at(position), self.file.as_ref(), cx)
2588 }
2589
2590 /// Returns the [LanguageScope] at the given location.
2591 pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
2592 let offset = position.to_offset(self);
2593 let mut scope = None;
2594 let mut smallest_range: Option<Range<usize>> = None;
2595
2596 // Use the layer that has the smallest node intersecting the given point.
2597 for layer in self.syntax.layers_for_range(offset..offset, &self.text) {
2598 let mut cursor = layer.node().walk();
2599
2600 let mut range = None;
2601 loop {
2602 let child_range = cursor.node().byte_range();
2603 if !child_range.to_inclusive().contains(&offset) {
2604 break;
2605 }
2606
2607 range = Some(child_range);
2608 if cursor.goto_first_child_for_byte(offset).is_none() {
2609 break;
2610 }
2611 }
2612
2613 if let Some(range) = range {
2614 if smallest_range
2615 .as_ref()
2616 .map_or(true, |smallest_range| range.len() < smallest_range.len())
2617 {
2618 smallest_range = Some(range);
2619 scope = Some(LanguageScope {
2620 language: layer.language.clone(),
2621 override_id: layer.override_id(offset, &self.text),
2622 });
2623 }
2624 }
2625 }
2626
2627 scope.or_else(|| {
2628 self.language.clone().map(|language| LanguageScope {
2629 language,
2630 override_id: None,
2631 })
2632 })
2633 }
2634
2635 /// Returns a tuple of the range and character kind of the word
2636 /// surrounding the given position.
2637 pub fn surrounding_word<T: ToOffset>(&self, start: T) -> (Range<usize>, Option<CharKind>) {
2638 let mut start = start.to_offset(self);
2639 let mut end = start;
2640 let mut next_chars = self.chars_at(start).peekable();
2641 let mut prev_chars = self.reversed_chars_at(start).peekable();
2642
2643 let scope = self.language_scope_at(start);
2644 let kind = |c| char_kind(&scope, c);
2645 let word_kind = cmp::max(
2646 prev_chars.peek().copied().map(kind),
2647 next_chars.peek().copied().map(kind),
2648 );
2649
2650 for ch in prev_chars {
2651 if Some(kind(ch)) == word_kind && ch != '\n' {
2652 start -= ch.len_utf8();
2653 } else {
2654 break;
2655 }
2656 }
2657
2658 for ch in next_chars {
2659 if Some(kind(ch)) == word_kind && ch != '\n' {
2660 end += ch.len_utf8();
2661 } else {
2662 break;
2663 }
2664 }
2665
2666 (start..end, word_kind)
2667 }
2668
2669 /// Returns the range for the closes syntax node enclosing the given range.
2670 pub fn range_for_syntax_ancestor<T: ToOffset>(&self, range: Range<T>) -> Option<Range<usize>> {
2671 let range = range.start.to_offset(self)..range.end.to_offset(self);
2672 let mut result: Option<Range<usize>> = None;
2673 'outer: for layer in self.syntax.layers_for_range(range.clone(), &self.text) {
2674 let mut cursor = layer.node().walk();
2675
2676 // Descend to the first leaf that touches the start of the range,
2677 // and if the range is non-empty, extends beyond the start.
2678 while cursor.goto_first_child_for_byte(range.start).is_some() {
2679 if !range.is_empty() && cursor.node().end_byte() == range.start {
2680 cursor.goto_next_sibling();
2681 }
2682 }
2683
2684 // Ascend to the smallest ancestor that strictly contains the range.
2685 loop {
2686 let node_range = cursor.node().byte_range();
2687 if node_range.start <= range.start
2688 && node_range.end >= range.end
2689 && node_range.len() > range.len()
2690 {
2691 break;
2692 }
2693 if !cursor.goto_parent() {
2694 continue 'outer;
2695 }
2696 }
2697
2698 let left_node = cursor.node();
2699 let mut layer_result = left_node.byte_range();
2700
2701 // For an empty range, try to find another node immediately to the right of the range.
2702 if left_node.end_byte() == range.start {
2703 let mut right_node = None;
2704 while !cursor.goto_next_sibling() {
2705 if !cursor.goto_parent() {
2706 break;
2707 }
2708 }
2709
2710 while cursor.node().start_byte() == range.start {
2711 right_node = Some(cursor.node());
2712 if !cursor.goto_first_child() {
2713 break;
2714 }
2715 }
2716
2717 // If there is a candidate node on both sides of the (empty) range, then
2718 // decide between the two by favoring a named node over an anonymous token.
2719 // If both nodes are the same in that regard, favor the right one.
2720 if let Some(right_node) = right_node {
2721 if right_node.is_named() || !left_node.is_named() {
2722 layer_result = right_node.byte_range();
2723 }
2724 }
2725 }
2726
2727 if let Some(previous_result) = &result {
2728 if previous_result.len() < layer_result.len() {
2729 continue;
2730 }
2731 }
2732 result = Some(layer_result);
2733 }
2734
2735 result
2736 }
2737
2738 /// Returns the outline for the buffer.
2739 ///
2740 /// This method allows passing an optional [SyntaxTheme] to
2741 /// syntax-highlight the returned symbols.
2742 pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Option<Outline<Anchor>> {
2743 self.outline_items_containing(0..self.len(), true, theme)
2744 .map(Outline::new)
2745 }
2746
2747 /// Returns all the symbols that contain the given position.
2748 ///
2749 /// This method allows passing an optional [SyntaxTheme] to
2750 /// syntax-highlight the returned symbols.
2751 pub fn symbols_containing<T: ToOffset>(
2752 &self,
2753 position: T,
2754 theme: Option<&SyntaxTheme>,
2755 ) -> Option<Vec<OutlineItem<Anchor>>> {
2756 let position = position.to_offset(self);
2757 let mut items = self.outline_items_containing(
2758 position.saturating_sub(1)..self.len().min(position + 1),
2759 false,
2760 theme,
2761 )?;
2762 let mut prev_depth = None;
2763 items.retain(|item| {
2764 let result = prev_depth.map_or(true, |prev_depth| item.depth > prev_depth);
2765 prev_depth = Some(item.depth);
2766 result
2767 });
2768 Some(items)
2769 }
2770
2771 fn outline_items_containing(
2772 &self,
2773 range: Range<usize>,
2774 include_extra_context: bool,
2775 theme: Option<&SyntaxTheme>,
2776 ) -> Option<Vec<OutlineItem<Anchor>>> {
2777 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2778 grammar.outline_config.as_ref().map(|c| &c.query)
2779 });
2780 let configs = matches
2781 .grammars()
2782 .iter()
2783 .map(|g| g.outline_config.as_ref().unwrap())
2784 .collect::<Vec<_>>();
2785
2786 let mut stack = Vec::<Range<usize>>::new();
2787 let mut items = Vec::new();
2788 while let Some(mat) = matches.peek() {
2789 let config = &configs[mat.grammar_index];
2790 let item_node = mat.captures.iter().find_map(|cap| {
2791 if cap.index == config.item_capture_ix {
2792 Some(cap.node)
2793 } else {
2794 None
2795 }
2796 })?;
2797
2798 let item_range = item_node.byte_range();
2799 if item_range.end < range.start || item_range.start > range.end {
2800 matches.advance();
2801 continue;
2802 }
2803
2804 let mut buffer_ranges = Vec::new();
2805 for capture in mat.captures {
2806 let node_is_name;
2807 if capture.index == config.name_capture_ix {
2808 node_is_name = true;
2809 } else if Some(capture.index) == config.context_capture_ix
2810 || (Some(capture.index) == config.extra_context_capture_ix
2811 && include_extra_context)
2812 {
2813 node_is_name = false;
2814 } else {
2815 continue;
2816 }
2817
2818 let mut range = capture.node.start_byte()..capture.node.end_byte();
2819 let start = capture.node.start_position();
2820 if capture.node.end_position().row > start.row {
2821 range.end =
2822 range.start + self.line_len(start.row as u32) as usize - start.column;
2823 }
2824
2825 if !range.is_empty() {
2826 buffer_ranges.push((range, node_is_name));
2827 }
2828 }
2829
2830 if buffer_ranges.is_empty() {
2831 matches.advance();
2832 continue;
2833 }
2834
2835 let mut text = String::new();
2836 let mut highlight_ranges = Vec::new();
2837 let mut name_ranges = Vec::new();
2838 let mut chunks = self.chunks(
2839 buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end,
2840 true,
2841 );
2842 let mut last_buffer_range_end = 0;
2843 for (buffer_range, is_name) in buffer_ranges {
2844 if !text.is_empty() && buffer_range.start > last_buffer_range_end {
2845 text.push(' ');
2846 }
2847 last_buffer_range_end = buffer_range.end;
2848 if is_name {
2849 let mut start = text.len();
2850 let end = start + buffer_range.len();
2851
2852 // When multiple names are captured, then the matcheable text
2853 // includes the whitespace in between the names.
2854 if !name_ranges.is_empty() {
2855 start -= 1;
2856 }
2857
2858 name_ranges.push(start..end);
2859 }
2860
2861 let mut offset = buffer_range.start;
2862 chunks.seek(offset);
2863 for mut chunk in chunks.by_ref() {
2864 if chunk.text.len() > buffer_range.end - offset {
2865 chunk.text = &chunk.text[0..(buffer_range.end - offset)];
2866 offset = buffer_range.end;
2867 } else {
2868 offset += chunk.text.len();
2869 }
2870 let style = chunk
2871 .syntax_highlight_id
2872 .zip(theme)
2873 .and_then(|(highlight, theme)| highlight.style(theme));
2874 if let Some(style) = style {
2875 let start = text.len();
2876 let end = start + chunk.text.len();
2877 highlight_ranges.push((start..end, style));
2878 }
2879 text.push_str(chunk.text);
2880 if offset >= buffer_range.end {
2881 break;
2882 }
2883 }
2884 }
2885
2886 matches.advance();
2887 while stack.last().map_or(false, |prev_range| {
2888 prev_range.start > item_range.start || prev_range.end < item_range.end
2889 }) {
2890 stack.pop();
2891 }
2892 stack.push(item_range.clone());
2893
2894 items.push(OutlineItem {
2895 depth: stack.len() - 1,
2896 range: self.anchor_after(item_range.start)..self.anchor_before(item_range.end),
2897 text,
2898 highlight_ranges,
2899 name_ranges,
2900 })
2901 }
2902 Some(items)
2903 }
2904
2905 /// For each grammar in the language, runs the provided
2906 /// [tree_sitter::Query] against the given range.
2907 pub fn matches(
2908 &self,
2909 range: Range<usize>,
2910 query: fn(&Grammar) -> Option<&tree_sitter::Query>,
2911 ) -> SyntaxMapMatches {
2912 self.syntax.matches(range, self, query)
2913 }
2914
2915 /// Returns bracket range pairs overlapping or adjacent to `range`
2916 pub fn bracket_ranges<T: ToOffset>(
2917 &self,
2918 range: Range<T>,
2919 ) -> impl Iterator<Item = (Range<usize>, Range<usize>)> + '_ {
2920 // Find bracket pairs that *inclusively* contain the given range.
2921 let range = range.start.to_offset(self).saturating_sub(1)
2922 ..self.len().min(range.end.to_offset(self) + 1);
2923
2924 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2925 grammar.brackets_config.as_ref().map(|c| &c.query)
2926 });
2927 let configs = matches
2928 .grammars()
2929 .iter()
2930 .map(|grammar| grammar.brackets_config.as_ref().unwrap())
2931 .collect::<Vec<_>>();
2932
2933 iter::from_fn(move || {
2934 while let Some(mat) = matches.peek() {
2935 let mut open = None;
2936 let mut close = None;
2937 let config = &configs[mat.grammar_index];
2938 for capture in mat.captures {
2939 if capture.index == config.open_capture_ix {
2940 open = Some(capture.node.byte_range());
2941 } else if capture.index == config.close_capture_ix {
2942 close = Some(capture.node.byte_range());
2943 }
2944 }
2945
2946 matches.advance();
2947
2948 let Some((open, close)) = open.zip(close) else {
2949 continue;
2950 };
2951
2952 let bracket_range = open.start..=close.end;
2953 if !bracket_range.overlaps(&range) {
2954 continue;
2955 }
2956
2957 return Some((open, close));
2958 }
2959 None
2960 })
2961 }
2962
2963 /// Returns enclosing bracket ranges containing the given range
2964 pub fn enclosing_bracket_ranges<T: ToOffset>(
2965 &self,
2966 range: Range<T>,
2967 ) -> impl Iterator<Item = (Range<usize>, Range<usize>)> + '_ {
2968 let range = range.start.to_offset(self)..range.end.to_offset(self);
2969
2970 self.bracket_ranges(range.clone())
2971 .filter(move |(open, close)| open.start <= range.start && close.end >= range.end)
2972 }
2973
2974 /// Returns the smallest enclosing bracket ranges containing the given range or None if no brackets contain range
2975 ///
2976 /// Can optionally pass a range_filter to filter the ranges of brackets to consider
2977 pub fn innermost_enclosing_bracket_ranges<T: ToOffset>(
2978 &self,
2979 range: Range<T>,
2980 range_filter: Option<&dyn Fn(Range<usize>, Range<usize>) -> bool>,
2981 ) -> Option<(Range<usize>, Range<usize>)> {
2982 let range = range.start.to_offset(self)..range.end.to_offset(self);
2983
2984 // Get the ranges of the innermost pair of brackets.
2985 let mut result: Option<(Range<usize>, Range<usize>)> = None;
2986
2987 for (open, close) in self.enclosing_bracket_ranges(range.clone()) {
2988 if let Some(range_filter) = range_filter {
2989 if !range_filter(open.clone(), close.clone()) {
2990 continue;
2991 }
2992 }
2993
2994 let len = close.end - open.start;
2995
2996 if let Some((existing_open, existing_close)) = &result {
2997 let existing_len = existing_close.end - existing_open.start;
2998 if len > existing_len {
2999 continue;
3000 }
3001 }
3002
3003 result = Some((open, close));
3004 }
3005
3006 result
3007 }
3008
3009 /// Returns anchor ranges for any matches of the redaction query.
3010 /// The buffer can be associated with multiple languages, and the redaction query associated with each
3011 /// will be run on the relevant section of the buffer.
3012 pub fn redacted_ranges<T: ToOffset>(
3013 &self,
3014 range: Range<T>,
3015 ) -> impl Iterator<Item = Range<usize>> + '_ {
3016 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
3017 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
3018 grammar
3019 .redactions_config
3020 .as_ref()
3021 .map(|config| &config.query)
3022 });
3023
3024 let configs = syntax_matches
3025 .grammars()
3026 .iter()
3027 .map(|grammar| grammar.redactions_config.as_ref())
3028 .collect::<Vec<_>>();
3029
3030 iter::from_fn(move || {
3031 let redacted_range = syntax_matches
3032 .peek()
3033 .and_then(|mat| {
3034 configs[mat.grammar_index].and_then(|config| {
3035 mat.captures
3036 .iter()
3037 .find(|capture| capture.index == config.redaction_capture_ix)
3038 })
3039 })
3040 .map(|mat| mat.node.byte_range());
3041 syntax_matches.advance();
3042 redacted_range
3043 })
3044 }
3045
3046 pub fn runnable_ranges(
3047 &self,
3048 range: Range<Anchor>,
3049 ) -> impl Iterator<Item = RunnableRange> + '_ {
3050 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
3051
3052 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
3053 grammar.runnable_config.as_ref().map(|config| &config.query)
3054 });
3055
3056 let test_configs = syntax_matches
3057 .grammars()
3058 .iter()
3059 .map(|grammar| grammar.runnable_config.as_ref())
3060 .collect::<Vec<_>>();
3061
3062 iter::from_fn(move || loop {
3063 let mat = syntax_matches.peek()?;
3064
3065 let test_range = test_configs[mat.grammar_index].and_then(|test_configs| {
3066 let mut run_range = None;
3067 let full_range = mat.captures.iter().fold(
3068 Range {
3069 start: usize::MAX,
3070 end: 0,
3071 },
3072 |mut acc, next| {
3073 let byte_range = next.node.byte_range();
3074 if acc.start > byte_range.start {
3075 acc.start = byte_range.start;
3076 }
3077 if acc.end < byte_range.end {
3078 acc.end = byte_range.end;
3079 }
3080 acc
3081 },
3082 );
3083 if full_range.start > full_range.end {
3084 // We did not find a full spanning range of this match.
3085 return None;
3086 }
3087 let extra_captures: SmallVec<[_; 1]> =
3088 SmallVec::from_iter(mat.captures.iter().filter_map(|capture| {
3089 test_configs
3090 .extra_captures
3091 .get(capture.index as usize)
3092 .cloned()
3093 .and_then(|tag_name| match tag_name {
3094 RunnableCapture::Named(name) => {
3095 Some((capture.node.byte_range(), name))
3096 }
3097 RunnableCapture::Run => {
3098 let _ = run_range.insert(capture.node.byte_range());
3099 None
3100 }
3101 })
3102 }));
3103 let run_range = run_range?;
3104 let tags = test_configs
3105 .query
3106 .property_settings(mat.pattern_index)
3107 .iter()
3108 .filter_map(|property| {
3109 if *property.key == *"tag" {
3110 property
3111 .value
3112 .as_ref()
3113 .map(|value| RunnableTag(value.to_string().into()))
3114 } else {
3115 None
3116 }
3117 })
3118 .collect();
3119 let extra_captures = extra_captures
3120 .into_iter()
3121 .map(|(range, name)| {
3122 (
3123 name.to_string(),
3124 self.text_for_range(range.clone()).collect::<String>(),
3125 )
3126 })
3127 .collect();
3128 // All tags should have the same range.
3129 Some(RunnableRange {
3130 run_range,
3131 full_range,
3132 runnable: Runnable {
3133 tags,
3134 language: mat.language,
3135 buffer: self.remote_id(),
3136 },
3137 extra_captures,
3138 buffer_id: self.remote_id(),
3139 })
3140 });
3141
3142 syntax_matches.advance();
3143 if test_range.is_some() {
3144 // It's fine for us to short-circuit on .peek()? returning None. We don't want to return None from this iter if we
3145 // had a capture that did not contain a run marker, hence we'll just loop around for the next capture.
3146 return test_range;
3147 }
3148 })
3149 }
3150
3151 pub fn indent_guides_in_range(
3152 &self,
3153 range: Range<Anchor>,
3154 cx: &AppContext,
3155 ) -> Vec<IndentGuide> {
3156 fn tab_size_for_row(this: &BufferSnapshot, row: BufferRow, cx: &AppContext) -> u32 {
3157 let language = this.language_at(Point::new(row, 0));
3158 language_settings(language, None, cx).tab_size.get() as u32
3159 }
3160
3161 let start_row = range.start.to_point(self).row;
3162 let end_row = range.end.to_point(self).row;
3163 let row_range = start_row..end_row + 1;
3164
3165 let mut row_indents = self.line_indents_in_row_range(row_range.clone());
3166
3167 let mut result_vec = Vec::new();
3168 let mut indent_stack = SmallVec::<[IndentGuide; 8]>::new();
3169
3170 // TODO: This should be calculated for every row but it is pretty expensive
3171 let tab_size = tab_size_for_row(self, start_row, cx);
3172
3173 while let Some((first_row, mut line_indent)) = row_indents.next() {
3174 let current_depth = indent_stack.len() as u32;
3175
3176 // When encountering empty, continue until found useful line indent
3177 // then add to the indent stack with the depth found
3178 let mut found_indent = false;
3179 let mut last_row = first_row;
3180 if line_indent.is_line_empty() {
3181 let mut trailing_row = end_row;
3182 while !found_indent {
3183 let (target_row, new_line_indent) =
3184 if let Some(display_row) = row_indents.next() {
3185 display_row
3186 } else {
3187 // This means we reached the end of the given range and found empty lines at the end.
3188 // We need to traverse further until we find a non-empty line to know if we need to add
3189 // an indent guide for the last visible indent.
3190 trailing_row += 1;
3191
3192 const TRAILING_ROW_SEARCH_LIMIT: u32 = 25;
3193 if trailing_row > self.max_point().row
3194 || trailing_row > end_row + TRAILING_ROW_SEARCH_LIMIT
3195 {
3196 break;
3197 }
3198 let new_line_indent = self.line_indent_for_row(trailing_row);
3199 (trailing_row, new_line_indent)
3200 };
3201
3202 if new_line_indent.is_line_empty() {
3203 continue;
3204 }
3205 last_row = target_row.min(end_row);
3206 line_indent = new_line_indent;
3207 found_indent = true;
3208 break;
3209 }
3210 } else {
3211 found_indent = true
3212 }
3213
3214 let depth = if found_indent {
3215 line_indent.len(tab_size) / tab_size
3216 + ((line_indent.len(tab_size) % tab_size) > 0) as u32
3217 } else {
3218 current_depth
3219 };
3220
3221 if depth < current_depth {
3222 for _ in 0..(current_depth - depth) {
3223 let mut indent = indent_stack.pop().unwrap();
3224 if last_row != first_row {
3225 // In this case, we landed on an empty row, had to seek forward,
3226 // and discovered that the indent we where on is ending.
3227 // This means that the last display row must
3228 // be on line that ends this indent range, so we
3229 // should display the range up to the first non-empty line
3230 indent.end_row = first_row.saturating_sub(1);
3231 }
3232
3233 result_vec.push(indent)
3234 }
3235 } else if depth > current_depth {
3236 for next_depth in current_depth..depth {
3237 indent_stack.push(IndentGuide {
3238 buffer_id: self.remote_id(),
3239 start_row: first_row,
3240 end_row: last_row,
3241 depth: next_depth,
3242 tab_size,
3243 });
3244 }
3245 }
3246
3247 for indent in indent_stack.iter_mut() {
3248 indent.end_row = last_row;
3249 }
3250 }
3251
3252 result_vec.extend(indent_stack);
3253
3254 result_vec
3255 }
3256
3257 pub async fn enclosing_indent(
3258 &self,
3259 mut buffer_row: BufferRow,
3260 ) -> Option<(Range<BufferRow>, LineIndent)> {
3261 let max_row = self.max_point().row;
3262 if buffer_row >= max_row {
3263 return None;
3264 }
3265
3266 let mut target_indent = self.line_indent_for_row(buffer_row);
3267
3268 // If the current row is at the start of an indented block, we want to return this
3269 // block as the enclosing indent.
3270 if !target_indent.is_line_empty() && buffer_row < max_row {
3271 let next_line_indent = self.line_indent_for_row(buffer_row + 1);
3272 if !next_line_indent.is_line_empty()
3273 && target_indent.raw_len() < next_line_indent.raw_len()
3274 {
3275 target_indent = next_line_indent;
3276 buffer_row += 1;
3277 }
3278 }
3279
3280 const SEARCH_ROW_LIMIT: u32 = 25000;
3281 const SEARCH_WHITESPACE_ROW_LIMIT: u32 = 2500;
3282 const YIELD_INTERVAL: u32 = 100;
3283
3284 let mut accessed_row_counter = 0;
3285
3286 // If there is a blank line at the current row, search for the next non indented lines
3287 if target_indent.is_line_empty() {
3288 let start = buffer_row.saturating_sub(SEARCH_WHITESPACE_ROW_LIMIT);
3289 let end = (max_row + 1).min(buffer_row + SEARCH_WHITESPACE_ROW_LIMIT);
3290
3291 let mut non_empty_line_above = None;
3292 for (row, indent) in self
3293 .text
3294 .reversed_line_indents_in_row_range(start..buffer_row)
3295 {
3296 accessed_row_counter += 1;
3297 if accessed_row_counter == YIELD_INTERVAL {
3298 accessed_row_counter = 0;
3299 yield_now().await;
3300 }
3301 if !indent.is_line_empty() {
3302 non_empty_line_above = Some((row, indent));
3303 break;
3304 }
3305 }
3306
3307 let mut non_empty_line_below = None;
3308 for (row, indent) in self.text.line_indents_in_row_range((buffer_row + 1)..end) {
3309 accessed_row_counter += 1;
3310 if accessed_row_counter == YIELD_INTERVAL {
3311 accessed_row_counter = 0;
3312 yield_now().await;
3313 }
3314 if !indent.is_line_empty() {
3315 non_empty_line_below = Some((row, indent));
3316 break;
3317 }
3318 }
3319
3320 let (row, indent) = match (non_empty_line_above, non_empty_line_below) {
3321 (Some((above_row, above_indent)), Some((below_row, below_indent))) => {
3322 if above_indent.raw_len() >= below_indent.raw_len() {
3323 (above_row, above_indent)
3324 } else {
3325 (below_row, below_indent)
3326 }
3327 }
3328 (Some(above), None) => above,
3329 (None, Some(below)) => below,
3330 _ => return None,
3331 };
3332
3333 target_indent = indent;
3334 buffer_row = row;
3335 }
3336
3337 let start = buffer_row.saturating_sub(SEARCH_ROW_LIMIT);
3338 let end = (max_row + 1).min(buffer_row + SEARCH_ROW_LIMIT);
3339
3340 let mut start_indent = None;
3341 for (row, indent) in self
3342 .text
3343 .reversed_line_indents_in_row_range(start..buffer_row)
3344 {
3345 accessed_row_counter += 1;
3346 if accessed_row_counter == YIELD_INTERVAL {
3347 accessed_row_counter = 0;
3348 yield_now().await;
3349 }
3350 if !indent.is_line_empty() && indent.raw_len() < target_indent.raw_len() {
3351 start_indent = Some((row, indent));
3352 break;
3353 }
3354 }
3355 let (start_row, start_indent_size) = start_indent?;
3356
3357 let mut end_indent = (end, None);
3358 for (row, indent) in self.text.line_indents_in_row_range((buffer_row + 1)..end) {
3359 accessed_row_counter += 1;
3360 if accessed_row_counter == YIELD_INTERVAL {
3361 accessed_row_counter = 0;
3362 yield_now().await;
3363 }
3364 if !indent.is_line_empty() && indent.raw_len() < target_indent.raw_len() {
3365 end_indent = (row.saturating_sub(1), Some(indent));
3366 break;
3367 }
3368 }
3369 let (end_row, end_indent_size) = end_indent;
3370
3371 let indent = if let Some(end_indent_size) = end_indent_size {
3372 if start_indent_size.raw_len() > end_indent_size.raw_len() {
3373 start_indent_size
3374 } else {
3375 end_indent_size
3376 }
3377 } else {
3378 start_indent_size
3379 };
3380
3381 Some((start_row..end_row, indent))
3382 }
3383
3384 /// Returns selections for remote peers intersecting the given range.
3385 #[allow(clippy::type_complexity)]
3386 pub fn remote_selections_in_range(
3387 &self,
3388 range: Range<Anchor>,
3389 ) -> impl Iterator<
3390 Item = (
3391 ReplicaId,
3392 bool,
3393 CursorShape,
3394 impl Iterator<Item = &Selection<Anchor>> + '_,
3395 ),
3396 > + '_ {
3397 self.remote_selections
3398 .iter()
3399 .filter(|(replica_id, set)| {
3400 **replica_id != self.text.replica_id() && !set.selections.is_empty()
3401 })
3402 .map(move |(replica_id, set)| {
3403 let start_ix = match set.selections.binary_search_by(|probe| {
3404 probe.end.cmp(&range.start, self).then(Ordering::Greater)
3405 }) {
3406 Ok(ix) | Err(ix) => ix,
3407 };
3408 let end_ix = match set.selections.binary_search_by(|probe| {
3409 probe.start.cmp(&range.end, self).then(Ordering::Less)
3410 }) {
3411 Ok(ix) | Err(ix) => ix,
3412 };
3413
3414 (
3415 *replica_id,
3416 set.line_mode,
3417 set.cursor_shape,
3418 set.selections[start_ix..end_ix].iter(),
3419 )
3420 })
3421 }
3422
3423 /// Whether the buffer contains any git changes.
3424 pub fn has_git_diff(&self) -> bool {
3425 !self.git_diff.is_empty()
3426 }
3427
3428 /// Returns all the Git diff hunks intersecting the given
3429 /// row range.
3430 pub fn git_diff_hunks_in_row_range(
3431 &self,
3432 range: Range<BufferRow>,
3433 ) -> impl '_ + Iterator<Item = git::diff::DiffHunk<u32>> {
3434 self.git_diff.hunks_in_row_range(range, self)
3435 }
3436
3437 /// Returns all the Git diff hunks intersecting the given
3438 /// range.
3439 pub fn git_diff_hunks_intersecting_range(
3440 &self,
3441 range: Range<Anchor>,
3442 ) -> impl '_ + Iterator<Item = git::diff::DiffHunk<u32>> {
3443 self.git_diff.hunks_intersecting_range(range, self)
3444 }
3445
3446 /// Returns all the Git diff hunks intersecting the given
3447 /// range, in reverse order.
3448 pub fn git_diff_hunks_intersecting_range_rev(
3449 &self,
3450 range: Range<Anchor>,
3451 ) -> impl '_ + Iterator<Item = git::diff::DiffHunk<u32>> {
3452 self.git_diff.hunks_intersecting_range_rev(range, self)
3453 }
3454
3455 /// Returns if the buffer contains any diagnostics.
3456 pub fn has_diagnostics(&self) -> bool {
3457 !self.diagnostics.is_empty()
3458 }
3459
3460 /// Returns all the diagnostics intersecting the given range.
3461 pub fn diagnostics_in_range<'a, T, O>(
3462 &'a self,
3463 search_range: Range<T>,
3464 reversed: bool,
3465 ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
3466 where
3467 T: 'a + Clone + ToOffset,
3468 O: 'a + FromAnchor + Ord,
3469 {
3470 let mut iterators: Vec<_> = self
3471 .diagnostics
3472 .iter()
3473 .map(|(_, collection)| {
3474 collection
3475 .range::<T, O>(search_range.clone(), self, true, reversed)
3476 .peekable()
3477 })
3478 .collect();
3479
3480 std::iter::from_fn(move || {
3481 let (next_ix, _) = iterators
3482 .iter_mut()
3483 .enumerate()
3484 .flat_map(|(ix, iter)| Some((ix, iter.peek()?)))
3485 .min_by(|(_, a), (_, b)| {
3486 let cmp = a
3487 .range
3488 .start
3489 .cmp(&b.range.start)
3490 // when range is equal, sort by diagnostic severity
3491 .then(a.diagnostic.severity.cmp(&b.diagnostic.severity))
3492 // and stabilize order with group_id
3493 .then(a.diagnostic.group_id.cmp(&b.diagnostic.group_id));
3494 if reversed {
3495 cmp.reverse()
3496 } else {
3497 cmp
3498 }
3499 })?;
3500 iterators[next_ix].next()
3501 })
3502 }
3503
3504 /// Returns all the diagnostic groups associated with the given
3505 /// language server id. If no language server id is provided,
3506 /// all diagnostics groups are returned.
3507 pub fn diagnostic_groups(
3508 &self,
3509 language_server_id: Option<LanguageServerId>,
3510 ) -> Vec<(LanguageServerId, DiagnosticGroup<Anchor>)> {
3511 let mut groups = Vec::new();
3512
3513 if let Some(language_server_id) = language_server_id {
3514 if let Ok(ix) = self
3515 .diagnostics
3516 .binary_search_by_key(&language_server_id, |e| e.0)
3517 {
3518 self.diagnostics[ix]
3519 .1
3520 .groups(language_server_id, &mut groups, self);
3521 }
3522 } else {
3523 for (language_server_id, diagnostics) in self.diagnostics.iter() {
3524 diagnostics.groups(*language_server_id, &mut groups, self);
3525 }
3526 }
3527
3528 groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
3529 let a_start = &group_a.entries[group_a.primary_ix].range.start;
3530 let b_start = &group_b.entries[group_b.primary_ix].range.start;
3531 a_start.cmp(b_start, self).then_with(|| id_a.cmp(id_b))
3532 });
3533
3534 groups
3535 }
3536
3537 /// Returns an iterator over the diagnostics for the given group.
3538 pub fn diagnostic_group<'a, O>(
3539 &'a self,
3540 group_id: usize,
3541 ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
3542 where
3543 O: 'a + FromAnchor,
3544 {
3545 self.diagnostics
3546 .iter()
3547 .flat_map(move |(_, set)| set.group(group_id, self))
3548 }
3549
3550 /// The number of times diagnostics were updated.
3551 pub fn diagnostics_update_count(&self) -> usize {
3552 self.diagnostics_update_count
3553 }
3554
3555 /// The number of times the buffer was parsed.
3556 pub fn parse_count(&self) -> usize {
3557 self.parse_count
3558 }
3559
3560 /// The number of times selections were updated.
3561 pub fn selections_update_count(&self) -> usize {
3562 self.selections_update_count
3563 }
3564
3565 /// Returns a snapshot of underlying file.
3566 pub fn file(&self) -> Option<&Arc<dyn File>> {
3567 self.file.as_ref()
3568 }
3569
3570 /// Resolves the file path (relative to the worktree root) associated with the underlying file.
3571 pub fn resolve_file_path(&self, cx: &AppContext, include_root: bool) -> Option<PathBuf> {
3572 if let Some(file) = self.file() {
3573 if file.path().file_name().is_none() || include_root {
3574 Some(file.full_path(cx))
3575 } else {
3576 Some(file.path().to_path_buf())
3577 }
3578 } else {
3579 None
3580 }
3581 }
3582
3583 /// The number of times the underlying file was updated.
3584 pub fn file_update_count(&self) -> usize {
3585 self.file_update_count
3586 }
3587
3588 /// The number of times the git diff status was updated.
3589 pub fn git_diff_update_count(&self) -> usize {
3590 self.git_diff_update_count
3591 }
3592}
3593
3594fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
3595 indent_size_for_text(text.chars_at(Point::new(row, 0)))
3596}
3597
3598fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
3599 let mut result = IndentSize::spaces(0);
3600 for c in text {
3601 let kind = match c {
3602 ' ' => IndentKind::Space,
3603 '\t' => IndentKind::Tab,
3604 _ => break,
3605 };
3606 if result.len == 0 {
3607 result.kind = kind;
3608 }
3609 result.len += 1;
3610 }
3611 result
3612}
3613
3614impl Clone for BufferSnapshot {
3615 fn clone(&self) -> Self {
3616 Self {
3617 text: self.text.clone(),
3618 git_diff: self.git_diff.clone(),
3619 syntax: self.syntax.clone(),
3620 file: self.file.clone(),
3621 remote_selections: self.remote_selections.clone(),
3622 diagnostics: self.diagnostics.clone(),
3623 selections_update_count: self.selections_update_count,
3624 diagnostics_update_count: self.diagnostics_update_count,
3625 file_update_count: self.file_update_count,
3626 git_diff_update_count: self.git_diff_update_count,
3627 language: self.language.clone(),
3628 parse_count: self.parse_count,
3629 }
3630 }
3631}
3632
3633impl Deref for BufferSnapshot {
3634 type Target = text::BufferSnapshot;
3635
3636 fn deref(&self) -> &Self::Target {
3637 &self.text
3638 }
3639}
3640
3641unsafe impl<'a> Send for BufferChunks<'a> {}
3642
3643impl<'a> BufferChunks<'a> {
3644 pub(crate) fn new(
3645 text: &'a Rope,
3646 range: Range<usize>,
3647 syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
3648 diagnostic_endpoints: Vec<DiagnosticEndpoint>,
3649 ) -> Self {
3650 let mut highlights = None;
3651 if let Some((captures, highlight_maps)) = syntax {
3652 highlights = Some(BufferChunkHighlights {
3653 captures,
3654 next_capture: None,
3655 stack: Default::default(),
3656 highlight_maps,
3657 })
3658 }
3659
3660 let diagnostic_endpoints = diagnostic_endpoints.into_iter().peekable();
3661 let chunks = text.chunks_in_range(range.clone());
3662
3663 BufferChunks {
3664 range,
3665 chunks,
3666 diagnostic_endpoints,
3667 error_depth: 0,
3668 warning_depth: 0,
3669 information_depth: 0,
3670 hint_depth: 0,
3671 unnecessary_depth: 0,
3672 highlights,
3673 }
3674 }
3675
3676 /// Seeks to the given byte offset in the buffer.
3677 pub fn seek(&mut self, offset: usize) {
3678 self.range.start = offset;
3679 self.chunks.seek(self.range.start);
3680 if let Some(highlights) = self.highlights.as_mut() {
3681 highlights
3682 .stack
3683 .retain(|(end_offset, _)| *end_offset > offset);
3684 if let Some(capture) = &highlights.next_capture {
3685 if offset >= capture.node.start_byte() {
3686 let next_capture_end = capture.node.end_byte();
3687 if offset < next_capture_end {
3688 highlights.stack.push((
3689 next_capture_end,
3690 highlights.highlight_maps[capture.grammar_index].get(capture.index),
3691 ));
3692 }
3693 highlights.next_capture.take();
3694 }
3695 }
3696 highlights.captures.set_byte_range(self.range.clone());
3697 }
3698 }
3699
3700 /// The current byte offset in the buffer.
3701 pub fn offset(&self) -> usize {
3702 self.range.start
3703 }
3704
3705 fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
3706 let depth = match endpoint.severity {
3707 DiagnosticSeverity::ERROR => &mut self.error_depth,
3708 DiagnosticSeverity::WARNING => &mut self.warning_depth,
3709 DiagnosticSeverity::INFORMATION => &mut self.information_depth,
3710 DiagnosticSeverity::HINT => &mut self.hint_depth,
3711 _ => return,
3712 };
3713 if endpoint.is_start {
3714 *depth += 1;
3715 } else {
3716 *depth -= 1;
3717 }
3718
3719 if endpoint.is_unnecessary {
3720 if endpoint.is_start {
3721 self.unnecessary_depth += 1;
3722 } else {
3723 self.unnecessary_depth -= 1;
3724 }
3725 }
3726 }
3727
3728 fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
3729 if self.error_depth > 0 {
3730 Some(DiagnosticSeverity::ERROR)
3731 } else if self.warning_depth > 0 {
3732 Some(DiagnosticSeverity::WARNING)
3733 } else if self.information_depth > 0 {
3734 Some(DiagnosticSeverity::INFORMATION)
3735 } else if self.hint_depth > 0 {
3736 Some(DiagnosticSeverity::HINT)
3737 } else {
3738 None
3739 }
3740 }
3741
3742 fn current_code_is_unnecessary(&self) -> bool {
3743 self.unnecessary_depth > 0
3744 }
3745}
3746
3747impl<'a> Iterator for BufferChunks<'a> {
3748 type Item = Chunk<'a>;
3749
3750 fn next(&mut self) -> Option<Self::Item> {
3751 let mut next_capture_start = usize::MAX;
3752 let mut next_diagnostic_endpoint = usize::MAX;
3753
3754 if let Some(highlights) = self.highlights.as_mut() {
3755 while let Some((parent_capture_end, _)) = highlights.stack.last() {
3756 if *parent_capture_end <= self.range.start {
3757 highlights.stack.pop();
3758 } else {
3759 break;
3760 }
3761 }
3762
3763 if highlights.next_capture.is_none() {
3764 highlights.next_capture = highlights.captures.next();
3765 }
3766
3767 while let Some(capture) = highlights.next_capture.as_ref() {
3768 if self.range.start < capture.node.start_byte() {
3769 next_capture_start = capture.node.start_byte();
3770 break;
3771 } else {
3772 let highlight_id =
3773 highlights.highlight_maps[capture.grammar_index].get(capture.index);
3774 highlights
3775 .stack
3776 .push((capture.node.end_byte(), highlight_id));
3777 highlights.next_capture = highlights.captures.next();
3778 }
3779 }
3780 }
3781
3782 while let Some(endpoint) = self.diagnostic_endpoints.peek().copied() {
3783 if endpoint.offset <= self.range.start {
3784 self.update_diagnostic_depths(endpoint);
3785 self.diagnostic_endpoints.next();
3786 } else {
3787 next_diagnostic_endpoint = endpoint.offset;
3788 break;
3789 }
3790 }
3791
3792 if let Some(chunk) = self.chunks.peek() {
3793 let chunk_start = self.range.start;
3794 let mut chunk_end = (self.chunks.offset() + chunk.len())
3795 .min(next_capture_start)
3796 .min(next_diagnostic_endpoint);
3797 let mut highlight_id = None;
3798 if let Some(highlights) = self.highlights.as_ref() {
3799 if let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last() {
3800 chunk_end = chunk_end.min(*parent_capture_end);
3801 highlight_id = Some(*parent_highlight_id);
3802 }
3803 }
3804
3805 let slice =
3806 &chunk[chunk_start - self.chunks.offset()..chunk_end - self.chunks.offset()];
3807 self.range.start = chunk_end;
3808 if self.range.start == self.chunks.offset() + chunk.len() {
3809 self.chunks.next().unwrap();
3810 }
3811
3812 Some(Chunk {
3813 text: slice,
3814 syntax_highlight_id: highlight_id,
3815 diagnostic_severity: self.current_diagnostic_severity(),
3816 is_unnecessary: self.current_code_is_unnecessary(),
3817 ..Default::default()
3818 })
3819 } else {
3820 None
3821 }
3822 }
3823}
3824
3825impl operation_queue::Operation for Operation {
3826 fn lamport_timestamp(&self) -> clock::Lamport {
3827 match self {
3828 Operation::Buffer(_) => {
3829 unreachable!("buffer operations should never be deferred at this layer")
3830 }
3831 Operation::UpdateDiagnostics {
3832 lamport_timestamp, ..
3833 }
3834 | Operation::UpdateSelections {
3835 lamport_timestamp, ..
3836 }
3837 | Operation::UpdateCompletionTriggers {
3838 lamport_timestamp, ..
3839 } => *lamport_timestamp,
3840 }
3841 }
3842}
3843
3844impl Default for Diagnostic {
3845 fn default() -> Self {
3846 Self {
3847 source: Default::default(),
3848 code: None,
3849 severity: DiagnosticSeverity::ERROR,
3850 message: Default::default(),
3851 group_id: 0,
3852 is_primary: false,
3853 is_disk_based: false,
3854 is_unnecessary: false,
3855 }
3856 }
3857}
3858
3859impl IndentSize {
3860 /// Returns an [IndentSize] representing the given spaces.
3861 pub fn spaces(len: u32) -> Self {
3862 Self {
3863 len,
3864 kind: IndentKind::Space,
3865 }
3866 }
3867
3868 /// Returns an [IndentSize] representing a tab.
3869 pub fn tab() -> Self {
3870 Self {
3871 len: 1,
3872 kind: IndentKind::Tab,
3873 }
3874 }
3875
3876 /// An iterator over the characters represented by this [IndentSize].
3877 pub fn chars(&self) -> impl Iterator<Item = char> {
3878 iter::repeat(self.char()).take(self.len as usize)
3879 }
3880
3881 /// The character representation of this [IndentSize].
3882 pub fn char(&self) -> char {
3883 match self.kind {
3884 IndentKind::Space => ' ',
3885 IndentKind::Tab => '\t',
3886 }
3887 }
3888
3889 /// Consumes the current [IndentSize] and returns a new one that has
3890 /// been shrunk or enlarged by the given size along the given direction.
3891 pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
3892 match direction {
3893 Ordering::Less => {
3894 if self.kind == size.kind && self.len >= size.len {
3895 self.len -= size.len;
3896 }
3897 }
3898 Ordering::Equal => {}
3899 Ordering::Greater => {
3900 if self.len == 0 {
3901 self = size;
3902 } else if self.kind == size.kind {
3903 self.len += size.len;
3904 }
3905 }
3906 }
3907 self
3908 }
3909}
3910
3911#[cfg(any(test, feature = "test-support"))]
3912pub struct TestFile {
3913 pub path: Arc<Path>,
3914 pub root_name: String,
3915}
3916
3917#[cfg(any(test, feature = "test-support"))]
3918impl File for TestFile {
3919 fn path(&self) -> &Arc<Path> {
3920 &self.path
3921 }
3922
3923 fn full_path(&self, _: &gpui::AppContext) -> PathBuf {
3924 PathBuf::from(&self.root_name).join(self.path.as_ref())
3925 }
3926
3927 fn as_local(&self) -> Option<&dyn LocalFile> {
3928 None
3929 }
3930
3931 fn mtime(&self) -> Option<SystemTime> {
3932 unimplemented!()
3933 }
3934
3935 fn file_name<'a>(&'a self, _: &'a gpui::AppContext) -> &'a std::ffi::OsStr {
3936 self.path().file_name().unwrap_or(self.root_name.as_ref())
3937 }
3938
3939 fn worktree_id(&self) -> usize {
3940 0
3941 }
3942
3943 fn is_deleted(&self) -> bool {
3944 unimplemented!()
3945 }
3946
3947 fn as_any(&self) -> &dyn std::any::Any {
3948 unimplemented!()
3949 }
3950
3951 fn to_proto(&self) -> rpc::proto::File {
3952 unimplemented!()
3953 }
3954
3955 fn is_private(&self) -> bool {
3956 false
3957 }
3958}
3959
3960pub(crate) fn contiguous_ranges(
3961 values: impl Iterator<Item = u32>,
3962 max_len: usize,
3963) -> impl Iterator<Item = Range<u32>> {
3964 let mut values = values;
3965 let mut current_range: Option<Range<u32>> = None;
3966 std::iter::from_fn(move || loop {
3967 if let Some(value) = values.next() {
3968 if let Some(range) = &mut current_range {
3969 if value == range.end && range.len() < max_len {
3970 range.end += 1;
3971 continue;
3972 }
3973 }
3974
3975 let prev_range = current_range.clone();
3976 current_range = Some(value..(value + 1));
3977 if prev_range.is_some() {
3978 return prev_range;
3979 }
3980 } else {
3981 return current_range.take();
3982 }
3983 })
3984}
3985
3986/// Returns the [CharKind] for the given character. When a scope is provided,
3987/// the function checks if the character is considered a word character
3988/// based on the language scope's word character settings.
3989pub fn char_kind(scope: &Option<LanguageScope>, c: char) -> CharKind {
3990 if c.is_whitespace() {
3991 return CharKind::Whitespace;
3992 } else if c.is_alphanumeric() || c == '_' {
3993 return CharKind::Word;
3994 }
3995
3996 if let Some(scope) = scope {
3997 if let Some(characters) = scope.word_characters() {
3998 if characters.contains(&c) {
3999 return CharKind::Word;
4000 }
4001 }
4002 }
4003
4004 CharKind::Punctuation
4005}
4006
4007/// Find all of the ranges of whitespace that occur at the ends of lines
4008/// in the given rope.
4009///
4010/// This could also be done with a regex search, but this implementation
4011/// avoids copying text.
4012pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
4013 let mut ranges = Vec::new();
4014
4015 let mut offset = 0;
4016 let mut prev_chunk_trailing_whitespace_range = 0..0;
4017 for chunk in rope.chunks() {
4018 let mut prev_line_trailing_whitespace_range = 0..0;
4019 for (i, line) in chunk.split('\n').enumerate() {
4020 let line_end_offset = offset + line.len();
4021 let trimmed_line_len = line.trim_end_matches(|c| matches!(c, ' ' | '\t')).len();
4022 let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;
4023
4024 if i == 0 && trimmed_line_len == 0 {
4025 trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
4026 }
4027 if !prev_line_trailing_whitespace_range.is_empty() {
4028 ranges.push(prev_line_trailing_whitespace_range);
4029 }
4030
4031 offset = line_end_offset + 1;
4032 prev_line_trailing_whitespace_range = trailing_whitespace_range;
4033 }
4034
4035 offset -= 1;
4036 prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
4037 }
4038
4039 if !prev_chunk_trailing_whitespace_range.is_empty() {
4040 ranges.push(prev_chunk_trailing_whitespace_range);
4041 }
4042
4043 ranges
4044}