1pub use crate::{
2 diagnostic_set::DiagnosticSet,
3 highlight_map::{HighlightId, HighlightMap},
4 markdown::ParsedMarkdown,
5 proto, Grammar, Language, LanguageRegistry,
6};
7use crate::{
8 diagnostic_set::{DiagnosticEntry, DiagnosticGroup},
9 language_settings::{language_settings, IndentGuideSettings, LanguageSettings},
10 markdown::parse_markdown,
11 outline::OutlineItem,
12 syntax_map::{
13 SyntaxLayer, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures, SyntaxMapMatches,
14 SyntaxSnapshot, ToTreeSitterPoint,
15 },
16 task_context::RunnableRange,
17 LanguageScope, Outline, RunnableCapture, RunnableTag,
18};
19use anyhow::{anyhow, Context, Result};
20use async_watch as watch;
21pub use clock::ReplicaId;
22use futures::channel::oneshot;
23use gpui::{
24 AnyElement, AppContext, EventEmitter, HighlightStyle, ModelContext, Task, TaskLabel,
25 WindowContext,
26};
27use lazy_static::lazy_static;
28use lsp::LanguageServerId;
29use parking_lot::Mutex;
30use serde_json::Value;
31use similar::{ChangeTag, TextDiff};
32use smallvec::SmallVec;
33use smol::future::yield_now;
34use std::{
35 any::Any,
36 cell::Cell,
37 cmp::{self, Ordering, Reverse},
38 collections::BTreeMap,
39 ffi::OsStr,
40 fmt,
41 future::Future,
42 iter::{self, Iterator, Peekable},
43 mem,
44 ops::{Deref, Range},
45 path::{Path, PathBuf},
46 str,
47 sync::Arc,
48 time::{Duration, Instant, SystemTime},
49 vec,
50};
51use sum_tree::TreeMap;
52use text::operation_queue::OperationQueue;
53use text::*;
54pub use text::{
55 Anchor, Bias, Buffer as TextBuffer, BufferId, BufferSnapshot as TextBufferSnapshot, Edit,
56 OffsetRangeExt, OffsetUtf16, Patch, Point, PointUtf16, Rope, Selection, SelectionGoal,
57 Subscription, TextDimension, TextSummary, ToOffset, ToOffsetUtf16, ToPoint, ToPointUtf16,
58 Transaction, TransactionId, Unclipped,
59};
60use theme::SyntaxTheme;
61#[cfg(any(test, feature = "test-support"))]
62use util::RandomCharIter;
63use util::RangeExt;
64
65#[cfg(any(test, feature = "test-support"))]
66pub use {tree_sitter_rust, tree_sitter_typescript};
67
68pub use lsp::DiagnosticSeverity;
69
70lazy_static! {
71 /// A label for the background task spawned by the buffer to compute
72 /// a diff against the contents of its file.
73 pub static ref BUFFER_DIFF_TASK: TaskLabel = TaskLabel::new();
74}
75
/// Indicates whether a [Buffer] has permission to be edited.
77#[derive(PartialEq, Clone, Copy, Debug)]
78pub enum Capability {
79 /// The buffer is a mutable replica.
80 ReadWrite,
81 /// The buffer is a read-only replica.
82 ReadOnly,
83}
84
85pub type BufferRow = u32;
86
87/// An in-memory representation of a source code file, including its text,
88/// syntax trees, git status, and diagnostics.
89pub struct Buffer {
90 text: TextBuffer,
91 diff_base: Option<Rope>,
92 git_diff: git::diff::BufferDiff,
93 file: Option<Arc<dyn File>>,
94 /// The mtime of the file when this buffer was last loaded from
95 /// or saved to disk.
96 saved_mtime: Option<SystemTime>,
97 /// The version vector when this buffer was last loaded from
98 /// or saved to disk.
99 saved_version: clock::Global,
100 transaction_depth: usize,
101 was_dirty_before_starting_transaction: Option<bool>,
102 reload_task: Option<Task<Result<()>>>,
103 language: Option<Arc<Language>>,
104 autoindent_requests: Vec<Arc<AutoindentRequest>>,
105 pending_autoindent: Option<Task<()>>,
106 sync_parse_timeout: Duration,
107 syntax_map: Mutex<SyntaxMap>,
108 parsing_in_background: bool,
109 parse_status: (watch::Sender<ParseStatus>, watch::Receiver<ParseStatus>),
110 non_text_state_update_count: usize,
111 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
112 remote_selections: TreeMap<ReplicaId, SelectionSet>,
113 diagnostics_timestamp: clock::Lamport,
114 completion_triggers: Vec<String>,
115 completion_triggers_timestamp: clock::Lamport,
116 deferred_ops: OperationQueue<Operation>,
117 capability: Capability,
118 has_conflict: bool,
119 diff_base_version: usize,
    /// Memoizes calls to `has_changes_since(saved_version)`.
    /// The cell contains `(self.version, has_changes)` as of the most recent call.
122 has_unsaved_edits: Cell<(clock::Global, bool)>,
123}
124
125#[derive(Copy, Clone, Debug, PartialEq, Eq)]
126pub enum ParseStatus {
127 Idle,
128 Parsing,
129}
130
131/// An immutable, cheaply cloneable representation of a fixed
132/// state of a buffer.
133pub struct BufferSnapshot {
134 text: text::BufferSnapshot,
135 git_diff: git::diff::BufferDiff,
136 pub(crate) syntax: SyntaxSnapshot,
137 file: Option<Arc<dyn File>>,
138 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
139 remote_selections: TreeMap<ReplicaId, SelectionSet>,
140 language: Option<Arc<Language>>,
141 non_text_state_update_count: usize,
142}
143
144/// The kind and amount of indentation in a particular line. For now,
145/// assumes that indentation is all the same character.
146#[derive(Clone, Copy, Debug, PartialEq, Eq, Default)]
147pub struct IndentSize {
148 /// The number of bytes that comprise the indentation.
149 pub len: u32,
150 /// The kind of whitespace used for indentation.
151 pub kind: IndentKind,
152}
153
154/// A whitespace character that's used for indentation.
155#[derive(Clone, Copy, Debug, PartialEq, Eq, Default)]
156pub enum IndentKind {
157 /// An ASCII space character.
158 #[default]
159 Space,
160 /// An ASCII tab character.
161 Tab,
162}
163
164/// The shape of a selection cursor.
165#[derive(Copy, Clone, PartialEq, Eq, Debug, Default)]
166pub enum CursorShape {
167 /// A vertical bar
168 #[default]
169 Bar,
170 /// A block that surrounds the following character
171 Block,
172 /// An underline that runs along the following character
173 Underscore,
174 /// A box drawn around the following character
175 Hollow,
176}
177
178#[derive(Clone, Debug)]
179struct SelectionSet {
180 line_mode: bool,
181 cursor_shape: CursorShape,
182 selections: Arc<[Selection<Anchor>]>,
183 lamport_timestamp: clock::Lamport,
184}
185
186/// A diagnostic associated with a certain range of a buffer.
187#[derive(Clone, Debug, PartialEq, Eq)]
188pub struct Diagnostic {
189 /// The name of the service that produced this diagnostic.
190 pub source: Option<String>,
191 /// A machine-readable code that identifies this diagnostic.
192 pub code: Option<String>,
193 /// Whether this diagnostic is a hint, warning, or error.
194 pub severity: DiagnosticSeverity,
195 /// The human-readable message associated with this diagnostic.
196 pub message: String,
197 /// An id that identifies the group to which this diagnostic belongs.
198 ///
199 /// When a language server produces a diagnostic with
200 /// one or more associated diagnostics, those diagnostics are all
201 /// assigned a single group id.
202 pub group_id: usize,
203 /// Whether this diagnostic is the primary diagnostic for its group.
204 ///
205 /// In a given group, the primary diagnostic is the top-level diagnostic
206 /// returned by the language server. The non-primary diagnostics are the
207 /// associated diagnostics.
208 pub is_primary: bool,
209 /// Whether this diagnostic is considered to originate from an analysis of
210 /// files on disk, as opposed to any unsaved buffer contents. This is a
211 /// property of a given diagnostic source, and is configured for a given
212 /// language server via the [`LspAdapter::disk_based_diagnostic_sources`](crate::LspAdapter::disk_based_diagnostic_sources) method
213 /// for the language server.
214 pub is_disk_based: bool,
215 /// Whether this diagnostic marks unnecessary code.
216 pub is_unnecessary: bool,
217 /// Data from language server that produced this diagnostic. Passed back to the LS when we request code actions for this diagnostic.
218 pub data: Option<Value>,
219}
220
221/// TODO - move this into the `project` crate and make it private.
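///
/// A rough usage sketch (not a compiled doctest; assumes a populated
/// `LanguageRegistry` and an async context):
///
/// ```ignore
/// // A single-line plain string becomes `Documentation::SingleLine`, while
/// // markdown content is parsed into `Documentation::MultiLineMarkdown`.
/// let docs = prepare_completion_documentation(
///     &lsp::Documentation::String("Prints to stdout".into()),
///     &language_registry,
///     None,
/// )
/// .await;
/// assert!(matches!(docs, Documentation::SingleLine(_)));
/// ```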
222pub async fn prepare_completion_documentation(
223 documentation: &lsp::Documentation,
224 language_registry: &Arc<LanguageRegistry>,
225 language: Option<Arc<Language>>,
226) -> Documentation {
227 match documentation {
228 lsp::Documentation::String(text) => {
229 if text.lines().count() <= 1 {
230 Documentation::SingleLine(text.clone())
231 } else {
232 Documentation::MultiLinePlainText(text.clone())
233 }
234 }
235
236 lsp::Documentation::MarkupContent(lsp::MarkupContent { kind, value }) => match kind {
237 lsp::MarkupKind::PlainText => {
238 if value.lines().count() <= 1 {
239 Documentation::SingleLine(value.clone())
240 } else {
241 Documentation::MultiLinePlainText(value.clone())
242 }
243 }
244
245 lsp::MarkupKind::Markdown => {
246 let parsed = parse_markdown(value, language_registry, language).await;
247 Documentation::MultiLineMarkdown(parsed)
248 }
249 },
250 }
251}
252
253/// Documentation associated with a [`Completion`].
254#[derive(Clone, Debug)]
255pub enum Documentation {
256 /// There is no documentation for this completion.
257 Undocumented,
258 /// A single line of documentation.
259 SingleLine(String),
260 /// Multiple lines of plain text documentation.
261 MultiLinePlainText(String),
262 /// Markdown documentation.
263 MultiLineMarkdown(ParsedMarkdown),
264}
265
266/// An operation used to synchronize this buffer with its other replicas.
267#[derive(Clone, Debug, PartialEq)]
268pub enum Operation {
269 /// A text operation.
270 Buffer(text::Operation),
271
272 /// An update to the buffer's diagnostics.
273 UpdateDiagnostics {
274 /// The id of the language server that produced the new diagnostics.
275 server_id: LanguageServerId,
276 /// The diagnostics.
277 diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
278 /// The buffer's lamport timestamp.
279 lamport_timestamp: clock::Lamport,
280 },
281
282 /// An update to the most recent selections in this buffer.
283 UpdateSelections {
284 /// The selections.
285 selections: Arc<[Selection<Anchor>]>,
286 /// The buffer's lamport timestamp.
287 lamport_timestamp: clock::Lamport,
288 /// Whether the selections are in 'line mode'.
289 line_mode: bool,
290 /// The [`CursorShape`] associated with these selections.
291 cursor_shape: CursorShape,
292 },
293
294 /// An update to the characters that should trigger autocompletion
295 /// for this buffer.
296 UpdateCompletionTriggers {
297 /// The characters that trigger autocompletion.
298 triggers: Vec<String>,
299 /// The buffer's lamport timestamp.
300 lamport_timestamp: clock::Lamport,
301 },
302}
303
304/// An event that occurs in a buffer.
305#[derive(Clone, Debug, PartialEq)]
306pub enum Event {
307 /// The buffer was changed in a way that must be
308 /// propagated to its other replicas.
309 Operation(Operation),
310 /// The buffer was edited.
311 Edited,
312 /// The buffer's `dirty` bit changed.
313 DirtyChanged,
314 /// The buffer was saved.
315 Saved,
316 /// The buffer's file was changed on disk.
317 FileHandleChanged,
318 /// The buffer was reloaded.
319 Reloaded,
320 /// The buffer's diff_base changed.
321 DiffBaseChanged,
    /// The buffer's git diff against its diff base was recalculated.
323 DiffUpdated,
324 /// The buffer's language was changed.
325 LanguageChanged,
326 /// The buffer's syntax trees were updated.
327 Reparsed,
328 /// The buffer's diagnostics were updated.
329 DiagnosticsUpdated,
330 /// The buffer gained or lost editing capabilities.
331 CapabilityChanged,
332 /// The buffer was explicitly requested to close.
333 Closed,
334}
335
336/// The file associated with a buffer.
337pub trait File: Send + Sync {
338 /// Returns the [`LocalFile`] associated with this file, if the
339 /// file is local.
340 fn as_local(&self) -> Option<&dyn LocalFile>;
341
342 /// Returns whether this file is local.
343 fn is_local(&self) -> bool {
344 self.as_local().is_some()
345 }
346
347 /// Returns the file's mtime.
348 fn mtime(&self) -> Option<SystemTime>;
349
350 /// Returns the path of this file relative to the worktree's root directory.
351 fn path(&self) -> &Arc<Path>;
352
353 /// Returns the path of this file relative to the worktree's parent directory (this means it
354 /// includes the name of the worktree's root folder).
355 fn full_path(&self, cx: &AppContext) -> PathBuf;
356
357 /// Returns the last component of this handle's absolute path. If this handle refers to the root
358 /// of its worktree, then this method will return the name of the worktree itself.
359 fn file_name<'a>(&'a self, cx: &'a AppContext) -> &'a OsStr;
360
361 /// Returns the id of the worktree to which this file belongs.
362 ///
363 /// This is needed for looking up project-specific settings.
364 fn worktree_id(&self) -> usize;
365
366 /// Returns whether the file has been deleted.
367 fn is_deleted(&self) -> bool;
368
    /// Returns whether the file has existed on disk at some point.
370 fn is_created(&self) -> bool {
371 self.mtime().is_some()
372 }
373
374 /// Converts this file into an [`Any`] trait object.
375 fn as_any(&self) -> &dyn Any;
376
377 /// Converts this file into a protobuf message.
378 fn to_proto(&self, cx: &AppContext) -> rpc::proto::File;
379
    /// Returns whether Zed considers this to be a private file.
381 fn is_private(&self) -> bool;
382}
383
384/// The file associated with a buffer, in the case where the file is on the local disk.
385pub trait LocalFile: File {
386 /// Returns the absolute path of this file.
387 fn abs_path(&self, cx: &AppContext) -> PathBuf;
388
389 /// Loads the file's contents from disk.
390 fn load(&self, cx: &AppContext) -> Task<Result<String>>;
391
392 /// Returns true if the file should not be shared with collaborators.
393 fn is_private(&self, _: &AppContext) -> bool {
394 false
395 }
396}
397
398/// The auto-indent behavior associated with an editing operation.
399/// For some editing operations, each affected line of text has its
400/// indentation recomputed. For other operations, the entire block
401/// of edited text is adjusted uniformly.
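///
/// For example (an illustrative sketch, assuming a `buffer: &mut Buffer`
/// inside a model update and some copied text), block mode can re-base a
/// pasted block's indentation while preserving its internal structure:
///
/// ```ignore
/// buffer.edit(
///     [(insertion_offset..insertion_offset, copied_text)],
///     Some(AutoindentMode::Block {
///         // The indentation the first line of the copied text had in its
///         // original location.
///         original_indent_columns: vec![4],
///     }),
///     cx,
/// );
/// ```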
402#[derive(Clone, Debug)]
403pub enum AutoindentMode {
404 /// Indent each line of inserted text.
405 EachLine,
406 /// Apply the same indentation adjustment to all of the lines
407 /// in a given insertion.
408 Block {
409 /// The original indentation level of the first line of each
410 /// insertion, if it has been copied.
411 original_indent_columns: Vec<u32>,
412 },
413}
414
415#[derive(Clone)]
416struct AutoindentRequest {
417 before_edit: BufferSnapshot,
418 entries: Vec<AutoindentRequestEntry>,
419 is_block_mode: bool,
420}
421
422#[derive(Clone)]
423struct AutoindentRequestEntry {
424 /// A range of the buffer whose indentation should be adjusted.
425 range: Range<Anchor>,
426 /// Whether or not these lines should be considered brand new, for the
427 /// purpose of auto-indent. When text is not new, its indentation will
428 /// only be adjusted if the suggested indentation level has *changed*
429 /// since the edit was made.
430 first_line_is_new: bool,
431 indent_size: IndentSize,
432 original_indent_column: Option<u32>,
433}
434
435#[derive(Debug)]
436struct IndentSuggestion {
437 basis_row: u32,
438 delta: Ordering,
439 within_error: bool,
440}
441
442struct BufferChunkHighlights<'a> {
443 captures: SyntaxMapCaptures<'a>,
444 next_capture: Option<SyntaxMapCapture<'a>>,
445 stack: Vec<(usize, HighlightId)>,
446 highlight_maps: Vec<HighlightMap>,
447}
448
449/// An iterator that yields chunks of a buffer's text, along with their
450/// syntax highlights and diagnostic status.
451pub struct BufferChunks<'a> {
452 range: Range<usize>,
453 chunks: text::Chunks<'a>,
454 diagnostic_endpoints: Peekable<vec::IntoIter<DiagnosticEndpoint>>,
455 error_depth: usize,
456 warning_depth: usize,
457 information_depth: usize,
458 hint_depth: usize,
459 unnecessary_depth: usize,
460 highlights: Option<BufferChunkHighlights<'a>>,
461}
462
463/// A chunk of a buffer's text, along with its syntax highlight and
464/// diagnostic status.
465#[derive(Clone, Debug, Default)]
466pub struct Chunk<'a> {
467 /// The text of the chunk.
468 pub text: &'a str,
469 /// The syntax highlighting style of the chunk.
470 pub syntax_highlight_id: Option<HighlightId>,
471 /// The highlight style that has been applied to this chunk in
472 /// the editor.
473 pub highlight_style: Option<HighlightStyle>,
474 /// The severity of diagnostic associated with this chunk, if any.
475 pub diagnostic_severity: Option<DiagnosticSeverity>,
476 /// Whether this chunk of text is marked as unnecessary.
477 pub is_unnecessary: bool,
478 /// Whether this chunk of text was originally a tab character.
479 pub is_tab: bool,
480 /// An optional recipe for how the chunk should be presented.
481 pub renderer: Option<ChunkRenderer>,
482}
483
484/// A recipe for how the chunk should be presented.
485#[derive(Clone)]
486pub struct ChunkRenderer {
    /// Creates a custom element to represent this chunk.
488 pub render: Arc<dyn Send + Sync + Fn(&mut WindowContext) -> AnyElement>,
489 /// If true, the element is constrained to the shaped width of the text.
490 pub constrain_width: bool,
491}
492
493impl fmt::Debug for ChunkRenderer {
494 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
495 f.debug_struct("ChunkRenderer")
496 .field("constrain_width", &self.constrain_width)
497 .finish()
498 }
499}
500
501/// A set of edits to a given version of a buffer, computed asynchronously.
502#[derive(Debug)]
503pub struct Diff {
504 pub(crate) base_version: clock::Global,
505 line_ending: LineEnding,
506 edits: Vec<(Range<usize>, Arc<str>)>,
507}
508
509#[derive(Clone, Copy)]
510pub(crate) struct DiagnosticEndpoint {
511 offset: usize,
512 is_start: bool,
513 severity: DiagnosticSeverity,
514 is_unnecessary: bool,
515}
516
517/// A class of characters, used for characterizing a run of text.
518#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
519pub enum CharKind {
520 /// Whitespace.
521 Whitespace,
522 /// Punctuation.
523 Punctuation,
524 /// Word.
525 Word,
526}
527
/// A runnable is the set of data about a buffer region that can be resolved into a task.
529pub struct Runnable {
530 pub tags: SmallVec<[RunnableTag; 1]>,
531 pub language: Arc<Language>,
532 pub buffer: BufferId,
533}
534
535#[derive(Clone, Debug, PartialEq)]
536pub struct IndentGuide {
537 pub buffer_id: BufferId,
538 pub start_row: BufferRow,
539 pub end_row: BufferRow,
540 pub depth: u32,
541 pub tab_size: u32,
542 pub settings: IndentGuideSettings,
543}
544
545impl IndentGuide {
546 pub fn indent_level(&self) -> u32 {
547 self.depth * self.tab_size
548 }
549}
550
551impl Buffer {
552 /// Create a new buffer with the given base text.
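    ///
    /// A minimal construction sketch (assuming a gpui `AppContext` that can
    /// create models, and an `Arc<Language>` for Rust):
    ///
    /// ```ignore
    /// let buffer = cx.new_model(|cx| {
    ///     Buffer::local("fn main() {}\n", cx).with_language(rust_language, cx)
    /// });
    /// ```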
553 pub fn local<T: Into<String>>(base_text: T, cx: &mut ModelContext<Self>) -> Self {
554 Self::build(
555 TextBuffer::new(0, cx.entity_id().as_non_zero_u64().into(), base_text.into()),
556 None,
557 None,
558 Capability::ReadWrite,
559 )
560 }
561
562 /// Create a new buffer with the given base text that has proper line endings and other normalization applied.
563 pub fn local_normalized(
564 base_text_normalized: Rope,
565 line_ending: LineEnding,
566 cx: &mut ModelContext<Self>,
567 ) -> Self {
568 Self::build(
569 TextBuffer::new_normalized(
570 0,
571 cx.entity_id().as_non_zero_u64().into(),
572 line_ending,
573 base_text_normalized,
574 ),
575 None,
576 None,
577 Capability::ReadWrite,
578 )
579 }
580
581 /// Create a new buffer that is a replica of a remote buffer.
582 pub fn remote(
583 remote_id: BufferId,
584 replica_id: ReplicaId,
585 capability: Capability,
586 base_text: impl Into<String>,
587 ) -> Self {
588 Self::build(
589 TextBuffer::new(replica_id, remote_id, base_text.into()),
590 None,
591 None,
592 capability,
593 )
594 }
595
596 /// Create a new buffer that is a replica of a remote buffer, populating its
597 /// state from the given protobuf message.
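    ///
    /// Together with [`Buffer::to_proto`], this supports replicating a buffer
    /// between collaborators. A rough sketch (not a compiled doctest):
    ///
    /// ```ignore
    /// // On the host:
    /// let state = buffer.to_proto(cx);
    /// // On a guest, rebuild a writable replica from that state:
    /// let replica = Buffer::from_proto(guest_replica_id, Capability::ReadWrite, state, None)?;
    /// ```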
598 pub fn from_proto(
599 replica_id: ReplicaId,
600 capability: Capability,
601 message: proto::BufferState,
602 file: Option<Arc<dyn File>>,
603 ) -> Result<Self> {
604 let buffer_id = BufferId::new(message.id)
605 .with_context(|| anyhow!("Could not deserialize buffer_id"))?;
606 let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text);
607 let mut this = Self::build(buffer, message.diff_base, file, capability);
608 this.text.set_line_ending(proto::deserialize_line_ending(
609 rpc::proto::LineEnding::from_i32(message.line_ending)
610 .ok_or_else(|| anyhow!("missing line_ending"))?,
611 ));
612 this.saved_version = proto::deserialize_version(&message.saved_version);
613 this.saved_mtime = message.saved_mtime.map(|time| time.into());
614 Ok(this)
615 }
616
617 /// Serialize the buffer's state to a protobuf message.
618 pub fn to_proto(&self, cx: &AppContext) -> proto::BufferState {
619 proto::BufferState {
620 id: self.remote_id().into(),
621 file: self.file.as_ref().map(|f| f.to_proto(cx)),
622 base_text: self.base_text().to_string(),
623 diff_base: self.diff_base.as_ref().map(|h| h.to_string()),
624 line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
625 saved_version: proto::serialize_version(&self.saved_version),
626 saved_mtime: self.saved_mtime.map(|time| time.into()),
627 }
628 }
629
630 /// Serialize as protobufs all of the changes to the buffer since the given version.
631 pub fn serialize_ops(
632 &self,
633 since: Option<clock::Global>,
634 cx: &AppContext,
635 ) -> Task<Vec<proto::Operation>> {
636 let mut operations = Vec::new();
637 operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));
638
639 operations.extend(self.remote_selections.iter().map(|(_, set)| {
640 proto::serialize_operation(&Operation::UpdateSelections {
641 selections: set.selections.clone(),
642 lamport_timestamp: set.lamport_timestamp,
643 line_mode: set.line_mode,
644 cursor_shape: set.cursor_shape,
645 })
646 }));
647
648 for (server_id, diagnostics) in &self.diagnostics {
649 operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
650 lamport_timestamp: self.diagnostics_timestamp,
651 server_id: *server_id,
652 diagnostics: diagnostics.iter().cloned().collect(),
653 }));
654 }
655
656 operations.push(proto::serialize_operation(
657 &Operation::UpdateCompletionTriggers {
658 triggers: self.completion_triggers.clone(),
659 lamport_timestamp: self.completion_triggers_timestamp,
660 },
661 ));
662
663 let text_operations = self.text.operations().clone();
664 cx.background_executor().spawn(async move {
665 let since = since.unwrap_or_default();
666 operations.extend(
667 text_operations
668 .iter()
669 .filter(|(_, op)| !since.observed(op.timestamp()))
670 .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
671 );
672 operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
673 operations
674 })
675 }
676
677 /// Assign a language to the buffer, returning the buffer.
678 pub fn with_language(mut self, language: Arc<Language>, cx: &mut ModelContext<Self>) -> Self {
679 self.set_language(Some(language), cx);
680 self
681 }
682
683 /// Returns the [Capability] of this buffer.
684 pub fn capability(&self) -> Capability {
685 self.capability
686 }
687
688 /// Whether this buffer can only be read.
689 pub fn read_only(&self) -> bool {
690 self.capability == Capability::ReadOnly
691 }
692
693 /// Builds a [Buffer] with the given underlying [TextBuffer], diff base, [File] and [Capability].
694 pub fn build(
695 buffer: TextBuffer,
696 diff_base: Option<String>,
697 file: Option<Arc<dyn File>>,
698 capability: Capability,
699 ) -> Self {
700 let saved_mtime = file.as_ref().and_then(|file| file.mtime());
701
702 Self {
703 saved_mtime,
704 saved_version: buffer.version(),
705 reload_task: None,
706 transaction_depth: 0,
707 was_dirty_before_starting_transaction: None,
708 has_unsaved_edits: Cell::new((buffer.version(), false)),
709 text: buffer,
710 diff_base: diff_base
711 .map(|mut raw_diff_base| {
712 LineEnding::normalize(&mut raw_diff_base);
713 raw_diff_base
714 })
715 .map(Rope::from),
716 diff_base_version: 0,
717 git_diff: git::diff::BufferDiff::new(),
718 file,
719 capability,
720 syntax_map: Mutex::new(SyntaxMap::new()),
721 parsing_in_background: false,
722 non_text_state_update_count: 0,
723 sync_parse_timeout: Duration::from_millis(1),
724 parse_status: async_watch::channel(ParseStatus::Idle),
725 autoindent_requests: Default::default(),
726 pending_autoindent: Default::default(),
727 language: None,
728 remote_selections: Default::default(),
729 diagnostics: Default::default(),
730 diagnostics_timestamp: Default::default(),
731 completion_triggers: Default::default(),
732 completion_triggers_timestamp: Default::default(),
733 deferred_ops: OperationQueue::new(),
734 has_conflict: false,
735 }
736 }
737
738 /// Retrieve a snapshot of the buffer's current state. This is computationally
739 /// cheap, and allows reading from the buffer on a background thread.
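    ///
    /// For example (a sketch; `process` is a placeholder for any read-only
    /// work), a snapshot can be moved onto a background thread while the
    /// buffer continues to be edited in the foreground:
    ///
    /// ```ignore
    /// let snapshot = buffer.snapshot();
    /// cx.background_executor()
    ///     .spawn(async move {
    ///         // Read-only queries against `snapshot` (text, syntax,
    ///         // diagnostics) can run here without blocking the main thread.
    ///         process(&snapshot)
    ///     })
    ///     .detach();
    /// ```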
740 pub fn snapshot(&self) -> BufferSnapshot {
741 let text = self.text.snapshot();
742 let mut syntax_map = self.syntax_map.lock();
743 syntax_map.interpolate(&text);
744 let syntax = syntax_map.snapshot();
745
746 BufferSnapshot {
747 text,
748 syntax,
749 git_diff: self.git_diff.clone(),
750 file: self.file.clone(),
751 remote_selections: self.remote_selections.clone(),
752 diagnostics: self.diagnostics.clone(),
753 language: self.language.clone(),
754 non_text_state_update_count: self.non_text_state_update_count,
755 }
756 }
757
758 #[cfg(test)]
759 pub(crate) fn as_text_snapshot(&self) -> &text::BufferSnapshot {
760 &self.text
761 }
762
763 /// Retrieve a snapshot of the buffer's raw text, without any
764 /// language-related state like the syntax tree or diagnostics.
765 pub fn text_snapshot(&self) -> text::BufferSnapshot {
766 self.text.snapshot()
767 }
768
769 /// The file associated with the buffer, if any.
770 pub fn file(&self) -> Option<&Arc<dyn File>> {
771 self.file.as_ref()
772 }
773
774 /// The version of the buffer that was last saved or reloaded from disk.
775 pub fn saved_version(&self) -> &clock::Global {
776 &self.saved_version
777 }
778
779 /// The mtime of the buffer's file when the buffer was last saved or reloaded from disk.
780 pub fn saved_mtime(&self) -> Option<SystemTime> {
781 self.saved_mtime
782 }
783
784 /// Assign a language to the buffer.
785 pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut ModelContext<Self>) {
786 self.non_text_state_update_count += 1;
787 self.syntax_map.lock().clear();
788 self.language = language;
789 self.reparse(cx);
790 cx.emit(Event::LanguageChanged);
791 }
792
793 /// Assign a language registry to the buffer. This allows the buffer to retrieve
794 /// other languages if parts of the buffer are written in different languages.
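    ///
    /// A typical setup sketch (assuming an `Arc<LanguageRegistry>` and a
    /// Markdown language are available):
    ///
    /// ```ignore
    /// buffer.set_language_registry(language_registry.clone());
    /// buffer.set_language(Some(markdown_language), cx);
    /// // Fenced code blocks written in other languages can now be resolved
    /// // through the registry as injected syntax layers.
    /// ```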
795 pub fn set_language_registry(&mut self, language_registry: Arc<LanguageRegistry>) {
796 self.syntax_map
797 .lock()
798 .set_language_registry(language_registry);
799 }
800
801 pub fn language_registry(&self) -> Option<Arc<LanguageRegistry>> {
802 self.syntax_map.lock().language_registry()
803 }
804
805 /// Assign the buffer a new [Capability].
806 pub fn set_capability(&mut self, capability: Capability, cx: &mut ModelContext<Self>) {
807 self.capability = capability;
808 cx.emit(Event::CapabilityChanged)
809 }
810
811 /// This method is called to signal that the buffer has been saved.
812 pub fn did_save(
813 &mut self,
814 version: clock::Global,
815 mtime: Option<SystemTime>,
816 cx: &mut ModelContext<Self>,
817 ) {
818 self.saved_version = version;
819 self.has_unsaved_edits
820 .set((self.saved_version().clone(), false));
821 self.has_conflict = false;
822 self.saved_mtime = mtime;
823 cx.emit(Event::Saved);
824 cx.notify();
825 }
826
827 /// Reloads the contents of the buffer from disk.
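    ///
    /// The returned receiver resolves once the reload has finished. A rough
    /// sketch (assuming an `AsyncAppContext` named `cx` and a `Model<Buffer>`
    /// handle named `buffer`):
    ///
    /// ```ignore
    /// let reloaded = buffer.update(&mut cx, |buffer, cx| buffer.reload(cx))?;
    /// if let Ok(Some(transaction)) = reloaded.await {
    ///     // The reload changed the buffer; `transaction` can be used to undo it.
    /// }
    /// ```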
828 pub fn reload(
829 &mut self,
830 cx: &mut ModelContext<Self>,
831 ) -> oneshot::Receiver<Option<Transaction>> {
832 let (tx, rx) = futures::channel::oneshot::channel();
833 let prev_version = self.text.version();
834 self.reload_task = Some(cx.spawn(|this, mut cx| async move {
835 let Some((new_mtime, new_text)) = this.update(&mut cx, |this, cx| {
836 let file = this.file.as_ref()?.as_local()?;
837 Some((file.mtime(), file.load(cx)))
838 })?
839 else {
840 return Ok(());
841 };
842
843 let new_text = new_text.await?;
844 let diff = this
845 .update(&mut cx, |this, cx| this.diff(new_text.clone(), cx))?
846 .await;
847 this.update(&mut cx, |this, cx| {
848 if this.version() == diff.base_version {
849 this.finalize_last_transaction();
850 this.apply_diff(diff, cx);
851 tx.send(this.finalize_last_transaction().cloned()).ok();
852 this.has_conflict = false;
853 this.did_reload(this.version(), this.line_ending(), new_mtime, cx);
854 } else {
855 if !diff.edits.is_empty()
856 || this
857 .edits_since::<usize>(&diff.base_version)
858 .next()
859 .is_some()
860 {
861 this.has_conflict = true;
862 }
863
864 this.did_reload(prev_version, this.line_ending(), this.saved_mtime, cx);
865 }
866
867 this.reload_task.take();
868 })
869 }));
870 rx
871 }
872
873 /// This method is called to signal that the buffer has been reloaded.
874 pub fn did_reload(
875 &mut self,
876 version: clock::Global,
877 line_ending: LineEnding,
878 mtime: Option<SystemTime>,
879 cx: &mut ModelContext<Self>,
880 ) {
881 self.saved_version = version;
882 self.has_unsaved_edits
883 .set((self.saved_version.clone(), false));
884 self.text.set_line_ending(line_ending);
885 self.saved_mtime = mtime;
886 cx.emit(Event::Reloaded);
887 cx.notify();
888 }
889
890 /// Updates the [File] backing this buffer. This should be called when
891 /// the file has changed or has been deleted.
892 pub fn file_updated(&mut self, new_file: Arc<dyn File>, cx: &mut ModelContext<Self>) {
893 let mut file_changed = false;
894
895 if let Some(old_file) = self.file.as_ref() {
896 if new_file.path() != old_file.path() {
897 file_changed = true;
898 }
899
900 if new_file.is_deleted() {
901 if !old_file.is_deleted() {
902 file_changed = true;
903 if !self.is_dirty() {
904 cx.emit(Event::DirtyChanged);
905 }
906 }
907 } else {
908 let new_mtime = new_file.mtime();
909 if new_mtime != old_file.mtime() {
910 file_changed = true;
911
912 if !self.is_dirty() {
913 self.reload(cx).close();
914 }
915 }
916 }
917 } else {
918 file_changed = true;
919 };
920
921 self.file = Some(new_file);
922 if file_changed {
923 self.non_text_state_update_count += 1;
924 cx.emit(Event::FileHandleChanged);
925 cx.notify();
926 }
927 }
928
    /// Returns the current diff base; see [Buffer::set_diff_base].
930 pub fn diff_base(&self) -> Option<&Rope> {
931 self.diff_base.as_ref()
932 }
933
934 /// Sets the text that will be used to compute a Git diff
935 /// against the buffer text.
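    ///
    /// A sketch of typical usage (assuming `head_text` was loaded elsewhere,
    /// e.g. from the git index):
    ///
    /// ```ignore
    /// buffer.set_diff_base(Some(head_text), cx);
    /// // Once the diff has been recomputed, the buffer emits
    /// // `Event::DiffUpdated` and `Event::DiffBaseChanged`.
    /// ```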
936 pub fn set_diff_base(&mut self, diff_base: Option<String>, cx: &mut ModelContext<Self>) {
937 self.diff_base = diff_base
938 .map(|mut raw_diff_base| {
939 LineEnding::normalize(&mut raw_diff_base);
940 raw_diff_base
941 })
942 .map(Rope::from);
943 self.diff_base_version += 1;
944 if let Some(recalc_task) = self.git_diff_recalc(cx) {
945 cx.spawn(|buffer, mut cx| async move {
946 recalc_task.await;
947 buffer
948 .update(&mut cx, |_, cx| {
949 cx.emit(Event::DiffBaseChanged);
950 })
951 .ok();
952 })
953 .detach();
954 }
955 }
956
    /// Returns a version number that changes each time a new diff base is assigned to the buffer.
958 pub fn diff_base_version(&self) -> usize {
959 self.diff_base_version
960 }
961
962 /// Recomputes the Git diff status.
963 pub fn git_diff_recalc(&mut self, cx: &mut ModelContext<Self>) -> Option<Task<()>> {
964 let diff_base = self.diff_base.clone()?;
965 let snapshot = self.snapshot();
966
967 let mut diff = self.git_diff.clone();
968 let diff = cx.background_executor().spawn(async move {
969 diff.update(&diff_base, &snapshot).await;
970 diff
971 });
972
973 Some(cx.spawn(|this, mut cx| async move {
974 let buffer_diff = diff.await;
975 this.update(&mut cx, |this, cx| {
976 this.git_diff = buffer_diff;
977 this.non_text_state_update_count += 1;
978 cx.emit(Event::DiffUpdated);
979 })
980 .ok();
981 }))
982 }
983
984 /// Returns the primary [Language] assigned to this [Buffer].
985 pub fn language(&self) -> Option<&Arc<Language>> {
986 self.language.as_ref()
987 }
988
989 /// Returns the [Language] at the given location.
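    ///
    /// A sketch for a buffer with injected languages (offsets are
    /// illustrative):
    ///
    /// ```ignore
    /// // In an HTML buffer containing a <script> element, a position inside
    /// // the script resolves to the injected JavaScript language, while other
    /// // positions fall back to the buffer's primary language.
    /// let language = buffer.language_at(offset_inside_script);
    /// ```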
990 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
991 let offset = position.to_offset(self);
992 self.syntax_map
993 .lock()
994 .layers_for_range(offset..offset, &self.text)
995 .last()
996 .map(|info| info.language.clone())
997 .or_else(|| self.language.clone())
998 }
999
1000 /// An integer version number that accounts for all updates besides
1001 /// the buffer's text itself (which is versioned via a version vector).
1002 pub fn non_text_state_update_count(&self) -> usize {
1003 self.non_text_state_update_count
1004 }
1005
1006 /// Whether the buffer is being parsed in the background.
1007 #[cfg(any(test, feature = "test-support"))]
1008 pub fn is_parsing(&self) -> bool {
1009 self.parsing_in_background
1010 }
1011
1012 /// Indicates whether the buffer contains any regions that may be
1013 /// written in a language that hasn't been loaded yet.
1014 pub fn contains_unknown_injections(&self) -> bool {
1015 self.syntax_map.lock().contains_unknown_injections()
1016 }
1017
1018 #[cfg(test)]
1019 pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
1020 self.sync_parse_timeout = timeout;
1021 }
1022
1023 /// Called after an edit to synchronize the buffer's main parse tree with
1024 /// the buffer's new underlying state.
1025 ///
1026 /// Locks the syntax map and interpolates the edits since the last reparse
1027 /// into the foreground syntax tree.
1028 ///
1029 /// Then takes a stable snapshot of the syntax map before unlocking it.
1030 /// The snapshot with the interpolated edits is sent to a background thread,
1031 /// where we ask Tree-sitter to perform an incremental parse.
1032 ///
    /// Meanwhile, in the foreground, we block the main thread for a short
    /// timeout (`sync_parse_timeout`, one millisecond by default) waiting for
    /// the parse to complete. If it finishes within that window, we apply the
    /// result synchronously.
    ///
    /// If we time out waiting for the parse, we keep the interpolated tree in
    /// the foreground and spawn a second task that waits for the background
    /// parse to finish. When it does, it calls back into the main thread and
    /// assigns the new parse state.
1041 ///
1042 /// If the buffer or grammar changed since the start of the background parse,
1043 /// initiate an additional reparse recursively. To avoid concurrent parses
1044 /// for the same buffer, we only initiate a new parse if we are not already
1045 /// parsing in the background.
1046 pub fn reparse(&mut self, cx: &mut ModelContext<Self>) {
1047 if self.parsing_in_background {
1048 return;
1049 }
1050 let language = if let Some(language) = self.language.clone() {
1051 language
1052 } else {
1053 return;
1054 };
1055
1056 let text = self.text_snapshot();
1057 let parsed_version = self.version();
1058
1059 let mut syntax_map = self.syntax_map.lock();
1060 syntax_map.interpolate(&text);
1061 let language_registry = syntax_map.language_registry();
1062 let mut syntax_snapshot = syntax_map.snapshot();
1063 drop(syntax_map);
1064
1065 let parse_task = cx.background_executor().spawn({
1066 let language = language.clone();
1067 let language_registry = language_registry.clone();
1068 async move {
1069 syntax_snapshot.reparse(&text, language_registry, language);
1070 syntax_snapshot
1071 }
1072 });
1073
1074 self.parse_status.0.send(ParseStatus::Parsing).unwrap();
1075 match cx
1076 .background_executor()
1077 .block_with_timeout(self.sync_parse_timeout, parse_task)
1078 {
1079 Ok(new_syntax_snapshot) => {
1080 self.did_finish_parsing(new_syntax_snapshot, cx);
1081 return;
1082 }
1083 Err(parse_task) => {
1084 self.parsing_in_background = true;
1085 cx.spawn(move |this, mut cx| async move {
1086 let new_syntax_map = parse_task.await;
1087 this.update(&mut cx, move |this, cx| {
1088 let grammar_changed =
1089 this.language.as_ref().map_or(true, |current_language| {
1090 !Arc::ptr_eq(&language, current_language)
1091 });
1092 let language_registry_changed = new_syntax_map
1093 .contains_unknown_injections()
1094 && language_registry.map_or(false, |registry| {
1095 registry.version() != new_syntax_map.language_registry_version()
1096 });
1097 let parse_again = language_registry_changed
1098 || grammar_changed
1099 || this.version.changed_since(&parsed_version);
1100 this.did_finish_parsing(new_syntax_map, cx);
1101 this.parsing_in_background = false;
1102 if parse_again {
1103 this.reparse(cx);
1104 }
1105 })
1106 .ok();
1107 })
1108 .detach();
1109 }
1110 }
1111 }
1112
1113 fn did_finish_parsing(&mut self, syntax_snapshot: SyntaxSnapshot, cx: &mut ModelContext<Self>) {
1114 self.non_text_state_update_count += 1;
1115 self.syntax_map.lock().did_parse(syntax_snapshot);
1116 self.request_autoindent(cx);
1117 self.parse_status.0.send(ParseStatus::Idle).unwrap();
1118 cx.emit(Event::Reparsed);
1119 cx.notify();
1120 }
1121
1122 pub fn parse_status(&self) -> watch::Receiver<ParseStatus> {
1123 self.parse_status.1.clone()
1124 }
1125
1126 /// Assign to the buffer a set of diagnostics created by a given language server.
1127 pub fn update_diagnostics(
1128 &mut self,
1129 server_id: LanguageServerId,
1130 diagnostics: DiagnosticSet,
1131 cx: &mut ModelContext<Self>,
1132 ) {
1133 let lamport_timestamp = self.text.lamport_clock.tick();
1134 let op = Operation::UpdateDiagnostics {
1135 server_id,
1136 diagnostics: diagnostics.iter().cloned().collect(),
1137 lamport_timestamp,
1138 };
1139 self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
1140 self.send_operation(op, cx);
1141 }
1142
1143 fn request_autoindent(&mut self, cx: &mut ModelContext<Self>) {
1144 if let Some(indent_sizes) = self.compute_autoindents() {
1145 let indent_sizes = cx.background_executor().spawn(indent_sizes);
1146 match cx
1147 .background_executor()
1148 .block_with_timeout(Duration::from_micros(500), indent_sizes)
1149 {
1150 Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
1151 Err(indent_sizes) => {
1152 self.pending_autoindent = Some(cx.spawn(|this, mut cx| async move {
1153 let indent_sizes = indent_sizes.await;
1154 this.update(&mut cx, |this, cx| {
1155 this.apply_autoindents(indent_sizes, cx);
1156 })
1157 .ok();
1158 }));
1159 }
1160 }
1161 } else {
1162 self.autoindent_requests.clear();
1163 }
1164 }
1165
1166 fn compute_autoindents(&self) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>>> {
1167 let max_rows_between_yields = 100;
1168 let snapshot = self.snapshot();
1169 if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
1170 return None;
1171 }
1172
1173 let autoindent_requests = self.autoindent_requests.clone();
1174 Some(async move {
1175 let mut indent_sizes = BTreeMap::new();
1176 for request in autoindent_requests {
1177 // Resolve each edited range to its row in the current buffer and in the
1178 // buffer before this batch of edits.
1179 let mut row_ranges = Vec::new();
1180 let mut old_to_new_rows = BTreeMap::new();
1181 let mut language_indent_sizes_by_new_row = Vec::new();
1182 for entry in &request.entries {
1183 let position = entry.range.start;
1184 let new_row = position.to_point(&snapshot).row;
1185 let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
1186 language_indent_sizes_by_new_row.push((new_row, entry.indent_size));
1187
1188 if !entry.first_line_is_new {
1189 let old_row = position.to_point(&request.before_edit).row;
1190 old_to_new_rows.insert(old_row, new_row);
1191 }
1192 row_ranges.push((new_row..new_end_row, entry.original_indent_column));
1193 }
1194
1195 // Build a map containing the suggested indentation for each of the edited lines
1196 // with respect to the state of the buffer before these edits. This map is keyed
1197 // by the rows for these lines in the current state of the buffer.
1198 let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
1199 let old_edited_ranges =
1200 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
1201 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1202 let mut language_indent_size = IndentSize::default();
1203 for old_edited_range in old_edited_ranges {
1204 let suggestions = request
1205 .before_edit
1206 .suggest_autoindents(old_edited_range.clone())
1207 .into_iter()
1208 .flatten();
1209 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
1210 if let Some(suggestion) = suggestion {
1211 let new_row = *old_to_new_rows.get(&old_row).unwrap();
1212
1213 // Find the indent size based on the language for this row.
1214 while let Some((row, size)) = language_indent_sizes.peek() {
1215 if *row > new_row {
1216 break;
1217 }
1218 language_indent_size = *size;
1219 language_indent_sizes.next();
1220 }
1221
1222 let suggested_indent = old_to_new_rows
1223 .get(&suggestion.basis_row)
1224 .and_then(|from_row| {
1225 Some(old_suggestions.get(from_row).copied()?.0)
1226 })
1227 .unwrap_or_else(|| {
1228 request
1229 .before_edit
1230 .indent_size_for_line(suggestion.basis_row)
1231 })
1232 .with_delta(suggestion.delta, language_indent_size);
1233 old_suggestions
1234 .insert(new_row, (suggested_indent, suggestion.within_error));
1235 }
1236 }
1237 yield_now().await;
1238 }
1239
1240 // In block mode, only compute indentation suggestions for the first line
1241 // of each insertion. Otherwise, compute suggestions for every inserted line.
1242 let new_edited_row_ranges = contiguous_ranges(
1243 row_ranges.iter().flat_map(|(range, _)| {
1244 if request.is_block_mode {
1245 range.start..range.start + 1
1246 } else {
1247 range.clone()
1248 }
1249 }),
1250 max_rows_between_yields,
1251 );
1252
1253 // Compute new suggestions for each line, but only include them in the result
1254 // if they differ from the old suggestion for that line.
1255 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1256 let mut language_indent_size = IndentSize::default();
1257 for new_edited_row_range in new_edited_row_ranges {
1258 let suggestions = snapshot
1259 .suggest_autoindents(new_edited_row_range.clone())
1260 .into_iter()
1261 .flatten();
1262 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
1263 if let Some(suggestion) = suggestion {
1264 // Find the indent size based on the language for this row.
1265 while let Some((row, size)) = language_indent_sizes.peek() {
1266 if *row > new_row {
1267 break;
1268 }
1269 language_indent_size = *size;
1270 language_indent_sizes.next();
1271 }
1272
1273 let suggested_indent = indent_sizes
1274 .get(&suggestion.basis_row)
1275 .copied()
1276 .unwrap_or_else(|| {
1277 snapshot.indent_size_for_line(suggestion.basis_row)
1278 })
1279 .with_delta(suggestion.delta, language_indent_size);
1280 if old_suggestions.get(&new_row).map_or(
1281 true,
1282 |(old_indentation, was_within_error)| {
1283 suggested_indent != *old_indentation
1284 && (!suggestion.within_error || *was_within_error)
1285 },
1286 ) {
1287 indent_sizes.insert(new_row, suggested_indent);
1288 }
1289 }
1290 }
1291 yield_now().await;
1292 }
1293
1294 // For each block of inserted text, adjust the indentation of the remaining
1295 // lines of the block by the same amount as the first line was adjusted.
1296 if request.is_block_mode {
1297 for (row_range, original_indent_column) in
1298 row_ranges
1299 .into_iter()
1300 .filter_map(|(range, original_indent_column)| {
1301 if range.len() > 1 {
1302 Some((range, original_indent_column?))
1303 } else {
1304 None
1305 }
1306 })
1307 {
1308 let new_indent = indent_sizes
1309 .get(&row_range.start)
1310 .copied()
1311 .unwrap_or_else(|| snapshot.indent_size_for_line(row_range.start));
1312 let delta = new_indent.len as i64 - original_indent_column as i64;
1313 if delta != 0 {
1314 for row in row_range.skip(1) {
1315 indent_sizes.entry(row).or_insert_with(|| {
1316 let mut size = snapshot.indent_size_for_line(row);
1317 if size.kind == new_indent.kind {
1318 match delta.cmp(&0) {
1319 Ordering::Greater => size.len += delta as u32,
1320 Ordering::Less => {
1321 size.len = size.len.saturating_sub(-delta as u32)
1322 }
1323 Ordering::Equal => {}
1324 }
1325 }
1326 size
1327 });
1328 }
1329 }
1330 }
1331 }
1332 }
1333
1334 indent_sizes
1335 })
1336 }
1337
1338 fn apply_autoindents(
1339 &mut self,
1340 indent_sizes: BTreeMap<u32, IndentSize>,
1341 cx: &mut ModelContext<Self>,
1342 ) {
1343 self.autoindent_requests.clear();
1344
1345 let edits: Vec<_> = indent_sizes
1346 .into_iter()
1347 .filter_map(|(row, indent_size)| {
1348 let current_size = indent_size_for_line(self, row);
1349 Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
1350 })
1351 .collect();
1352
1353 self.edit(edits, None, cx);
1354 }
1355
1356 /// Create a minimal edit that will cause the given row to be indented
1357 /// with the given size. After applying this edit, the length of the line
1358 /// will always be at least `new_size.len`.
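    ///
    /// An illustrative sketch (not a compiled doctest): growing the
    /// indentation of row 5 from two spaces to four inserts two spaces at the
    /// start of that line.
    ///
    /// ```ignore
    /// let current = IndentSize { len: 2, kind: IndentKind::Space };
    /// let desired = IndentSize { len: 4, kind: IndentKind::Space };
    /// let edit = Buffer::edit_for_indent_size_adjustment(5, current, desired);
    /// assert_eq!(
    ///     edit,
    ///     Some((Point::new(5, 0)..Point::new(5, 0), "  ".to_string()))
    /// );
    /// ```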
1359 pub fn edit_for_indent_size_adjustment(
1360 row: u32,
1361 current_size: IndentSize,
1362 new_size: IndentSize,
1363 ) -> Option<(Range<Point>, String)> {
1364 if new_size.kind == current_size.kind {
            match new_size.len.cmp(&current_size.len) {
1366 Ordering::Greater => {
1367 let point = Point::new(row, 0);
1368 Some((
1369 point..point,
1370 iter::repeat(new_size.char())
1371 .take((new_size.len - current_size.len) as usize)
1372 .collect::<String>(),
1373 ))
1374 }
1375
1376 Ordering::Less => Some((
1377 Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
1378 String::new(),
1379 )),
1380
1381 Ordering::Equal => None,
1382 }
1383 } else {
1384 Some((
1385 Point::new(row, 0)..Point::new(row, current_size.len),
1386 iter::repeat(new_size.char())
1387 .take(new_size.len as usize)
1388 .collect::<String>(),
1389 ))
1390 }
1391 }
1392
1393 /// Spawns a background task that asynchronously computes a `Diff` between the buffer's text
1394 /// and the given new text.
1395 pub fn diff(&self, mut new_text: String, cx: &AppContext) -> Task<Diff> {
1396 let old_text = self.as_rope().clone();
1397 let base_version = self.version();
1398 cx.background_executor()
1399 .spawn_labeled(*BUFFER_DIFF_TASK, async move {
1400 let old_text = old_text.to_string();
1401 let line_ending = LineEnding::detect(&new_text);
1402 LineEnding::normalize(&mut new_text);
1403
1404 let diff = TextDiff::from_chars(old_text.as_str(), new_text.as_str());
1405 let empty: Arc<str> = "".into();
1406
1407 let mut edits = Vec::new();
1408 let mut old_offset = 0;
1409 let mut new_offset = 0;
1410 let mut last_edit: Option<(Range<usize>, Range<usize>)> = None;
1411 for change in diff.iter_all_changes().map(Some).chain([None]) {
1412 if let Some(change) = &change {
1413 let len = change.value().len();
1414 match change.tag() {
1415 ChangeTag::Equal => {
1416 old_offset += len;
1417 new_offset += len;
1418 }
1419 ChangeTag::Delete => {
1420 let old_end_offset = old_offset + len;
1421 if let Some((last_old_range, _)) = &mut last_edit {
1422 last_old_range.end = old_end_offset;
1423 } else {
1424 last_edit =
1425 Some((old_offset..old_end_offset, new_offset..new_offset));
1426 }
1427 old_offset = old_end_offset;
1428 }
1429 ChangeTag::Insert => {
1430 let new_end_offset = new_offset + len;
1431 if let Some((_, last_new_range)) = &mut last_edit {
1432 last_new_range.end = new_end_offset;
1433 } else {
1434 last_edit =
1435 Some((old_offset..old_offset, new_offset..new_end_offset));
1436 }
1437 new_offset = new_end_offset;
1438 }
1439 }
1440 }
1441
1442 if let Some((old_range, new_range)) = &last_edit {
1443 if old_offset > old_range.end
1444 || new_offset > new_range.end
1445 || change.is_none()
1446 {
1447 let text = if new_range.is_empty() {
1448 empty.clone()
1449 } else {
1450 new_text[new_range.clone()].into()
1451 };
1452 edits.push((old_range.clone(), text));
1453 last_edit.take();
1454 }
1455 }
1456 }
1457
1458 Diff {
1459 base_version,
1460 line_ending,
1461 edits,
1462 }
1463 })
1464 }
1465
    /// Spawns a background task that searches the buffer for any whitespace
    /// at the ends of lines, and returns a `Diff` that removes that whitespace.
1468 pub fn remove_trailing_whitespace(&self, cx: &AppContext) -> Task<Diff> {
1469 let old_text = self.as_rope().clone();
1470 let line_ending = self.line_ending();
1471 let base_version = self.version();
1472 cx.background_executor().spawn(async move {
1473 let ranges = trailing_whitespace_ranges(&old_text);
1474 let empty = Arc::<str>::from("");
1475 Diff {
1476 base_version,
1477 line_ending,
1478 edits: ranges
1479 .into_iter()
1480 .map(|range| (range, empty.clone()))
1481 .collect(),
1482 }
1483 })
1484 }
1485
1486 /// Ensures that the buffer ends with a single newline character, and
1487 /// no other whitespace.
1488 pub fn ensure_final_newline(&mut self, cx: &mut ModelContext<Self>) {
1489 let len = self.len();
1490 let mut offset = len;
1491 for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
1492 let non_whitespace_len = chunk
1493 .trim_end_matches(|c: char| c.is_ascii_whitespace())
1494 .len();
1495 offset -= chunk.len();
1496 offset += non_whitespace_len;
1497 if non_whitespace_len != 0 {
1498 if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
1499 return;
1500 }
1501 break;
1502 }
1503 }
1504 self.edit([(offset..len, "\n")], None, cx);
1505 }
1506
    /// Applies a diff to the buffer. If the buffer has changed since the given diff was
    /// calculated, the diff is adjusted to account for those changes, and any parts of
    /// the diff that conflict with those changes are discarded.
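    ///
    /// A sketch of the intended pairing with [`Buffer::diff`] (assuming an
    /// `AsyncAppContext` named `cx` and a `Model<Buffer>` handle named
    /// `buffer`):
    ///
    /// ```ignore
    /// let diff = buffer
    ///     .update(&mut cx, |buffer, cx| buffer.diff(new_text, cx))?
    ///     .await;
    /// buffer.update(&mut cx, |buffer, cx| {
    ///     buffer.apply_diff(diff, cx);
    /// })?;
    /// ```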
1510 pub fn apply_diff(&mut self, diff: Diff, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
1511 // Check for any edits to the buffer that have occurred since this diff
1512 // was computed.
1513 let snapshot = self.snapshot();
1514 let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
1515 let mut delta = 0;
1516 let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
1517 while let Some(edit_since) = edits_since.peek() {
1518 // If the edit occurs after a diff hunk, then it does not
1519 // affect that hunk.
1520 if edit_since.old.start > range.end {
1521 break;
1522 }
1523 // If the edit precedes the diff hunk, then adjust the hunk
1524 // to reflect the edit.
1525 else if edit_since.old.end < range.start {
1526 delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
1527 edits_since.next();
1528 }
1529 // If the edit intersects a diff hunk, then discard that hunk.
1530 else {
1531 return None;
1532 }
1533 }
1534
1535 let start = (range.start as i64 + delta) as usize;
1536 let end = (range.end as i64 + delta) as usize;
1537 Some((start..end, new_text))
1538 });
1539
1540 self.start_transaction();
1541 self.text.set_line_ending(diff.line_ending);
1542 self.edit(adjusted_edits, None, cx);
1543 self.end_transaction(cx)
1544 }
1545
1546 fn has_unsaved_edits(&self) -> bool {
1547 let (last_version, has_unsaved_edits) = self.has_unsaved_edits.take();
1548
1549 if last_version == self.version {
1550 self.has_unsaved_edits
1551 .set((last_version, has_unsaved_edits));
1552 return has_unsaved_edits;
1553 }
1554
1555 let has_edits = self.has_edits_since(&self.saved_version);
1556 self.has_unsaved_edits
1557 .set((self.version.clone(), has_edits));
1558 has_edits
1559 }
1560
1561 /// Checks if the buffer has unsaved changes.
1562 pub fn is_dirty(&self) -> bool {
1563 self.has_conflict
1564 || self.has_unsaved_edits()
1565 || self
1566 .file
1567 .as_ref()
1568 .map_or(false, |file| file.is_deleted() || !file.is_created())
1569 }
1570
1571 /// Checks if the buffer and its file have both changed since the buffer
1572 /// was last saved or reloaded.
1573 pub fn has_conflict(&self) -> bool {
1574 self.has_conflict
1575 || self.file.as_ref().map_or(false, |file| {
1576 file.mtime() > self.saved_mtime && self.has_unsaved_edits()
1577 })
1578 }
1579
1580 /// Gets a [`Subscription`] that tracks all of the changes to the buffer's text.
1581 pub fn subscribe(&mut self) -> Subscription {
1582 self.text.subscribe()
1583 }
1584
1585 /// Starts a transaction, if one is not already in-progress. When undoing or
1586 /// redoing edits, all of the edits performed within a transaction are undone
1587 /// or redone together.
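    ///
    /// A sketch of grouping several edits into a single undoable step
    /// (assuming `buffer: &mut Buffer` inside a model update):
    ///
    /// ```ignore
    /// buffer.start_transaction();
    /// buffer.edit([(0..0, "// header\n")], None, cx);
    /// let end = buffer.len();
    /// buffer.edit([(end..end, "\n// footer\n")], None, cx);
    /// buffer.end_transaction(cx);
    /// // Undoing now reverts both edits together.
    /// ```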
1588 pub fn start_transaction(&mut self) -> Option<TransactionId> {
1589 self.start_transaction_at(Instant::now())
1590 }
1591
1592 /// Starts a transaction, providing the current time. Subsequent transactions
1593 /// that occur within a short period of time will be grouped together. This
1594 /// is controlled by the buffer's undo grouping duration.
1595 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
1596 self.transaction_depth += 1;
1597 if self.was_dirty_before_starting_transaction.is_none() {
1598 self.was_dirty_before_starting_transaction = Some(self.is_dirty());
1599 }
1600 self.text.start_transaction_at(now)
1601 }
1602
1603 /// Terminates the current transaction, if this is the outermost transaction.
1604 pub fn end_transaction(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
1605 self.end_transaction_at(Instant::now(), cx)
1606 }
1607
1608 /// Terminates the current transaction, providing the current time. Subsequent transactions
1609 /// that occur within a short period of time will be grouped together. This
1610 /// is controlled by the buffer's undo grouping duration.
1611 pub fn end_transaction_at(
1612 &mut self,
1613 now: Instant,
1614 cx: &mut ModelContext<Self>,
1615 ) -> Option<TransactionId> {
1616 assert!(self.transaction_depth > 0);
1617 self.transaction_depth -= 1;
1618 let was_dirty = if self.transaction_depth == 0 {
1619 self.was_dirty_before_starting_transaction.take().unwrap()
1620 } else {
1621 false
1622 };
1623 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
1624 self.did_edit(&start_version, was_dirty, cx);
1625 Some(transaction_id)
1626 } else {
1627 None
1628 }
1629 }
1630
1631 /// Manually add a transaction to the buffer's undo history.
1632 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
1633 self.text.push_transaction(transaction, now);
1634 }
1635
1636 /// Prevent the last transaction from being grouped with any subsequent transactions,
    /// even if they occur within the buffer's undo grouping duration.
1638 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
1639 self.text.finalize_last_transaction()
1640 }
1641
1642 /// Manually group all changes since a given transaction.
1643 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
1644 self.text.group_until_transaction(transaction_id);
1645 }
1646
    /// Manually remove a transaction from the buffer's undo history.
1648 pub fn forget_transaction(&mut self, transaction_id: TransactionId) {
1649 self.text.forget_transaction(transaction_id);
1650 }
1651
1652 /// Manually merge two adjacent transactions in the buffer's undo history.
1653 pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
1654 self.text.merge_transactions(transaction, destination);
1655 }
1656
1657 /// Waits for the buffer to receive operations with the given timestamps.
1658 pub fn wait_for_edits(
1659 &mut self,
1660 edit_ids: impl IntoIterator<Item = clock::Lamport>,
1661 ) -> impl Future<Output = Result<()>> {
1662 self.text.wait_for_edits(edit_ids)
1663 }
1664
1665 /// Waits for the buffer to receive the operations necessary for resolving the given anchors.
1666 pub fn wait_for_anchors(
1667 &mut self,
1668 anchors: impl IntoIterator<Item = Anchor>,
1669 ) -> impl 'static + Future<Output = Result<()>> {
1670 self.text.wait_for_anchors(anchors)
1671 }
1672
1673 /// Waits for the buffer to receive operations up to the given version.
1674 pub fn wait_for_version(&mut self, version: clock::Global) -> impl Future<Output = Result<()>> {
1675 self.text.wait_for_version(version)
1676 }
1677
1678 /// Forces all futures returned by [`Buffer::wait_for_version`], [`Buffer::wait_for_edits`], or
    /// [`Buffer::wait_for_anchors`] to resolve with an error.
1680 pub fn give_up_waiting(&mut self) {
1681 self.text.give_up_waiting();
1682 }
1683
    /// Stores a set of selections that should be broadcast to all of the buffer's replicas.
1685 pub fn set_active_selections(
1686 &mut self,
1687 selections: Arc<[Selection<Anchor>]>,
1688 line_mode: bool,
1689 cursor_shape: CursorShape,
1690 cx: &mut ModelContext<Self>,
1691 ) {
1692 let lamport_timestamp = self.text.lamport_clock.tick();
1693 self.remote_selections.insert(
1694 self.text.replica_id(),
1695 SelectionSet {
1696 selections: selections.clone(),
1697 lamport_timestamp,
1698 line_mode,
1699 cursor_shape,
1700 },
1701 );
1702 self.send_operation(
1703 Operation::UpdateSelections {
1704 selections,
1705 line_mode,
1706 lamport_timestamp,
1707 cursor_shape,
1708 },
1709 cx,
1710 );
1711 self.non_text_state_update_count += 1;
1712 cx.notify();
1713 }
1714
1715 /// Clears the selections, so that other replicas of the buffer do not see any selections for
1716 /// this replica.
1717 pub fn remove_active_selections(&mut self, cx: &mut ModelContext<Self>) {
1718 if self
1719 .remote_selections
1720 .get(&self.text.replica_id())
1721 .map_or(true, |set| !set.selections.is_empty())
1722 {
1723 self.set_active_selections(Arc::from([]), false, Default::default(), cx);
1724 }
1725 }
1726
1727 /// Replaces the buffer's entire text.
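    ///
    /// # Example
    ///
    /// A hedged sketch, assuming a `buffer` and a `cx: &mut ModelContext<Buffer>`:
    ///
    /// ```ignore
    /// buffer.set_text("fn main() {}\n", cx);
    /// assert_eq!(buffer.text(), "fn main() {}\n");
    /// ```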
1728 pub fn set_text<T>(&mut self, text: T, cx: &mut ModelContext<Self>) -> Option<clock::Lamport>
1729 where
1730 T: Into<Arc<str>>,
1731 {
1732 self.autoindent_requests.clear();
1733 self.edit([(0..self.len(), text)], None, cx)
1734 }
1735
1736 /// Applies the given edits to the buffer. Each edit is specified as a range of text to
1737 /// delete, and a string of text to insert at that location.
1738 ///
1739 /// If an [`AutoindentMode`] is provided, then the buffer will enqueue an auto-indent
1740 /// request for the edited ranges, which will be processed when the buffer finishes
1741 /// parsing.
1742 ///
    /// Parsing takes place at the end of a transaction, and may be performed
    /// synchronously or asynchronously, depending on the changes.
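    ///
    /// # Example
    ///
    /// A hedged sketch, assuming a `buffer` whose contents are `"let x = 1;"`
    /// and a `cx: &mut ModelContext<Buffer>`:
    ///
    /// ```ignore
    /// // Replace the byte range covering `1` with `42`, without auto-indent.
    /// let edit_id = buffer.edit([(8..9, "42")], None, cx);
    /// assert!(edit_id.is_some());
    /// assert_eq!(buffer.text(), "let x = 42;");
    /// ```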
1745 pub fn edit<I, S, T>(
1746 &mut self,
1747 edits_iter: I,
1748 autoindent_mode: Option<AutoindentMode>,
1749 cx: &mut ModelContext<Self>,
1750 ) -> Option<clock::Lamport>
1751 where
1752 I: IntoIterator<Item = (Range<S>, T)>,
1753 S: ToOffset,
1754 T: Into<Arc<str>>,
1755 {
1756 // Skip invalid edits and coalesce contiguous ones.
1757 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
1758 for (range, new_text) in edits_iter {
1759 let mut range = range.start.to_offset(self)..range.end.to_offset(self);
1760 if range.start > range.end {
1761 mem::swap(&mut range.start, &mut range.end);
1762 }
1763 let new_text = new_text.into();
1764 if !new_text.is_empty() || !range.is_empty() {
1765 if let Some((prev_range, prev_text)) = edits.last_mut() {
1766 if prev_range.end >= range.start {
1767 prev_range.end = cmp::max(prev_range.end, range.end);
1768 *prev_text = format!("{prev_text}{new_text}").into();
1769 } else {
1770 edits.push((range, new_text));
1771 }
1772 } else {
1773 edits.push((range, new_text));
1774 }
1775 }
1776 }
1777 if edits.is_empty() {
1778 return None;
1779 }
1780
1781 self.start_transaction();
1782 self.pending_autoindent.take();
1783 let autoindent_request = autoindent_mode
1784 .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
1785
1786 let edit_operation = self.text.edit(edits.iter().cloned());
1787 let edit_id = edit_operation.timestamp();
1788
1789 if let Some((before_edit, mode)) = autoindent_request {
1790 let mut delta = 0isize;
1791 let entries = edits
1792 .into_iter()
1793 .enumerate()
1794 .zip(&edit_operation.as_edit().unwrap().new_text)
1795 .map(|((ix, (range, _)), new_text)| {
1796 let new_text_length = new_text.len();
1797 let old_start = range.start.to_point(&before_edit);
1798 let new_start = (delta + range.start as isize) as usize;
1799 delta += new_text_length as isize - (range.end as isize - range.start as isize);
1800
1801 let mut range_of_insertion_to_indent = 0..new_text_length;
1802 let mut first_line_is_new = false;
1803 let mut original_indent_column = None;
1804
1805 // When inserting an entire line at the beginning of an existing line,
1806 // treat the insertion as new.
1807 if new_text.contains('\n')
1808 && old_start.column <= before_edit.indent_size_for_line(old_start.row).len
1809 {
1810 first_line_is_new = true;
1811 }
1812
1813 // When inserting text starting with a newline, avoid auto-indenting the
1814 // previous line.
1815 if new_text.starts_with('\n') {
1816 range_of_insertion_to_indent.start += 1;
1817 first_line_is_new = true;
1818 }
1819
1820 // Avoid auto-indenting after the insertion.
1821 if let AutoindentMode::Block {
1822 original_indent_columns,
1823 } = &mode
1824 {
1825 original_indent_column =
1826 Some(original_indent_columns.get(ix).copied().unwrap_or_else(|| {
1827 indent_size_for_text(
1828 new_text[range_of_insertion_to_indent.clone()].chars(),
1829 )
1830 .len
1831 }));
1832 if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
1833 range_of_insertion_to_indent.end -= 1;
1834 }
1835 }
1836
1837 AutoindentRequestEntry {
1838 first_line_is_new,
1839 original_indent_column,
1840 indent_size: before_edit.language_indent_size_at(range.start, cx),
1841 range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
1842 ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
1843 }
1844 })
1845 .collect();
1846
1847 self.autoindent_requests.push(Arc::new(AutoindentRequest {
1848 before_edit,
1849 entries,
1850 is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
1851 }));
1852 }
1853
1854 self.end_transaction(cx);
1855 self.send_operation(Operation::Buffer(edit_operation), cx);
1856 Some(edit_id)
1857 }
1858
1859 fn did_edit(
1860 &mut self,
1861 old_version: &clock::Global,
1862 was_dirty: bool,
1863 cx: &mut ModelContext<Self>,
1864 ) {
1865 if self.edits_since::<usize>(old_version).next().is_none() {
1866 return;
1867 }
1868
1869 self.reparse(cx);
1870
1871 cx.emit(Event::Edited);
1872 if was_dirty != self.is_dirty() {
1873 cx.emit(Event::DirtyChanged);
1874 }
1875 cx.notify();
1876 }
1877
1878 /// Applies the given remote operations to the buffer.
1879 pub fn apply_ops<I: IntoIterator<Item = Operation>>(
1880 &mut self,
1881 ops: I,
1882 cx: &mut ModelContext<Self>,
1883 ) -> Result<()> {
1884 self.pending_autoindent.take();
1885 let was_dirty = self.is_dirty();
1886 let old_version = self.version.clone();
1887 let mut deferred_ops = Vec::new();
1888 let buffer_ops = ops
1889 .into_iter()
1890 .filter_map(|op| match op {
1891 Operation::Buffer(op) => Some(op),
1892 _ => {
1893 if self.can_apply_op(&op) {
1894 self.apply_op(op, cx);
1895 } else {
1896 deferred_ops.push(op);
1897 }
1898 None
1899 }
1900 })
1901 .collect::<Vec<_>>();
1902 self.text.apply_ops(buffer_ops)?;
1903 self.deferred_ops.insert(deferred_ops);
1904 self.flush_deferred_ops(cx);
1905 self.did_edit(&old_version, was_dirty, cx);
1906 // Notify independently of whether the buffer was edited as the operations could include a
1907 // selection update.
1908 cx.notify();
1909 Ok(())
1910 }
1911
1912 fn flush_deferred_ops(&mut self, cx: &mut ModelContext<Self>) {
1913 let mut deferred_ops = Vec::new();
1914 for op in self.deferred_ops.drain().iter().cloned() {
1915 if self.can_apply_op(&op) {
1916 self.apply_op(op, cx);
1917 } else {
1918 deferred_ops.push(op);
1919 }
1920 }
1921 self.deferred_ops.insert(deferred_ops);
1922 }
1923
1924 pub fn has_deferred_ops(&self) -> bool {
1925 !self.deferred_ops.is_empty() || self.text.has_deferred_ops()
1926 }
1927
1928 fn can_apply_op(&self, operation: &Operation) -> bool {
1929 match operation {
1930 Operation::Buffer(_) => {
1931 unreachable!("buffer operations should never be applied at this layer")
1932 }
1933 Operation::UpdateDiagnostics {
1934 diagnostics: diagnostic_set,
1935 ..
1936 } => diagnostic_set.iter().all(|diagnostic| {
1937 self.text.can_resolve(&diagnostic.range.start)
1938 && self.text.can_resolve(&diagnostic.range.end)
1939 }),
1940 Operation::UpdateSelections { selections, .. } => selections
1941 .iter()
1942 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
1943 Operation::UpdateCompletionTriggers { .. } => true,
1944 }
1945 }
1946
1947 fn apply_op(&mut self, operation: Operation, cx: &mut ModelContext<Self>) {
1948 match operation {
1949 Operation::Buffer(_) => {
1950 unreachable!("buffer operations should never be applied at this layer")
1951 }
1952 Operation::UpdateDiagnostics {
1953 server_id,
1954 diagnostics: diagnostic_set,
1955 lamport_timestamp,
1956 } => {
1957 let snapshot = self.snapshot();
1958 self.apply_diagnostic_update(
1959 server_id,
1960 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
1961 lamport_timestamp,
1962 cx,
1963 );
1964 }
1965 Operation::UpdateSelections {
1966 selections,
1967 lamport_timestamp,
1968 line_mode,
1969 cursor_shape,
1970 } => {
1971 if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id) {
1972 if set.lamport_timestamp > lamport_timestamp {
1973 return;
1974 }
1975 }
1976
1977 self.remote_selections.insert(
1978 lamport_timestamp.replica_id,
1979 SelectionSet {
1980 selections,
1981 lamport_timestamp,
1982 line_mode,
1983 cursor_shape,
1984 },
1985 );
1986 self.text.lamport_clock.observe(lamport_timestamp);
1987 self.non_text_state_update_count += 1;
1988 }
1989 Operation::UpdateCompletionTriggers {
1990 triggers,
1991 lamport_timestamp,
1992 } => {
1993 self.completion_triggers = triggers;
1994 self.text.lamport_clock.observe(lamport_timestamp);
1995 }
1996 }
1997 }
1998
1999 fn apply_diagnostic_update(
2000 &mut self,
2001 server_id: LanguageServerId,
2002 diagnostics: DiagnosticSet,
2003 lamport_timestamp: clock::Lamport,
2004 cx: &mut ModelContext<Self>,
2005 ) {
2006 if lamport_timestamp > self.diagnostics_timestamp {
2007 let ix = self.diagnostics.binary_search_by_key(&server_id, |e| e.0);
2008 if diagnostics.len() == 0 {
2009 if let Ok(ix) = ix {
2010 self.diagnostics.remove(ix);
2011 }
2012 } else {
2013 match ix {
2014 Err(ix) => self.diagnostics.insert(ix, (server_id, diagnostics)),
2015 Ok(ix) => self.diagnostics[ix].1 = diagnostics,
2016 };
2017 }
2018 self.diagnostics_timestamp = lamport_timestamp;
2019 self.non_text_state_update_count += 1;
2020 self.text.lamport_clock.observe(lamport_timestamp);
2021 cx.notify();
2022 cx.emit(Event::DiagnosticsUpdated);
2023 }
2024 }
2025
2026 fn send_operation(&mut self, operation: Operation, cx: &mut ModelContext<Self>) {
2027 cx.emit(Event::Operation(operation));
2028 }
2029
2030 /// Removes the selections for a given peer.
2031 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut ModelContext<Self>) {
2032 self.remote_selections.remove(&replica_id);
2033 cx.notify();
2034 }
2035
2036 /// Undoes the most recent transaction.
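    ///
    /// # Example
    ///
    /// A hedged sketch, assuming an initially empty `buffer` and a
    /// `cx: &mut ModelContext<Buffer>`:
    ///
    /// ```ignore
    /// buffer.edit([(0..0, "hello")], None, cx);
    /// assert_eq!(buffer.text(), "hello");
    /// // Undo the edit transaction that was just created.
    /// let undone = buffer.undo(cx);
    /// assert!(undone.is_some());
    /// assert_eq!(buffer.text(), "");
    /// ```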
2037 pub fn undo(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
2038 let was_dirty = self.is_dirty();
2039 let old_version = self.version.clone();
2040
2041 if let Some((transaction_id, operation)) = self.text.undo() {
2042 self.send_operation(Operation::Buffer(operation), cx);
2043 self.did_edit(&old_version, was_dirty, cx);
2044 Some(transaction_id)
2045 } else {
2046 None
2047 }
2048 }
2049
2050 /// Manually undoes a specific transaction in the buffer's undo history.
2051 pub fn undo_transaction(
2052 &mut self,
2053 transaction_id: TransactionId,
2054 cx: &mut ModelContext<Self>,
2055 ) -> bool {
2056 let was_dirty = self.is_dirty();
2057 let old_version = self.version.clone();
2058 if let Some(operation) = self.text.undo_transaction(transaction_id) {
2059 self.send_operation(Operation::Buffer(operation), cx);
2060 self.did_edit(&old_version, was_dirty, cx);
2061 true
2062 } else {
2063 false
2064 }
2065 }
2066
2067 /// Manually undoes all changes after a given transaction in the buffer's undo history.
2068 pub fn undo_to_transaction(
2069 &mut self,
2070 transaction_id: TransactionId,
2071 cx: &mut ModelContext<Self>,
2072 ) -> bool {
2073 let was_dirty = self.is_dirty();
2074 let old_version = self.version.clone();
2075
2076 let operations = self.text.undo_to_transaction(transaction_id);
2077 let undone = !operations.is_empty();
2078 for operation in operations {
2079 self.send_operation(Operation::Buffer(operation), cx);
2080 }
2081 if undone {
2082 self.did_edit(&old_version, was_dirty, cx)
2083 }
2084 undone
2085 }
2086
    /// Redoes the most recently undone transaction.
2088 pub fn redo(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
2089 let was_dirty = self.is_dirty();
2090 let old_version = self.version.clone();
2091
2092 if let Some((transaction_id, operation)) = self.text.redo() {
2093 self.send_operation(Operation::Buffer(operation), cx);
2094 self.did_edit(&old_version, was_dirty, cx);
2095 Some(transaction_id)
2096 } else {
2097 None
2098 }
2099 }
2100
    /// Manually redoes all changes up to a given transaction in the buffer's redo history.
2102 pub fn redo_to_transaction(
2103 &mut self,
2104 transaction_id: TransactionId,
2105 cx: &mut ModelContext<Self>,
2106 ) -> bool {
2107 let was_dirty = self.is_dirty();
2108 let old_version = self.version.clone();
2109
2110 let operations = self.text.redo_to_transaction(transaction_id);
2111 let redone = !operations.is_empty();
2112 for operation in operations {
2113 self.send_operation(Operation::Buffer(operation), cx);
2114 }
2115 if redone {
2116 self.did_edit(&old_version, was_dirty, cx)
2117 }
2118 redone
2119 }
2120
2121 /// Override current completion triggers with the user-provided completion triggers.
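    ///
    /// # Example
    ///
    /// A hedged sketch, assuming a `buffer` and a `cx: &mut ModelContext<Buffer>`:
    ///
    /// ```ignore
    /// buffer.set_completion_triggers(vec![".".into(), "::".into()], cx);
    /// assert_eq!(buffer.completion_triggers(), &[".".to_string(), "::".to_string()]);
    /// ```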
2122 pub fn set_completion_triggers(&mut self, triggers: Vec<String>, cx: &mut ModelContext<Self>) {
2123 self.completion_triggers.clone_from(&triggers);
2124 self.completion_triggers_timestamp = self.text.lamport_clock.tick();
2125 self.send_operation(
2126 Operation::UpdateCompletionTriggers {
2127 triggers,
2128 lamport_timestamp: self.completion_triggers_timestamp,
2129 },
2130 cx,
2131 );
2132 cx.notify();
2133 }
2134
2135 /// Returns a list of strings which trigger a completion menu for this language.
    /// Usually this is driven by an LSP server, which returns a list of trigger characters for completions.
2137 pub fn completion_triggers(&self) -> &[String] {
2138 &self.completion_triggers
2139 }
2140}
2141
2142#[doc(hidden)]
2143#[cfg(any(test, feature = "test-support"))]
2144impl Buffer {
2145 pub fn edit_via_marked_text(
2146 &mut self,
2147 marked_string: &str,
2148 autoindent_mode: Option<AutoindentMode>,
2149 cx: &mut ModelContext<Self>,
2150 ) {
2151 let edits = self.edits_for_marked_text(marked_string);
2152 self.edit(edits, autoindent_mode, cx);
2153 }
2154
2155 pub fn set_group_interval(&mut self, group_interval: Duration) {
2156 self.text.set_group_interval(group_interval);
2157 }
2158
2159 pub fn randomly_edit<T>(
2160 &mut self,
2161 rng: &mut T,
2162 old_range_count: usize,
2163 cx: &mut ModelContext<Self>,
2164 ) where
2165 T: rand::Rng,
2166 {
2167 let mut edits: Vec<(Range<usize>, String)> = Vec::new();
2168 let mut last_end = None;
2169 for _ in 0..old_range_count {
2170 if last_end.map_or(false, |last_end| last_end >= self.len()) {
2171 break;
2172 }
2173
2174 let new_start = last_end.map_or(0, |last_end| last_end + 1);
2175 let mut range = self.random_byte_range(new_start, rng);
2176 if rng.gen_bool(0.2) {
2177 mem::swap(&mut range.start, &mut range.end);
2178 }
2179 last_end = Some(range.end);
2180
2181 let new_text_len = rng.gen_range(0..10);
2182 let new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
2183
2184 edits.push((range, new_text));
2185 }
2186 log::info!("mutating buffer {} with {:?}", self.replica_id(), edits);
2187 self.edit(edits, None, cx);
2188 }
2189
2190 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut ModelContext<Self>) {
2191 let was_dirty = self.is_dirty();
2192 let old_version = self.version.clone();
2193
2194 let ops = self.text.randomly_undo_redo(rng);
2195 if !ops.is_empty() {
2196 for op in ops {
2197 self.send_operation(Operation::Buffer(op), cx);
2198 self.did_edit(&old_version, was_dirty, cx);
2199 }
2200 }
2201 }
2202}
2203
2204impl EventEmitter<Event> for Buffer {}
2205
2206impl Deref for Buffer {
2207 type Target = TextBuffer;
2208
2209 fn deref(&self) -> &Self::Target {
2210 &self.text
2211 }
2212}
2213
2214impl BufferSnapshot {
    /// Returns [`IndentSize`] for a given line that respects user settings
    /// and language preferences.
    pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
        indent_size_for_line(self, row)
    }

2219 /// Returns [`IndentSize`] for a given position that respects user settings
2220 /// and language preferences.
2221 pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &AppContext) -> IndentSize {
2222 let settings = language_settings(self.language_at(position), self.file(), cx);
2223 if settings.hard_tabs {
2224 IndentSize::tab()
2225 } else {
2226 IndentSize::spaces(settings.tab_size.get())
2227 }
2228 }
2229
2230 /// Retrieve the suggested indent size for all of the given rows. The unit of indentation
2231 /// is passed in as `single_indent_size`.
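    ///
    /// # Example
    ///
    /// A hedged sketch, assuming `snapshot` is a [`BufferSnapshot`] of a parsed
    /// buffer and that four-space indentation is the desired unit:
    ///
    /// ```ignore
    /// let suggestions = snapshot.suggested_indents(0..5, IndentSize::spaces(4));
    /// for (row, indent) in suggestions {
    ///     println!("row {row}: indent by {} columns", indent.len);
    /// }
    /// ```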
2232 pub fn suggested_indents(
2233 &self,
2234 rows: impl Iterator<Item = u32>,
2235 single_indent_size: IndentSize,
2236 ) -> BTreeMap<u32, IndentSize> {
2237 let mut result = BTreeMap::new();
2238
2239 for row_range in contiguous_ranges(rows, 10) {
2240 let suggestions = match self.suggest_autoindents(row_range.clone()) {
2241 Some(suggestions) => suggestions,
2242 _ => break,
2243 };
2244
2245 for (row, suggestion) in row_range.zip(suggestions) {
2246 let indent_size = if let Some(suggestion) = suggestion {
2247 result
2248 .get(&suggestion.basis_row)
2249 .copied()
2250 .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
2251 .with_delta(suggestion.delta, single_indent_size)
2252 } else {
2253 self.indent_size_for_line(row)
2254 };
2255
2256 result.insert(row, indent_size);
2257 }
2258 }
2259
2260 result
2261 }
2262
2263 fn suggest_autoindents(
2264 &self,
2265 row_range: Range<u32>,
2266 ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
2267 let config = &self.language.as_ref()?.config;
2268 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
2269
2270 // Find the suggested indentation ranges based on the syntax tree.
2271 let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
2272 let end = Point::new(row_range.end, 0);
2273 let range = (start..end).to_offset(&self.text);
2274 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2275 Some(&grammar.indents_config.as_ref()?.query)
2276 });
2277 let indent_configs = matches
2278 .grammars()
2279 .iter()
2280 .map(|grammar| grammar.indents_config.as_ref().unwrap())
2281 .collect::<Vec<_>>();
2282
2283 let mut indent_ranges = Vec::<Range<Point>>::new();
2284 let mut outdent_positions = Vec::<Point>::new();
2285 while let Some(mat) = matches.peek() {
2286 let mut start: Option<Point> = None;
2287 let mut end: Option<Point> = None;
2288
2289 let config = &indent_configs[mat.grammar_index];
2290 for capture in mat.captures {
2291 if capture.index == config.indent_capture_ix {
2292 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
2293 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
2294 } else if Some(capture.index) == config.start_capture_ix {
2295 start = Some(Point::from_ts_point(capture.node.end_position()));
2296 } else if Some(capture.index) == config.end_capture_ix {
2297 end = Some(Point::from_ts_point(capture.node.start_position()));
2298 } else if Some(capture.index) == config.outdent_capture_ix {
2299 outdent_positions.push(Point::from_ts_point(capture.node.start_position()));
2300 }
2301 }
2302
2303 matches.advance();
2304 if let Some((start, end)) = start.zip(end) {
2305 if start.row == end.row {
2306 continue;
2307 }
2308
2309 let range = start..end;
2310 match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
2311 Err(ix) => indent_ranges.insert(ix, range),
2312 Ok(ix) => {
2313 let prev_range = &mut indent_ranges[ix];
2314 prev_range.end = prev_range.end.max(range.end);
2315 }
2316 }
2317 }
2318 }
2319
2320 let mut error_ranges = Vec::<Range<Point>>::new();
2321 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2322 Some(&grammar.error_query)
2323 });
2324 while let Some(mat) = matches.peek() {
2325 let node = mat.captures[0].node;
2326 let start = Point::from_ts_point(node.start_position());
2327 let end = Point::from_ts_point(node.end_position());
2328 let range = start..end;
2329 let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
2330 Ok(ix) | Err(ix) => ix,
2331 };
2332 let mut end_ix = ix;
2333 while let Some(existing_range) = error_ranges.get(end_ix) {
2334 if existing_range.end < end {
2335 end_ix += 1;
2336 } else {
2337 break;
2338 }
2339 }
2340 error_ranges.splice(ix..end_ix, [range]);
2341 matches.advance();
2342 }
2343
2344 outdent_positions.sort();
2345 for outdent_position in outdent_positions {
            // Find the innermost indent range containing this outdent_position
            // and set its end to the outdent position.
2348 if let Some(range_to_truncate) = indent_ranges
2349 .iter_mut()
2350 .filter(|indent_range| indent_range.contains(&outdent_position))
2351 .last()
2352 {
2353 range_to_truncate.end = outdent_position;
2354 }
2355 }
2356
        // Find the suggested indentation increases and decreases based on regexes.
2358 let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
2359 self.for_each_line(
2360 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
2361 ..Point::new(row_range.end, 0),
2362 |row, line| {
2363 if config
2364 .decrease_indent_pattern
2365 .as_ref()
2366 .map_or(false, |regex| regex.is_match(line))
2367 {
2368 indent_change_rows.push((row, Ordering::Less));
2369 }
2370 if config
2371 .increase_indent_pattern
2372 .as_ref()
2373 .map_or(false, |regex| regex.is_match(line))
2374 {
2375 indent_change_rows.push((row + 1, Ordering::Greater));
2376 }
2377 },
2378 );
2379
2380 let mut indent_changes = indent_change_rows.into_iter().peekable();
2381 let mut prev_row = if config.auto_indent_using_last_non_empty_line {
2382 prev_non_blank_row.unwrap_or(0)
2383 } else {
2384 row_range.start.saturating_sub(1)
2385 };
2386 let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
2387 Some(row_range.map(move |row| {
2388 let row_start = Point::new(row, self.indent_size_for_line(row).len);
2389
2390 let mut indent_from_prev_row = false;
2391 let mut outdent_from_prev_row = false;
2392 let mut outdent_to_row = u32::MAX;
2393
2394 while let Some((indent_row, delta)) = indent_changes.peek() {
2395 match indent_row.cmp(&row) {
2396 Ordering::Equal => match delta {
2397 Ordering::Less => outdent_from_prev_row = true,
2398 Ordering::Greater => indent_from_prev_row = true,
2399 _ => {}
2400 },
2401
2402 Ordering::Greater => break,
2403 Ordering::Less => {}
2404 }
2405
2406 indent_changes.next();
2407 }
2408
2409 for range in &indent_ranges {
2410 if range.start.row >= row {
2411 break;
2412 }
2413 if range.start.row == prev_row && range.end > row_start {
2414 indent_from_prev_row = true;
2415 }
2416 if range.end > prev_row_start && range.end <= row_start {
2417 outdent_to_row = outdent_to_row.min(range.start.row);
2418 }
2419 }
2420
2421 let within_error = error_ranges
2422 .iter()
2423 .any(|e| e.start.row < row && e.end > row_start);
2424
2425 let suggestion = if outdent_to_row == prev_row
2426 || (outdent_from_prev_row && indent_from_prev_row)
2427 {
2428 Some(IndentSuggestion {
2429 basis_row: prev_row,
2430 delta: Ordering::Equal,
2431 within_error,
2432 })
2433 } else if indent_from_prev_row {
2434 Some(IndentSuggestion {
2435 basis_row: prev_row,
2436 delta: Ordering::Greater,
2437 within_error,
2438 })
2439 } else if outdent_to_row < prev_row {
2440 Some(IndentSuggestion {
2441 basis_row: outdent_to_row,
2442 delta: Ordering::Equal,
2443 within_error,
2444 })
2445 } else if outdent_from_prev_row {
2446 Some(IndentSuggestion {
2447 basis_row: prev_row,
2448 delta: Ordering::Less,
2449 within_error,
2450 })
2451 } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
2452 {
2453 Some(IndentSuggestion {
2454 basis_row: prev_row,
2455 delta: Ordering::Equal,
2456 within_error,
2457 })
2458 } else {
2459 None
2460 };
2461
2462 prev_row = row;
2463 prev_row_start = row_start;
2464 suggestion
2465 }))
2466 }
2467
2468 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
2469 while row > 0 {
2470 row -= 1;
2471 if !self.is_line_blank(row) {
2472 return Some(row);
2473 }
2474 }
2475 None
2476 }
2477
2478 /// Iterates over chunks of text in the given range of the buffer. Text is chunked
2479 /// in an arbitrary way due to being stored in a [`Rope`](text::Rope). The text is also
2480 /// returned in chunks where each chunk has a single syntax highlighting style and
2481 /// diagnostic status.
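    ///
    /// # Example
    ///
    /// A hedged sketch that reassembles the highlighted text of the first 100 bytes,
    /// assuming `snapshot` is a [`BufferSnapshot`] with at least that much text:
    ///
    /// ```ignore
    /// let mut text = String::new();
    /// for chunk in snapshot.chunks(0..100, true) {
    ///     // `chunk.syntax_highlight_id` carries the highlight for this chunk, if any.
    ///     text.push_str(chunk.text);
    /// }
    /// ```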
2482 pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks {
2483 let range = range.start.to_offset(self)..range.end.to_offset(self);
2484
2485 let mut syntax = None;
2486 let mut diagnostic_endpoints = Vec::new();
2487 if language_aware {
2488 let captures = self.syntax.captures(range.clone(), &self.text, |grammar| {
2489 grammar.highlights_query.as_ref()
2490 });
2491 let highlight_maps = captures
2492 .grammars()
2493 .into_iter()
2494 .map(|grammar| grammar.highlight_map())
2495 .collect();
2496 syntax = Some((captures, highlight_maps));
2497 for entry in self.diagnostics_in_range::<_, usize>(range.clone(), false) {
2498 diagnostic_endpoints.push(DiagnosticEndpoint {
2499 offset: entry.range.start,
2500 is_start: true,
2501 severity: entry.diagnostic.severity,
2502 is_unnecessary: entry.diagnostic.is_unnecessary,
2503 });
2504 diagnostic_endpoints.push(DiagnosticEndpoint {
2505 offset: entry.range.end,
2506 is_start: false,
2507 severity: entry.diagnostic.severity,
2508 is_unnecessary: entry.diagnostic.is_unnecessary,
2509 });
2510 }
2511 diagnostic_endpoints
2512 .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
2513 }
2514
2515 BufferChunks::new(self.text.as_rope(), range, syntax, diagnostic_endpoints)
2516 }
2517
2518 /// Invokes the given callback for each line of text in the given range of the buffer.
    /// Uses a callback to avoid allocating a string for each line.
2520 fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
2521 let mut line = String::new();
2522 let mut row = range.start.row;
2523 for chunk in self
2524 .as_rope()
2525 .chunks_in_range(range.to_offset(self))
2526 .chain(["\n"])
2527 {
2528 for (newline_ix, text) in chunk.split('\n').enumerate() {
2529 if newline_ix > 0 {
2530 callback(row, &line);
2531 row += 1;
2532 line.clear();
2533 }
2534 line.push_str(text);
2535 }
2536 }
2537 }
2538
2539 /// Iterates over every [`SyntaxLayer`] in the buffer.
2540 pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayer> + '_ {
2541 self.syntax.layers_for_range(0..self.len(), &self.text)
2542 }
2543
2544 pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayer> {
2545 let offset = position.to_offset(self);
2546 self.syntax
2547 .layers_for_range(offset..offset, &self.text)
2548 .filter(|l| l.node().end_byte() > offset)
2549 .last()
2550 }
2551
    /// Returns the buffer's main [Language].
2553 pub fn language(&self) -> Option<&Arc<Language>> {
2554 self.language.as_ref()
2555 }
2556
2557 /// Returns the [Language] at the given location.
2558 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
2559 self.syntax_layer_at(position)
2560 .map(|info| info.language)
2561 .or(self.language.as_ref())
2562 }
2563
2564 /// Returns the settings for the language at the given location.
2565 pub fn settings_at<'a, D: ToOffset>(
2566 &self,
2567 position: D,
2568 cx: &'a AppContext,
2569 ) -> &'a LanguageSettings {
2570 language_settings(self.language_at(position), self.file.as_ref(), cx)
2571 }
2572
2573 /// Returns the [LanguageScope] at the given location.
2574 pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
2575 let offset = position.to_offset(self);
2576 let mut scope = None;
2577 let mut smallest_range: Option<Range<usize>> = None;
2578
2579 // Use the layer that has the smallest node intersecting the given point.
2580 for layer in self.syntax.layers_for_range(offset..offset, &self.text) {
2581 let mut cursor = layer.node().walk();
2582
2583 let mut range = None;
2584 loop {
2585 let child_range = cursor.node().byte_range();
2586 if !child_range.to_inclusive().contains(&offset) {
2587 break;
2588 }
2589
2590 range = Some(child_range);
2591 if cursor.goto_first_child_for_byte(offset).is_none() {
2592 break;
2593 }
2594 }
2595
2596 if let Some(range) = range {
2597 if smallest_range
2598 .as_ref()
2599 .map_or(true, |smallest_range| range.len() < smallest_range.len())
2600 {
2601 smallest_range = Some(range);
2602 scope = Some(LanguageScope {
2603 language: layer.language.clone(),
2604 override_id: layer.override_id(offset, &self.text),
2605 });
2606 }
2607 }
2608 }
2609
2610 scope.or_else(|| {
2611 self.language.clone().map(|language| LanguageScope {
2612 language,
2613 override_id: None,
2614 })
2615 })
2616 }
2617
2618 /// Returns a tuple of the range and character kind of the word
2619 /// surrounding the given position.
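    ///
    /// # Example
    ///
    /// A hedged sketch, assuming `snapshot` contains the text `"hello world"`:
    ///
    /// ```ignore
    /// // An offset inside "world" expands to the whole word.
    /// let (range, _kind) = snapshot.surrounding_word(8);
    /// assert_eq!(range, 6..11);
    /// ```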
2620 pub fn surrounding_word<T: ToOffset>(&self, start: T) -> (Range<usize>, Option<CharKind>) {
2621 let mut start = start.to_offset(self);
2622 let mut end = start;
2623 let mut next_chars = self.chars_at(start).peekable();
2624 let mut prev_chars = self.reversed_chars_at(start).peekable();
2625
2626 let scope = self.language_scope_at(start);
2627 let kind = |c| char_kind(&scope, c);
2628 let word_kind = cmp::max(
2629 prev_chars.peek().copied().map(kind),
2630 next_chars.peek().copied().map(kind),
2631 );
2632
2633 for ch in prev_chars {
2634 if Some(kind(ch)) == word_kind && ch != '\n' {
2635 start -= ch.len_utf8();
2636 } else {
2637 break;
2638 }
2639 }
2640
2641 for ch in next_chars {
2642 if Some(kind(ch)) == word_kind && ch != '\n' {
2643 end += ch.len_utf8();
2644 } else {
2645 break;
2646 }
2647 }
2648
2649 (start..end, word_kind)
2650 }
2651
    /// Returns the range for the closest syntax node enclosing the given range.
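    ///
    /// # Example
    ///
    /// A hedged sketch, assuming `snapshot` is a parsed Rust buffer containing
    /// `fn main() { foo(); }`:
    ///
    /// ```ignore
    /// // Starting from the empty range inside `foo`, the smallest strictly
    /// // containing node is expected to be the identifier `foo` itself.
    /// let ancestor = snapshot.range_for_syntax_ancestor(13..13);
    /// ```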
2653 pub fn range_for_syntax_ancestor<T: ToOffset>(&self, range: Range<T>) -> Option<Range<usize>> {
2654 let range = range.start.to_offset(self)..range.end.to_offset(self);
2655 let mut result: Option<Range<usize>> = None;
2656 'outer: for layer in self.syntax.layers_for_range(range.clone(), &self.text) {
2657 let mut cursor = layer.node().walk();
2658
2659 // Descend to the first leaf that touches the start of the range,
2660 // and if the range is non-empty, extends beyond the start.
2661 while cursor.goto_first_child_for_byte(range.start).is_some() {
2662 if !range.is_empty() && cursor.node().end_byte() == range.start {
2663 cursor.goto_next_sibling();
2664 }
2665 }
2666
2667 // Ascend to the smallest ancestor that strictly contains the range.
2668 loop {
2669 let node_range = cursor.node().byte_range();
2670 if node_range.start <= range.start
2671 && node_range.end >= range.end
2672 && node_range.len() > range.len()
2673 {
2674 break;
2675 }
2676 if !cursor.goto_parent() {
2677 continue 'outer;
2678 }
2679 }
2680
2681 let left_node = cursor.node();
2682 let mut layer_result = left_node.byte_range();
2683
2684 // For an empty range, try to find another node immediately to the right of the range.
2685 if left_node.end_byte() == range.start {
2686 let mut right_node = None;
2687 while !cursor.goto_next_sibling() {
2688 if !cursor.goto_parent() {
2689 break;
2690 }
2691 }
2692
2693 while cursor.node().start_byte() == range.start {
2694 right_node = Some(cursor.node());
2695 if !cursor.goto_first_child() {
2696 break;
2697 }
2698 }
2699
2700 // If there is a candidate node on both sides of the (empty) range, then
2701 // decide between the two by favoring a named node over an anonymous token.
2702 // If both nodes are the same in that regard, favor the right one.
2703 if let Some(right_node) = right_node {
2704 if right_node.is_named() || !left_node.is_named() {
2705 layer_result = right_node.byte_range();
2706 }
2707 }
2708 }
2709
2710 if let Some(previous_result) = &result {
2711 if previous_result.len() < layer_result.len() {
2712 continue;
2713 }
2714 }
2715 result = Some(layer_result);
2716 }
2717
2718 result
2719 }
2720
2721 /// Returns the outline for the buffer.
2722 ///
2723 /// This method allows passing an optional [SyntaxTheme] to
2724 /// syntax-highlight the returned symbols.
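    ///
    /// # Example
    ///
    /// A hedged sketch, assuming `snapshot` has a language with an outline query
    /// and that [`Outline`] exposes its entries through a public `items` field:
    ///
    /// ```ignore
    /// if let Some(outline) = snapshot.outline(None) {
    ///     for item in &outline.items {
    ///         println!("{}{}", "  ".repeat(item.depth), item.text);
    ///     }
    /// }
    /// ```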
2725 pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Option<Outline<Anchor>> {
2726 self.outline_items_containing(0..self.len(), true, theme)
2727 .map(Outline::new)
2728 }
2729
2730 /// Returns all the symbols that contain the given position.
2731 ///
2732 /// This method allows passing an optional [SyntaxTheme] to
2733 /// syntax-highlight the returned symbols.
2734 pub fn symbols_containing<T: ToOffset>(
2735 &self,
2736 position: T,
2737 theme: Option<&SyntaxTheme>,
2738 ) -> Option<Vec<OutlineItem<Anchor>>> {
2739 let position = position.to_offset(self);
2740 let mut items = self.outline_items_containing(
2741 position.saturating_sub(1)..self.len().min(position + 1),
2742 false,
2743 theme,
2744 )?;
2745 let mut prev_depth = None;
2746 items.retain(|item| {
2747 let result = prev_depth.map_or(true, |prev_depth| item.depth > prev_depth);
2748 prev_depth = Some(item.depth);
2749 result
2750 });
2751 Some(items)
2752 }
2753
2754 pub fn outline_items_containing<T: ToOffset>(
2755 &self,
2756 range: Range<T>,
2757 include_extra_context: bool,
2758 theme: Option<&SyntaxTheme>,
2759 ) -> Option<Vec<OutlineItem<Anchor>>> {
2760 let range = range.to_offset(self);
2761 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2762 grammar.outline_config.as_ref().map(|c| &c.query)
2763 });
2764 let configs = matches
2765 .grammars()
2766 .iter()
2767 .map(|g| g.outline_config.as_ref().unwrap())
2768 .collect::<Vec<_>>();
2769
2770 let mut items = Vec::new();
2771 while let Some(mat) = matches.peek() {
2772 let config = &configs[mat.grammar_index];
2773 let item_node = mat.captures.iter().find_map(|cap| {
2774 if cap.index == config.item_capture_ix {
2775 Some(cap.node)
2776 } else {
2777 None
2778 }
2779 })?;
2780
2781 let item_range = item_node.byte_range();
2782 if item_range.end < range.start || item_range.start > range.end {
2783 matches.advance();
2784 continue;
2785 }
2786
2787 let mut open_index = None;
2788 let mut close_index = None;
2789
2790 let mut buffer_ranges = Vec::new();
2791 for capture in mat.captures {
2792 let node_is_name;
2793 if capture.index == config.name_capture_ix {
2794 node_is_name = true;
2795 } else if Some(capture.index) == config.context_capture_ix
2796 || (Some(capture.index) == config.extra_context_capture_ix
2797 && include_extra_context)
2798 {
2799 node_is_name = false;
2800 } else {
2801 if Some(capture.index) == config.open_capture_ix {
2802 open_index = Some(capture.node.end_byte());
2803 } else if Some(capture.index) == config.close_capture_ix {
2804 close_index = Some(capture.node.start_byte());
2805 }
2806
2807 continue;
2808 }
2809
2810 let mut range = capture.node.start_byte()..capture.node.end_byte();
2811 let start = capture.node.start_position();
2812 if capture.node.end_position().row > start.row {
2813 range.end =
2814 range.start + self.line_len(start.row as u32) as usize - start.column;
2815 }
2816
2817 if !range.is_empty() {
2818 buffer_ranges.push((range, node_is_name));
2819 }
2820 }
2821
2822 if buffer_ranges.is_empty() {
2823 matches.advance();
2824 continue;
2825 }
2826
2827 let mut text = String::new();
2828 let mut highlight_ranges = Vec::new();
2829 let mut name_ranges = Vec::new();
2830 let mut chunks = self.chunks(
2831 buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end,
2832 true,
2833 );
2834 let mut last_buffer_range_end = 0;
2835 for (buffer_range, is_name) in buffer_ranges {
2836 if !text.is_empty() && buffer_range.start > last_buffer_range_end {
2837 text.push(' ');
2838 }
2839 last_buffer_range_end = buffer_range.end;
2840 if is_name {
2841 let mut start = text.len();
2842 let end = start + buffer_range.len();
2843
                    // When multiple names are captured, the matchable text
                    // includes the whitespace in between the names.
2846 if !name_ranges.is_empty() {
2847 start -= 1;
2848 }
2849
2850 name_ranges.push(start..end);
2851 }
2852
2853 let mut offset = buffer_range.start;
2854 chunks.seek(offset);
2855 for mut chunk in chunks.by_ref() {
2856 if chunk.text.len() > buffer_range.end - offset {
2857 chunk.text = &chunk.text[0..(buffer_range.end - offset)];
2858 offset = buffer_range.end;
2859 } else {
2860 offset += chunk.text.len();
2861 }
2862 let style = chunk
2863 .syntax_highlight_id
2864 .zip(theme)
2865 .and_then(|(highlight, theme)| highlight.style(theme));
2866 if let Some(style) = style {
2867 let start = text.len();
2868 let end = start + chunk.text.len();
2869 highlight_ranges.push((start..end, style));
2870 }
2871 text.push_str(chunk.text);
2872 if offset >= buffer_range.end {
2873 break;
2874 }
2875 }
2876 }
2877
2878 matches.advance();
2879
2880 items.push(OutlineItem {
2881 depth: 0, // We'll calculate the depth later
2882 range: item_range,
2883 text,
2884 highlight_ranges,
2885 name_ranges,
2886 body_range: open_index.zip(close_index).map(|(start, end)| start..end),
2887 });
2888 }
2889
2890 items.sort_by_key(|item| (item.range.start, Reverse(item.range.end)));
2891
2892 // Assign depths based on containment relationships and convert to anchors.
2893 let mut item_ends_stack = Vec::<usize>::new();
2894 let mut anchor_items = Vec::new();
2895 for item in items {
2896 while let Some(last_end) = item_ends_stack.last().copied() {
2897 if last_end < item.range.end {
2898 item_ends_stack.pop();
2899 } else {
2900 break;
2901 }
2902 }
2903
2904 anchor_items.push(OutlineItem {
2905 depth: item_ends_stack.len(),
2906 range: self.anchor_after(item.range.start)..self.anchor_before(item.range.end),
2907 text: item.text,
2908 highlight_ranges: item.highlight_ranges,
2909 name_ranges: item.name_ranges,
2910 body_range: item.body_range.map(|body_range| {
2911 self.anchor_after(body_range.start)..self.anchor_before(body_range.end)
2912 }),
2913 });
2914 item_ends_stack.push(item.range.end);
2915 }
2916
2917 Some(anchor_items)
2918 }
2919
2920 /// For each grammar in the language, runs the provided
2921 /// [tree_sitter::Query] against the given range.
2922 pub fn matches(
2923 &self,
2924 range: Range<usize>,
2925 query: fn(&Grammar) -> Option<&tree_sitter::Query>,
2926 ) -> SyntaxMapMatches {
2927 self.syntax.matches(range, self, query)
2928 }
2929
    /// Returns bracket range pairs overlapping or adjacent to `range`.
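    ///
    /// # Example
    ///
    /// A hedged sketch, assuming `snapshot` is a parsed buffer whose language has
    /// a bracket query (e.g. `fn main() { }` in Rust):
    ///
    /// ```ignore
    /// for (open, close) in snapshot.bracket_ranges(0..snapshot.len()) {
    ///     println!("open bracket at {open:?}, close bracket at {close:?}");
    /// }
    /// ```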
2931 pub fn bracket_ranges<T: ToOffset>(
2932 &self,
2933 range: Range<T>,
2934 ) -> impl Iterator<Item = (Range<usize>, Range<usize>)> + '_ {
2935 // Find bracket pairs that *inclusively* contain the given range.
2936 let range = range.start.to_offset(self).saturating_sub(1)
2937 ..self.len().min(range.end.to_offset(self) + 1);
2938
2939 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2940 grammar.brackets_config.as_ref().map(|c| &c.query)
2941 });
2942 let configs = matches
2943 .grammars()
2944 .iter()
2945 .map(|grammar| grammar.brackets_config.as_ref().unwrap())
2946 .collect::<Vec<_>>();
2947
2948 iter::from_fn(move || {
2949 while let Some(mat) = matches.peek() {
2950 let mut open = None;
2951 let mut close = None;
2952 let config = &configs[mat.grammar_index];
2953 for capture in mat.captures {
2954 if capture.index == config.open_capture_ix {
2955 open = Some(capture.node.byte_range());
2956 } else if capture.index == config.close_capture_ix {
2957 close = Some(capture.node.byte_range());
2958 }
2959 }
2960
2961 matches.advance();
2962
2963 let Some((open, close)) = open.zip(close) else {
2964 continue;
2965 };
2966
2967 let bracket_range = open.start..=close.end;
2968 if !bracket_range.overlaps(&range) {
2969 continue;
2970 }
2971
2972 return Some((open, close));
2973 }
2974 None
2975 })
2976 }
2977
2978 /// Returns enclosing bracket ranges containing the given range
2979 pub fn enclosing_bracket_ranges<T: ToOffset>(
2980 &self,
2981 range: Range<T>,
2982 ) -> impl Iterator<Item = (Range<usize>, Range<usize>)> + '_ {
2983 let range = range.start.to_offset(self)..range.end.to_offset(self);
2984
2985 self.bracket_ranges(range.clone())
2986 .filter(move |(open, close)| open.start <= range.start && close.end >= range.end)
2987 }
2988
    /// Returns the smallest enclosing bracket ranges containing the given range,
    /// or `None` if no brackets contain the range.
    ///
    /// Can optionally pass a `range_filter` to filter which bracket ranges are considered.
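    ///
    /// # Example
    ///
    /// A hedged sketch that keeps only pairs spanning more than three bytes,
    /// assuming `snapshot` is a parsed buffer with a bracket query:
    ///
    /// ```ignore
    /// let filter = |open: std::ops::Range<usize>, close: std::ops::Range<usize>| {
    ///     close.end - open.start > 3
    /// };
    /// let innermost = snapshot.innermost_enclosing_bracket_ranges(10..12, Some(&filter));
    /// ```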
2992 pub fn innermost_enclosing_bracket_ranges<T: ToOffset>(
2993 &self,
2994 range: Range<T>,
2995 range_filter: Option<&dyn Fn(Range<usize>, Range<usize>) -> bool>,
2996 ) -> Option<(Range<usize>, Range<usize>)> {
2997 let range = range.start.to_offset(self)..range.end.to_offset(self);
2998
2999 // Get the ranges of the innermost pair of brackets.
3000 let mut result: Option<(Range<usize>, Range<usize>)> = None;
3001
3002 for (open, close) in self.enclosing_bracket_ranges(range.clone()) {
3003 if let Some(range_filter) = range_filter {
3004 if !range_filter(open.clone(), close.clone()) {
3005 continue;
3006 }
3007 }
3008
3009 let len = close.end - open.start;
3010
3011 if let Some((existing_open, existing_close)) = &result {
3012 let existing_len = existing_close.end - existing_open.start;
3013 if len > existing_len {
3014 continue;
3015 }
3016 }
3017
3018 result = Some((open, close));
3019 }
3020
3021 result
3022 }
3023
3024 /// Returns anchor ranges for any matches of the redaction query.
3025 /// The buffer can be associated with multiple languages, and the redaction query associated with each
3026 /// will be run on the relevant section of the buffer.
3027 pub fn redacted_ranges<T: ToOffset>(
3028 &self,
3029 range: Range<T>,
3030 ) -> impl Iterator<Item = Range<usize>> + '_ {
3031 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
3032 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
3033 grammar
3034 .redactions_config
3035 .as_ref()
3036 .map(|config| &config.query)
3037 });
3038
3039 let configs = syntax_matches
3040 .grammars()
3041 .iter()
3042 .map(|grammar| grammar.redactions_config.as_ref())
3043 .collect::<Vec<_>>();
3044
3045 iter::from_fn(move || {
3046 let redacted_range = syntax_matches
3047 .peek()
3048 .and_then(|mat| {
3049 configs[mat.grammar_index].and_then(|config| {
3050 mat.captures
3051 .iter()
3052 .find(|capture| capture.index == config.redaction_capture_ix)
3053 })
3054 })
3055 .map(|mat| mat.node.byte_range());
3056 syntax_matches.advance();
3057 redacted_range
3058 })
3059 }
3060
3061 pub fn runnable_ranges(
3062 &self,
3063 range: Range<Anchor>,
3064 ) -> impl Iterator<Item = RunnableRange> + '_ {
3065 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
3066
3067 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
3068 grammar.runnable_config.as_ref().map(|config| &config.query)
3069 });
3070
3071 let test_configs = syntax_matches
3072 .grammars()
3073 .iter()
3074 .map(|grammar| grammar.runnable_config.as_ref())
3075 .collect::<Vec<_>>();
3076
3077 iter::from_fn(move || loop {
3078 let mat = syntax_matches.peek()?;
3079
3080 let test_range = test_configs[mat.grammar_index].and_then(|test_configs| {
3081 let mut run_range = None;
3082 let full_range = mat.captures.iter().fold(
3083 Range {
3084 start: usize::MAX,
3085 end: 0,
3086 },
3087 |mut acc, next| {
3088 let byte_range = next.node.byte_range();
3089 if acc.start > byte_range.start {
3090 acc.start = byte_range.start;
3091 }
3092 if acc.end < byte_range.end {
3093 acc.end = byte_range.end;
3094 }
3095 acc
3096 },
3097 );
3098 if full_range.start > full_range.end {
3099 // We did not find a full spanning range of this match.
3100 return None;
3101 }
3102 let extra_captures: SmallVec<[_; 1]> =
3103 SmallVec::from_iter(mat.captures.iter().filter_map(|capture| {
3104 test_configs
3105 .extra_captures
3106 .get(capture.index as usize)
3107 .cloned()
3108 .and_then(|tag_name| match tag_name {
3109 RunnableCapture::Named(name) => {
3110 Some((capture.node.byte_range(), name))
3111 }
3112 RunnableCapture::Run => {
3113 let _ = run_range.insert(capture.node.byte_range());
3114 None
3115 }
3116 })
3117 }));
3118 let run_range = run_range?;
3119 let tags = test_configs
3120 .query
3121 .property_settings(mat.pattern_index)
3122 .iter()
3123 .filter_map(|property| {
3124 if *property.key == *"tag" {
3125 property
3126 .value
3127 .as_ref()
3128 .map(|value| RunnableTag(value.to_string().into()))
3129 } else {
3130 None
3131 }
3132 })
3133 .collect();
3134 let extra_captures = extra_captures
3135 .into_iter()
3136 .map(|(range, name)| {
3137 (
3138 name.to_string(),
3139 self.text_for_range(range.clone()).collect::<String>(),
3140 )
3141 })
3142 .collect();
3143 // All tags should have the same range.
3144 Some(RunnableRange {
3145 run_range,
3146 full_range,
3147 runnable: Runnable {
3148 tags,
3149 language: mat.language,
3150 buffer: self.remote_id(),
3151 },
3152 extra_captures,
3153 buffer_id: self.remote_id(),
3154 })
3155 });
3156
3157 syntax_matches.advance();
3158 if test_range.is_some() {
3159 // It's fine for us to short-circuit on .peek()? returning None. We don't want to return None from this iter if we
3160 // had a capture that did not contain a run marker, hence we'll just loop around for the next capture.
3161 return test_range;
3162 }
3163 })
3164 }
3165
3166 pub fn indent_guides_in_range(
3167 &self,
3168 range: Range<Anchor>,
3169 ignore_disabled_for_language: bool,
3170 cx: &AppContext,
3171 ) -> Vec<IndentGuide> {
3172 let language_settings = language_settings(self.language(), self.file.as_ref(), cx);
3173 let settings = language_settings.indent_guides;
3174 if !ignore_disabled_for_language && !settings.enabled {
3175 return Vec::new();
3176 }
3177 let tab_size = language_settings.tab_size.get() as u32;
3178
3179 let start_row = range.start.to_point(self).row;
3180 let end_row = range.end.to_point(self).row;
3181 let row_range = start_row..end_row + 1;
3182
3183 let mut row_indents = self.line_indents_in_row_range(row_range.clone());
3184
3185 let mut result_vec = Vec::new();
3186 let mut indent_stack = SmallVec::<[IndentGuide; 8]>::new();
3187
3188 while let Some((first_row, mut line_indent)) = row_indents.next() {
3189 let current_depth = indent_stack.len() as u32;
3190
            // When encountering an empty line, continue until a useful line indent
            // is found, then add to the indent stack with the depth found.
3193 let mut found_indent = false;
3194 let mut last_row = first_row;
3195 if line_indent.is_line_empty() {
3196 let mut trailing_row = end_row;
3197 while !found_indent {
3198 let (target_row, new_line_indent) =
3199 if let Some(display_row) = row_indents.next() {
3200 display_row
3201 } else {
3202 // This means we reached the end of the given range and found empty lines at the end.
3203 // We need to traverse further until we find a non-empty line to know if we need to add
3204 // an indent guide for the last visible indent.
3205 trailing_row += 1;
3206
3207 const TRAILING_ROW_SEARCH_LIMIT: u32 = 25;
3208 if trailing_row > self.max_point().row
3209 || trailing_row > end_row + TRAILING_ROW_SEARCH_LIMIT
3210 {
3211 break;
3212 }
3213 let new_line_indent = self.line_indent_for_row(trailing_row);
3214 (trailing_row, new_line_indent)
3215 };
3216
3217 if new_line_indent.is_line_empty() {
3218 continue;
3219 }
3220 last_row = target_row.min(end_row);
3221 line_indent = new_line_indent;
3222 found_indent = true;
3223 break;
3224 }
3225 } else {
3226 found_indent = true
3227 }
3228
3229 let depth = if found_indent {
3230 line_indent.len(tab_size) / tab_size
3231 + ((line_indent.len(tab_size) % tab_size) > 0) as u32
3232 } else {
3233 current_depth
3234 };
3235
3236 if depth < current_depth {
3237 for _ in 0..(current_depth - depth) {
3238 let mut indent = indent_stack.pop().unwrap();
3239 if last_row != first_row {
                        // In this case, we landed on an empty row, had to seek forward,
                        // and discovered that the indent we were on is ending.
                        // This means that the last display row must be on the line that
                        // ends this indent range, so we should display the range up to
                        // the first non-empty line.
3245 indent.end_row = first_row.saturating_sub(1);
3246 }
3247
3248 result_vec.push(indent)
3249 }
3250 } else if depth > current_depth {
3251 for next_depth in current_depth..depth {
3252 indent_stack.push(IndentGuide {
3253 buffer_id: self.remote_id(),
3254 start_row: first_row,
3255 end_row: last_row,
3256 depth: next_depth,
3257 tab_size,
3258 settings,
3259 });
3260 }
3261 }
3262
3263 for indent in indent_stack.iter_mut() {
3264 indent.end_row = last_row;
3265 }
3266 }
3267
3268 result_vec.extend(indent_stack);
3269
3270 result_vec
3271 }
3272
3273 pub async fn enclosing_indent(
3274 &self,
3275 mut buffer_row: BufferRow,
3276 ) -> Option<(Range<BufferRow>, LineIndent)> {
3277 let max_row = self.max_point().row;
3278 if buffer_row >= max_row {
3279 return None;
3280 }
3281
3282 let mut target_indent = self.line_indent_for_row(buffer_row);
3283
3284 // If the current row is at the start of an indented block, we want to return this
3285 // block as the enclosing indent.
3286 if !target_indent.is_line_empty() && buffer_row < max_row {
3287 let next_line_indent = self.line_indent_for_row(buffer_row + 1);
3288 if !next_line_indent.is_line_empty()
3289 && target_indent.raw_len() < next_line_indent.raw_len()
3290 {
3291 target_indent = next_line_indent;
3292 buffer_row += 1;
3293 }
3294 }
3295
3296 const SEARCH_ROW_LIMIT: u32 = 25000;
3297 const SEARCH_WHITESPACE_ROW_LIMIT: u32 = 2500;
3298 const YIELD_INTERVAL: u32 = 100;
3299
3300 let mut accessed_row_counter = 0;
3301
        // If the current row is blank, search for the nearest non-empty lines above and below.
3303 if target_indent.is_line_empty() {
3304 let start = buffer_row.saturating_sub(SEARCH_WHITESPACE_ROW_LIMIT);
3305 let end = (max_row + 1).min(buffer_row + SEARCH_WHITESPACE_ROW_LIMIT);
3306
3307 let mut non_empty_line_above = None;
3308 for (row, indent) in self
3309 .text
3310 .reversed_line_indents_in_row_range(start..buffer_row)
3311 {
3312 accessed_row_counter += 1;
3313 if accessed_row_counter == YIELD_INTERVAL {
3314 accessed_row_counter = 0;
3315 yield_now().await;
3316 }
3317 if !indent.is_line_empty() {
3318 non_empty_line_above = Some((row, indent));
3319 break;
3320 }
3321 }
3322
3323 let mut non_empty_line_below = None;
3324 for (row, indent) in self.text.line_indents_in_row_range((buffer_row + 1)..end) {
3325 accessed_row_counter += 1;
3326 if accessed_row_counter == YIELD_INTERVAL {
3327 accessed_row_counter = 0;
3328 yield_now().await;
3329 }
3330 if !indent.is_line_empty() {
3331 non_empty_line_below = Some((row, indent));
3332 break;
3333 }
3334 }
3335
3336 let (row, indent) = match (non_empty_line_above, non_empty_line_below) {
3337 (Some((above_row, above_indent)), Some((below_row, below_indent))) => {
3338 if above_indent.raw_len() >= below_indent.raw_len() {
3339 (above_row, above_indent)
3340 } else {
3341 (below_row, below_indent)
3342 }
3343 }
3344 (Some(above), None) => above,
3345 (None, Some(below)) => below,
3346 _ => return None,
3347 };
3348
3349 target_indent = indent;
3350 buffer_row = row;
3351 }
3352
3353 let start = buffer_row.saturating_sub(SEARCH_ROW_LIMIT);
3354 let end = (max_row + 1).min(buffer_row + SEARCH_ROW_LIMIT);
3355
3356 let mut start_indent = None;
3357 for (row, indent) in self
3358 .text
3359 .reversed_line_indents_in_row_range(start..buffer_row)
3360 {
3361 accessed_row_counter += 1;
3362 if accessed_row_counter == YIELD_INTERVAL {
3363 accessed_row_counter = 0;
3364 yield_now().await;
3365 }
3366 if !indent.is_line_empty() && indent.raw_len() < target_indent.raw_len() {
3367 start_indent = Some((row, indent));
3368 break;
3369 }
3370 }
3371 let (start_row, start_indent_size) = start_indent?;
3372
3373 let mut end_indent = (end, None);
3374 for (row, indent) in self.text.line_indents_in_row_range((buffer_row + 1)..end) {
3375 accessed_row_counter += 1;
3376 if accessed_row_counter == YIELD_INTERVAL {
3377 accessed_row_counter = 0;
3378 yield_now().await;
3379 }
3380 if !indent.is_line_empty() && indent.raw_len() < target_indent.raw_len() {
3381 end_indent = (row.saturating_sub(1), Some(indent));
3382 break;
3383 }
3384 }
3385 let (end_row, end_indent_size) = end_indent;
3386
3387 let indent = if let Some(end_indent_size) = end_indent_size {
3388 if start_indent_size.raw_len() > end_indent_size.raw_len() {
3389 start_indent_size
3390 } else {
3391 end_indent_size
3392 }
3393 } else {
3394 start_indent_size
3395 };
3396
3397 Some((start_row..end_row, indent))
3398 }
3399
3400 /// Returns selections for remote peers intersecting the given range.
3401 #[allow(clippy::type_complexity)]
3402 pub fn selections_in_range(
3403 &self,
3404 range: Range<Anchor>,
3405 include_local: bool,
3406 ) -> impl Iterator<
3407 Item = (
3408 ReplicaId,
3409 bool,
3410 CursorShape,
3411 impl Iterator<Item = &Selection<Anchor>> + '_,
3412 ),
3413 > + '_ {
3414 self.remote_selections
3415 .iter()
3416 .filter(move |(replica_id, set)| {
3417 (include_local || **replica_id != self.text.replica_id())
3418 && !set.selections.is_empty()
3419 })
3420 .map(move |(replica_id, set)| {
3421 let start_ix = match set.selections.binary_search_by(|probe| {
3422 probe.end.cmp(&range.start, self).then(Ordering::Greater)
3423 }) {
3424 Ok(ix) | Err(ix) => ix,
3425 };
3426 let end_ix = match set.selections.binary_search_by(|probe| {
3427 probe.start.cmp(&range.end, self).then(Ordering::Less)
3428 }) {
3429 Ok(ix) | Err(ix) => ix,
3430 };
3431
3432 (
3433 *replica_id,
3434 set.line_mode,
3435 set.cursor_shape,
3436 set.selections[start_ix..end_ix].iter(),
3437 )
3438 })
3439 }
3440
3441 /// Whether the buffer contains any git changes.
3442 pub fn has_git_diff(&self) -> bool {
3443 !self.git_diff.is_empty()
3444 }
3445
3446 /// Returns all the Git diff hunks intersecting the given
3447 /// row range.
3448 pub fn git_diff_hunks_in_row_range(
3449 &self,
3450 range: Range<BufferRow>,
3451 ) -> impl '_ + Iterator<Item = git::diff::DiffHunk<u32>> {
3452 self.git_diff.hunks_in_row_range(range, self)
3453 }
3454
3455 /// Returns all the Git diff hunks intersecting the given
3456 /// range.
3457 pub fn git_diff_hunks_intersecting_range(
3458 &self,
3459 range: Range<Anchor>,
3460 ) -> impl '_ + Iterator<Item = git::diff::DiffHunk<u32>> {
3461 self.git_diff.hunks_intersecting_range(range, self)
3462 }
3463
3464 /// Returns all the Git diff hunks intersecting the given
3465 /// range, in reverse order.
3466 pub fn git_diff_hunks_intersecting_range_rev(
3467 &self,
3468 range: Range<Anchor>,
3469 ) -> impl '_ + Iterator<Item = git::diff::DiffHunk<u32>> {
3470 self.git_diff.hunks_intersecting_range_rev(range, self)
3471 }
3472
    /// Returns whether the buffer contains any diagnostics.
3474 pub fn has_diagnostics(&self) -> bool {
3475 !self.diagnostics.is_empty()
3476 }
3477
3478 /// Returns all the diagnostics intersecting the given range.
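    ///
    /// # Example
    ///
    /// A hedged sketch that prints every diagnostic in the first 100 bytes as
    /// point ranges, assuming the buffer carries LSP diagnostics:
    ///
    /// ```ignore
    /// for entry in snapshot.diagnostics_in_range::<_, Point>(0..100, false) {
    ///     println!("{:?}: {}", entry.range, entry.diagnostic.message);
    /// }
    /// ```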
3479 pub fn diagnostics_in_range<'a, T, O>(
3480 &'a self,
3481 search_range: Range<T>,
3482 reversed: bool,
3483 ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
3484 where
3485 T: 'a + Clone + ToOffset,
3486 O: 'a + FromAnchor + Ord,
3487 {
3488 let mut iterators: Vec<_> = self
3489 .diagnostics
3490 .iter()
3491 .map(|(_, collection)| {
3492 collection
3493 .range::<T, O>(search_range.clone(), self, true, reversed)
3494 .peekable()
3495 })
3496 .collect();
3497
3498 std::iter::from_fn(move || {
3499 let (next_ix, _) = iterators
3500 .iter_mut()
3501 .enumerate()
3502 .flat_map(|(ix, iter)| Some((ix, iter.peek()?)))
3503 .min_by(|(_, a), (_, b)| {
3504 let cmp = a
3505 .range
3506 .start
3507 .cmp(&b.range.start)
3508 // when range is equal, sort by diagnostic severity
3509 .then(a.diagnostic.severity.cmp(&b.diagnostic.severity))
3510 // and stabilize order with group_id
3511 .then(a.diagnostic.group_id.cmp(&b.diagnostic.group_id));
3512 if reversed {
3513 cmp.reverse()
3514 } else {
3515 cmp
3516 }
3517 })?;
3518 iterators[next_ix].next()
3519 })
3520 }
3521
3522 /// Returns all the diagnostic groups associated with the given
3523 /// language server id. If no language server id is provided,
3524 /// all diagnostics groups are returned.
3525 pub fn diagnostic_groups(
3526 &self,
3527 language_server_id: Option<LanguageServerId>,
3528 ) -> Vec<(LanguageServerId, DiagnosticGroup<Anchor>)> {
3529 let mut groups = Vec::new();
3530
3531 if let Some(language_server_id) = language_server_id {
3532 if let Ok(ix) = self
3533 .diagnostics
3534 .binary_search_by_key(&language_server_id, |e| e.0)
3535 {
3536 self.diagnostics[ix]
3537 .1
3538 .groups(language_server_id, &mut groups, self);
3539 }
3540 } else {
3541 for (language_server_id, diagnostics) in self.diagnostics.iter() {
3542 diagnostics.groups(*language_server_id, &mut groups, self);
3543 }
3544 }
3545
3546 groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
3547 let a_start = &group_a.entries[group_a.primary_ix].range.start;
3548 let b_start = &group_b.entries[group_b.primary_ix].range.start;
3549 a_start.cmp(b_start, self).then_with(|| id_a.cmp(id_b))
3550 });
3551
3552 groups
3553 }
3554
    /// Returns an iterator over the diagnostics in the group with the given ID,
    /// across all language servers.
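    ///
    /// A minimal usage sketch; `snapshot` and `group_id` are illustrative, with
    /// `group_id` normally taken from a previously observed [Diagnostic]:
    ///
    /// ```ignore
    /// let entries: Vec<DiagnosticEntry<Point>> = snapshot.diagnostic_group(group_id).collect();
    /// ```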
3556 pub fn diagnostic_group<'a, O>(
3557 &'a self,
3558 group_id: usize,
3559 ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
3560 where
3561 O: 'a + FromAnchor,
3562 {
3563 self.diagnostics
3564 .iter()
3565 .flat_map(move |(_, set)| set.group(group_id, self))
3566 }
3567
3568 /// An integer version number that accounts for all updates besides
3569 /// the buffer's text itself (which is versioned via a version vector).
3570 pub fn non_text_state_update_count(&self) -> usize {
3571 self.non_text_state_update_count
3572 }
3573
    /// Returns a snapshot of the underlying file.
3575 pub fn file(&self) -> Option<&Arc<dyn File>> {
3576 self.file.as_ref()
3577 }
3578
    /// Resolves the path of the underlying file. When `include_root` is true (or the
    /// file is the worktree root itself), the returned path includes the worktree's
    /// root name; otherwise the path is relative to the worktree root.
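    ///
    /// A minimal usage sketch; `snapshot` is illustrative and `cx` is assumed to be
    /// an available `&AppContext`:
    ///
    /// ```ignore
    /// if let Some(path) = snapshot.resolve_file_path(cx, true) {
    ///     println!("{}", path.display());
    /// }
    /// ```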
3580 pub fn resolve_file_path(&self, cx: &AppContext, include_root: bool) -> Option<PathBuf> {
3581 if let Some(file) = self.file() {
3582 if file.path().file_name().is_none() || include_root {
3583 Some(file.full_path(cx))
3584 } else {
3585 Some(file.path().to_path_buf())
3586 }
3587 } else {
3588 None
3589 }
3590 }
3591}
3592
3593fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
3594 indent_size_for_text(text.chars_at(Point::new(row, 0)))
3595}
3596
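/// Measures the leading indentation (spaces or tabs) of the given character
/// stream, stopping at the first non-indentation character.
///
/// A minimal sketch with an illustrative input:
///
/// ```ignore
/// let indent = indent_size_for_text("    let x = 1;".chars());
/// assert_eq!(indent.len, 4);
/// assert!(matches!(indent.kind, IndentKind::Space));
/// ```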
3597fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
3598 let mut result = IndentSize::spaces(0);
3599 for c in text {
3600 let kind = match c {
3601 ' ' => IndentKind::Space,
3602 '\t' => IndentKind::Tab,
3603 _ => break,
3604 };
3605 if result.len == 0 {
3606 result.kind = kind;
3607 }
3608 result.len += 1;
3609 }
3610 result
3611}
3612
3613impl Clone for BufferSnapshot {
3614 fn clone(&self) -> Self {
3615 Self {
3616 text: self.text.clone(),
3617 git_diff: self.git_diff.clone(),
3618 syntax: self.syntax.clone(),
3619 file: self.file.clone(),
3620 remote_selections: self.remote_selections.clone(),
3621 diagnostics: self.diagnostics.clone(),
3622 language: self.language.clone(),
3623 non_text_state_update_count: self.non_text_state_update_count,
3624 }
3625 }
3626}
3627
3628impl Deref for BufferSnapshot {
3629 type Target = text::BufferSnapshot;
3630
3631 fn deref(&self) -> &Self::Target {
3632 &self.text
3633 }
3634}
3635
3636unsafe impl<'a> Send for BufferChunks<'a> {}
3637
3638impl<'a> BufferChunks<'a> {
3639 pub(crate) fn new(
3640 text: &'a Rope,
3641 range: Range<usize>,
3642 syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
3643 diagnostic_endpoints: Vec<DiagnosticEndpoint>,
3644 ) -> Self {
3645 let mut highlights = None;
3646 if let Some((captures, highlight_maps)) = syntax {
3647 highlights = Some(BufferChunkHighlights {
3648 captures,
3649 next_capture: None,
3650 stack: Default::default(),
3651 highlight_maps,
3652 })
3653 }
3654
3655 let diagnostic_endpoints = diagnostic_endpoints.into_iter().peekable();
3656 let chunks = text.chunks_in_range(range.clone());
3657
3658 BufferChunks {
3659 range,
3660 chunks,
3661 diagnostic_endpoints,
3662 error_depth: 0,
3663 warning_depth: 0,
3664 information_depth: 0,
3665 hint_depth: 0,
3666 unnecessary_depth: 0,
3667 highlights,
3668 }
3669 }
3670
3671 /// Seeks to the given byte offset in the buffer.
3672 pub fn seek(&mut self, offset: usize) {
3673 self.range.start = offset;
3674 self.chunks.seek(self.range.start);
3675 if let Some(highlights) = self.highlights.as_mut() {
3676 highlights
3677 .stack
3678 .retain(|(end_offset, _)| *end_offset > offset);
3679 if let Some(capture) = &highlights.next_capture {
3680 if offset >= capture.node.start_byte() {
3681 let next_capture_end = capture.node.end_byte();
3682 if offset < next_capture_end {
3683 highlights.stack.push((
3684 next_capture_end,
3685 highlights.highlight_maps[capture.grammar_index].get(capture.index),
3686 ));
3687 }
3688 highlights.next_capture.take();
3689 }
3690 }
3691 highlights.captures.set_byte_range(self.range.clone());
3692 }
3693 }
3694
3695 /// The current byte offset in the buffer.
3696 pub fn offset(&self) -> usize {
3697 self.range.start
3698 }
3699
3700 fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
3701 let depth = match endpoint.severity {
3702 DiagnosticSeverity::ERROR => &mut self.error_depth,
3703 DiagnosticSeverity::WARNING => &mut self.warning_depth,
3704 DiagnosticSeverity::INFORMATION => &mut self.information_depth,
3705 DiagnosticSeverity::HINT => &mut self.hint_depth,
3706 _ => return,
3707 };
3708 if endpoint.is_start {
3709 *depth += 1;
3710 } else {
3711 *depth -= 1;
3712 }
3713
3714 if endpoint.is_unnecessary {
3715 if endpoint.is_start {
3716 self.unnecessary_depth += 1;
3717 } else {
3718 self.unnecessary_depth -= 1;
3719 }
3720 }
3721 }
3722
3723 fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
3724 if self.error_depth > 0 {
3725 Some(DiagnosticSeverity::ERROR)
3726 } else if self.warning_depth > 0 {
3727 Some(DiagnosticSeverity::WARNING)
3728 } else if self.information_depth > 0 {
3729 Some(DiagnosticSeverity::INFORMATION)
3730 } else if self.hint_depth > 0 {
3731 Some(DiagnosticSeverity::HINT)
3732 } else {
3733 None
3734 }
3735 }
3736
3737 fn current_code_is_unnecessary(&self) -> bool {
3738 self.unnecessary_depth > 0
3739 }
3740}
3741
3742impl<'a> Iterator for BufferChunks<'a> {
3743 type Item = Chunk<'a>;
3744
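    // Each call yields the longest run of text over which the syntax highlight
    // and diagnostic state are constant: expired highlight scopes are popped,
    // captures and diagnostic endpoints beginning at or before the current
    // offset are consumed, and the emitted chunk is truncated at whichever
    // boundary comes first (the end of the current rope chunk, the next
    // capture, the next diagnostic endpoint, or the innermost enclosing
    // capture's end).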
3745 fn next(&mut self) -> Option<Self::Item> {
3746 let mut next_capture_start = usize::MAX;
3747 let mut next_diagnostic_endpoint = usize::MAX;
3748
3749 if let Some(highlights) = self.highlights.as_mut() {
3750 while let Some((parent_capture_end, _)) = highlights.stack.last() {
3751 if *parent_capture_end <= self.range.start {
3752 highlights.stack.pop();
3753 } else {
3754 break;
3755 }
3756 }
3757
3758 if highlights.next_capture.is_none() {
3759 highlights.next_capture = highlights.captures.next();
3760 }
3761
3762 while let Some(capture) = highlights.next_capture.as_ref() {
3763 if self.range.start < capture.node.start_byte() {
3764 next_capture_start = capture.node.start_byte();
3765 break;
3766 } else {
3767 let highlight_id =
3768 highlights.highlight_maps[capture.grammar_index].get(capture.index);
3769 highlights
3770 .stack
3771 .push((capture.node.end_byte(), highlight_id));
3772 highlights.next_capture = highlights.captures.next();
3773 }
3774 }
3775 }
3776
3777 while let Some(endpoint) = self.diagnostic_endpoints.peek().copied() {
3778 if endpoint.offset <= self.range.start {
3779 self.update_diagnostic_depths(endpoint);
3780 self.diagnostic_endpoints.next();
3781 } else {
3782 next_diagnostic_endpoint = endpoint.offset;
3783 break;
3784 }
3785 }
3786
3787 if let Some(chunk) = self.chunks.peek() {
3788 let chunk_start = self.range.start;
3789 let mut chunk_end = (self.chunks.offset() + chunk.len())
3790 .min(next_capture_start)
3791 .min(next_diagnostic_endpoint);
3792 let mut highlight_id = None;
3793 if let Some(highlights) = self.highlights.as_ref() {
3794 if let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last() {
3795 chunk_end = chunk_end.min(*parent_capture_end);
3796 highlight_id = Some(*parent_highlight_id);
3797 }
3798 }
3799
3800 let slice =
3801 &chunk[chunk_start - self.chunks.offset()..chunk_end - self.chunks.offset()];
3802 self.range.start = chunk_end;
3803 if self.range.start == self.chunks.offset() + chunk.len() {
3804 self.chunks.next().unwrap();
3805 }
3806
3807 Some(Chunk {
3808 text: slice,
3809 syntax_highlight_id: highlight_id,
3810 diagnostic_severity: self.current_diagnostic_severity(),
3811 is_unnecessary: self.current_code_is_unnecessary(),
3812 ..Default::default()
3813 })
3814 } else {
3815 None
3816 }
3817 }
3818}
3819
3820impl operation_queue::Operation for Operation {
3821 fn lamport_timestamp(&self) -> clock::Lamport {
3822 match self {
3823 Operation::Buffer(_) => {
3824 unreachable!("buffer operations should never be deferred at this layer")
3825 }
3826 Operation::UpdateDiagnostics {
3827 lamport_timestamp, ..
3828 }
3829 | Operation::UpdateSelections {
3830 lamport_timestamp, ..
3831 }
3832 | Operation::UpdateCompletionTriggers {
3833 lamport_timestamp, ..
3834 } => *lamport_timestamp,
3835 }
3836 }
3837}
3838
3839impl Default for Diagnostic {
3840 fn default() -> Self {
3841 Self {
3842 source: Default::default(),
3843 code: None,
3844 severity: DiagnosticSeverity::ERROR,
3845 message: Default::default(),
3846 group_id: 0,
3847 is_primary: false,
3848 is_disk_based: false,
3849 is_unnecessary: false,
3850 data: None,
3851 }
3852 }
3853}
3854
3855impl IndentSize {
    /// Returns an [IndentSize] representing the given number of spaces.
3857 pub fn spaces(len: u32) -> Self {
3858 Self {
3859 len,
3860 kind: IndentKind::Space,
3861 }
3862 }
3863
3864 /// Returns an [IndentSize] representing a tab.
3865 pub fn tab() -> Self {
3866 Self {
3867 len: 1,
3868 kind: IndentKind::Tab,
3869 }
3870 }
3871
3872 /// An iterator over the characters represented by this [IndentSize].
3873 pub fn chars(&self) -> impl Iterator<Item = char> {
3874 iter::repeat(self.char()).take(self.len as usize)
3875 }
3876
3877 /// The character representation of this [IndentSize].
3878 pub fn char(&self) -> char {
3879 match self.kind {
3880 IndentKind::Space => ' ',
3881 IndentKind::Tab => '\t',
3882 }
3883 }
3884
    /// Consumes this [IndentSize] and returns a new one that has been shrunk
    /// or enlarged by the given size, in the given direction.
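    ///
    /// A minimal sketch with illustrative sizes:
    ///
    /// ```ignore
    /// let indent = IndentSize::spaces(4);
    /// let grown = indent.with_delta(Ordering::Greater, IndentSize::spaces(4)); // len == 8
    /// let shrunk = grown.with_delta(Ordering::Less, IndentSize::spaces(4));    // len == 4
    /// ```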
3887 pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
3888 match direction {
3889 Ordering::Less => {
3890 if self.kind == size.kind && self.len >= size.len {
3891 self.len -= size.len;
3892 }
3893 }
3894 Ordering::Equal => {}
3895 Ordering::Greater => {
3896 if self.len == 0 {
3897 self = size;
3898 } else if self.kind == size.kind {
3899 self.len += size.len;
3900 }
3901 }
3902 }
3903 self
3904 }
3905}
3906
3907#[cfg(any(test, feature = "test-support"))]
3908pub struct TestFile {
3909 pub path: Arc<Path>,
3910 pub root_name: String,
3911}
3912
3913#[cfg(any(test, feature = "test-support"))]
3914impl File for TestFile {
3915 fn path(&self) -> &Arc<Path> {
3916 &self.path
3917 }
3918
3919 fn full_path(&self, _: &gpui::AppContext) -> PathBuf {
3920 PathBuf::from(&self.root_name).join(self.path.as_ref())
3921 }
3922
3923 fn as_local(&self) -> Option<&dyn LocalFile> {
3924 None
3925 }
3926
3927 fn mtime(&self) -> Option<SystemTime> {
3928 unimplemented!()
3929 }
3930
3931 fn file_name<'a>(&'a self, _: &'a gpui::AppContext) -> &'a std::ffi::OsStr {
3932 self.path().file_name().unwrap_or(self.root_name.as_ref())
3933 }
3934
3935 fn worktree_id(&self) -> usize {
3936 0
3937 }
3938
3939 fn is_deleted(&self) -> bool {
3940 unimplemented!()
3941 }
3942
3943 fn as_any(&self) -> &dyn std::any::Any {
3944 unimplemented!()
3945 }
3946
3947 fn to_proto(&self, _: &AppContext) -> rpc::proto::File {
3948 unimplemented!()
3949 }
3950
3951 fn is_private(&self) -> bool {
3952 false
3953 }
3954}
3955
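/// Groups an iterator of row numbers into ranges, starting a new range whenever
/// a value is not contiguous with the previous one or when a range reaches
/// `max_len` rows.
///
/// A minimal sketch with illustrative values:
///
/// ```ignore
/// let ranges: Vec<_> = contiguous_ranges([1, 2, 3, 5, 6, 9].into_iter(), 2).collect();
/// assert_eq!(ranges, vec![1..3, 3..4, 5..7, 9..10]);
/// ```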
3956pub(crate) fn contiguous_ranges(
3957 values: impl Iterator<Item = u32>,
3958 max_len: usize,
3959) -> impl Iterator<Item = Range<u32>> {
3960 let mut values = values;
3961 let mut current_range: Option<Range<u32>> = None;
3962 std::iter::from_fn(move || loop {
3963 if let Some(value) = values.next() {
3964 if let Some(range) = &mut current_range {
3965 if value == range.end && range.len() < max_len {
3966 range.end += 1;
3967 continue;
3968 }
3969 }
3970
3971 let prev_range = current_range.clone();
3972 current_range = Some(value..(value + 1));
3973 if prev_range.is_some() {
3974 return prev_range;
3975 }
3976 } else {
3977 return current_range.take();
3978 }
3979 })
3980}
3981
3982/// Returns the [CharKind] for the given character. When a scope is provided,
3983/// the function checks if the character is considered a word character
3984/// based on the language scope's word character settings.
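///
/// A minimal sketch; the scope-specific case depends on the language's word
/// character settings:
///
/// ```ignore
/// assert!(matches!(char_kind(&None, 'a'), CharKind::Word));
/// assert!(matches!(char_kind(&None, ' '), CharKind::Whitespace));
/// assert!(matches!(char_kind(&None, '-'), CharKind::Punctuation));
/// ```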
3985pub fn char_kind(scope: &Option<LanguageScope>, c: char) -> CharKind {
3986 if c.is_whitespace() {
3987 return CharKind::Whitespace;
3988 } else if c.is_alphanumeric() || c == '_' {
3989 return CharKind::Word;
3990 }
3991
3992 if let Some(scope) = scope {
3993 if let Some(characters) = scope.word_characters() {
3994 if characters.contains(&c) {
3995 return CharKind::Word;
3996 }
3997 }
3998 }
3999
4000 CharKind::Punctuation
4001}
4002
4003/// Find all of the ranges of whitespace that occur at the ends of lines
4004/// in the given rope.
4005///
4006/// This could also be done with a regex search, but this implementation
4007/// avoids copying text.
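///
/// A minimal sketch; the returned byte ranges cover the trailing spaces and
/// tabs on each line (illustrative input):
///
/// ```ignore
/// let rope = Rope::from("fn main() {   \n}\t\t\n");
/// assert_eq!(trailing_whitespace_ranges(&rope), vec![11..14, 16..18]);
/// ```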
4008pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
4009 let mut ranges = Vec::new();
4010
4011 let mut offset = 0;
4012 let mut prev_chunk_trailing_whitespace_range = 0..0;
4013 for chunk in rope.chunks() {
4014 let mut prev_line_trailing_whitespace_range = 0..0;
4015 for (i, line) in chunk.split('\n').enumerate() {
4016 let line_end_offset = offset + line.len();
4017 let trimmed_line_len = line.trim_end_matches(|c| matches!(c, ' ' | '\t')).len();
4018 let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;
4019
4020 if i == 0 && trimmed_line_len == 0 {
4021 trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
4022 }
4023 if !prev_line_trailing_whitespace_range.is_empty() {
4024 ranges.push(prev_line_trailing_whitespace_range);
4025 }
4026
4027 offset = line_end_offset + 1;
4028 prev_line_trailing_whitespace_range = trailing_whitespace_range;
4029 }
4030
4031 offset -= 1;
4032 prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
4033 }
4034
4035 if !prev_chunk_trailing_whitespace_range.is_empty() {
4036 ranges.push(prev_chunk_trailing_whitespace_range);
4037 }
4038
4039 ranges
4040}