1pub use crate::{
2 diagnostic_set::DiagnosticSet,
3 highlight_map::{HighlightId, HighlightMap},
4 markdown::ParsedMarkdown,
5 proto, Grammar, Language, LanguageRegistry,
6};
7use crate::{
8 diagnostic_set::{DiagnosticEntry, DiagnosticGroup},
9 language_settings::{language_settings, IndentGuideSettings, LanguageSettings},
10 markdown::parse_markdown,
11 outline::OutlineItem,
12 syntax_map::{
13 SyntaxLayer, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures, SyntaxMapMatches,
14 SyntaxSnapshot, ToTreeSitterPoint,
15 },
16 task_context::RunnableRange,
17 LanguageScope, Outline, RunnableCapture, RunnableTag,
18};
19use anyhow::{anyhow, Context, Result};
20use async_watch as watch;
21pub use clock::ReplicaId;
22use futures::channel::oneshot;
23use gpui::{
24 AnyElement, AppContext, EventEmitter, HighlightStyle, ModelContext, Task, TaskLabel,
25 WindowContext,
26};
27use lazy_static::lazy_static;
28use lsp::LanguageServerId;
29use parking_lot::Mutex;
30use similar::{ChangeTag, TextDiff};
31use smallvec::SmallVec;
32use smol::future::yield_now;
33use std::{
34 any::Any,
35 cell::Cell,
36 cmp::{self, Ordering, Reverse},
37 collections::BTreeMap,
38 ffi::OsStr,
39 fmt,
40 future::Future,
41 iter::{self, Iterator, Peekable},
42 mem,
43 ops::{Deref, Range},
44 path::{Path, PathBuf},
45 str,
46 sync::Arc,
47 time::{Duration, Instant, SystemTime},
48 vec,
49};
50use sum_tree::TreeMap;
51use text::operation_queue::OperationQueue;
52use text::*;
53pub use text::{
54 Anchor, Bias, Buffer as TextBuffer, BufferId, BufferSnapshot as TextBufferSnapshot, Edit,
55 OffsetRangeExt, OffsetUtf16, Patch, Point, PointUtf16, Rope, Selection, SelectionGoal,
56 Subscription, TextDimension, TextSummary, ToOffset, ToOffsetUtf16, ToPoint, ToPointUtf16,
57 Transaction, TransactionId, Unclipped,
58};
59use theme::SyntaxTheme;
60#[cfg(any(test, feature = "test-support"))]
61use util::RandomCharIter;
62use util::RangeExt;
63
64#[cfg(any(test, feature = "test-support"))]
65pub use {tree_sitter_rust, tree_sitter_typescript};
66
67pub use lsp::DiagnosticSeverity;
68
69lazy_static! {
70 /// A label for the background task spawned by the buffer to compute
71 /// a diff against the contents of its file.
72 pub static ref BUFFER_DIFF_TASK: TaskLabel = TaskLabel::new();
73}
74
75/// Indicates whether a [Buffer] has permission to edit.
76#[derive(PartialEq, Clone, Copy, Debug)]
77pub enum Capability {
78 /// The buffer is a mutable replica.
79 ReadWrite,
80 /// The buffer is a read-only replica.
81 ReadOnly,
82}
83
84pub type BufferRow = u32;
85
86/// An in-memory representation of a source code file, including its text,
87/// syntax trees, git status, and diagnostics.
88pub struct Buffer {
89 text: TextBuffer,
90 diff_base: Option<Rope>,
91 git_diff: git::diff::BufferDiff,
92 file: Option<Arc<dyn File>>,
93 /// The mtime of the file when this buffer was last loaded from
94 /// or saved to disk.
95 saved_mtime: Option<SystemTime>,
96 /// The version vector when this buffer was last loaded from
97 /// or saved to disk.
98 saved_version: clock::Global,
99 transaction_depth: usize,
100 was_dirty_before_starting_transaction: Option<bool>,
101 reload_task: Option<Task<Result<()>>>,
102 language: Option<Arc<Language>>,
103 autoindent_requests: Vec<Arc<AutoindentRequest>>,
104 pending_autoindent: Option<Task<()>>,
105 sync_parse_timeout: Duration,
106 syntax_map: Mutex<SyntaxMap>,
107 parsing_in_background: bool,
108 parse_status: (watch::Sender<ParseStatus>, watch::Receiver<ParseStatus>),
109 non_text_state_update_count: usize,
110 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
111 remote_selections: TreeMap<ReplicaId, SelectionSet>,
112 diagnostics_timestamp: clock::Lamport,
113 completion_triggers: Vec<String>,
114 completion_triggers_timestamp: clock::Lamport,
115 deferred_ops: OperationQueue<Operation>,
116 capability: Capability,
117 has_conflict: bool,
118 diff_base_version: usize,
119 /// Memoizes calls to has_changes_since(saved_version).
120 /// The cell's contents are (self.version, has_changes) at the time of the last call.
121 has_unsaved_edits: Cell<(clock::Global, bool)>,
122}
123
124#[derive(Copy, Clone, Debug, PartialEq, Eq)]
125pub enum ParseStatus {
126 Idle,
127 Parsing,
128}
129
130/// An immutable, cheaply cloneable representation of a fixed
131/// state of a buffer.
132pub struct BufferSnapshot {
133 text: text::BufferSnapshot,
134 git_diff: git::diff::BufferDiff,
135 pub(crate) syntax: SyntaxSnapshot,
136 file: Option<Arc<dyn File>>,
137 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
138 remote_selections: TreeMap<ReplicaId, SelectionSet>,
139 language: Option<Arc<Language>>,
140 non_text_state_update_count: usize,
141}
142
143/// The kind and amount of indentation in a particular line. For now,
144/// assumes that indentation is all the same character.
145#[derive(Clone, Copy, Debug, PartialEq, Eq, Default)]
146pub struct IndentSize {
147 /// The number of bytes that comprise the indentation.
148 pub len: u32,
149 /// The kind of whitespace used for indentation.
150 pub kind: IndentKind,
151}
152
153/// A whitespace character that's used for indentation.
154#[derive(Clone, Copy, Debug, PartialEq, Eq, Default)]
155pub enum IndentKind {
156 /// An ASCII space character.
157 #[default]
158 Space,
159 /// An ASCII tab character.
160 Tab,
161}
162
163/// The shape of a selection cursor.
164#[derive(Copy, Clone, PartialEq, Eq, Debug, Default)]
165pub enum CursorShape {
166 /// A vertical bar
167 #[default]
168 Bar,
169 /// A block that surrounds the following character
170 Block,
171 /// An underline that runs along the following character
172 Underscore,
173 /// A box drawn around the following character
174 Hollow,
175}
176
177#[derive(Clone, Debug)]
178struct SelectionSet {
179 line_mode: bool,
180 cursor_shape: CursorShape,
181 selections: Arc<[Selection<Anchor>]>,
182 lamport_timestamp: clock::Lamport,
183}
184
185/// A diagnostic associated with a certain range of a buffer.
186#[derive(Clone, Debug, PartialEq, Eq)]
187pub struct Diagnostic {
188 /// The name of the service that produced this diagnostic.
189 pub source: Option<String>,
190 /// A machine-readable code that identifies this diagnostic.
191 pub code: Option<String>,
192 /// Whether this diagnostic is a hint, warning, or error.
193 pub severity: DiagnosticSeverity,
194 /// The human-readable message associated with this diagnostic.
195 pub message: String,
196 /// An id that identifies the group to which this diagnostic belongs.
197 ///
198 /// When a language server produces a diagnostic with
199 /// one or more associated diagnostics, those diagnostics are all
200 /// assigned a single group id.
201 pub group_id: usize,
202 /// Whether this diagnostic is the primary diagnostic for its group.
203 ///
204 /// In a given group, the primary diagnostic is the top-level diagnostic
205 /// returned by the language server. The non-primary diagnostics are the
206 /// associated diagnostics.
207 pub is_primary: bool,
208 /// Whether this diagnostic is considered to originate from an analysis of
209 /// files on disk, as opposed to any unsaved buffer contents. This is a
210 /// property of a given diagnostic source, and is configured for a given
211 /// language server via the [`LspAdapter::disk_based_diagnostic_sources`](crate::LspAdapter::disk_based_diagnostic_sources) method
212 /// for the language server.
213 pub is_disk_based: bool,
214 /// Whether this diagnostic marks unnecessary code.
215 pub is_unnecessary: bool,
216}
217
218/// TODO - move this into the `project` crate and make it private.
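///
/// An illustrative usage sketch (marked `ignore`, so it is not compiled as a doctest;
/// `completion_docs` and `language_registry` are assumed to come from the LSP
/// completion response and the project, respectively):
///
/// ```ignore
/// let docs =
///     prepare_completion_documentation(&completion_docs, &language_registry, None).await;
/// match docs {
///     Documentation::SingleLine(text) => { /* render inline with the completion */ }
///     _ => { /* render in a separate documentation panel */ }
/// }
/// ```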
219pub async fn prepare_completion_documentation(
220 documentation: &lsp::Documentation,
221 language_registry: &Arc<LanguageRegistry>,
222 language: Option<Arc<Language>>,
223) -> Documentation {
224 match documentation {
225 lsp::Documentation::String(text) => {
226 if text.lines().count() <= 1 {
227 Documentation::SingleLine(text.clone())
228 } else {
229 Documentation::MultiLinePlainText(text.clone())
230 }
231 }
232
233 lsp::Documentation::MarkupContent(lsp::MarkupContent { kind, value }) => match kind {
234 lsp::MarkupKind::PlainText => {
235 if value.lines().count() <= 1 {
236 Documentation::SingleLine(value.clone())
237 } else {
238 Documentation::MultiLinePlainText(value.clone())
239 }
240 }
241
242 lsp::MarkupKind::Markdown => {
243 let parsed = parse_markdown(value, language_registry, language).await;
244 Documentation::MultiLineMarkdown(parsed)
245 }
246 },
247 }
248}
249
250/// Documentation associated with a [`Completion`].
251#[derive(Clone, Debug)]
252pub enum Documentation {
253 /// There is no documentation for this completion.
254 Undocumented,
255 /// A single line of documentation.
256 SingleLine(String),
257 /// Multiple lines of plain text documentation.
258 MultiLinePlainText(String),
259 /// Markdown documentation.
260 MultiLineMarkdown(ParsedMarkdown),
261}
262
263/// An operation used to synchronize this buffer with its other replicas.
264#[derive(Clone, Debug, PartialEq)]
265pub enum Operation {
266 /// A text operation.
267 Buffer(text::Operation),
268
269 /// An update to the buffer's diagnostics.
270 UpdateDiagnostics {
271 /// The id of the language server that produced the new diagnostics.
272 server_id: LanguageServerId,
273 /// The diagnostics.
274 diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
275 /// The buffer's lamport timestamp.
276 lamport_timestamp: clock::Lamport,
277 },
278
279 /// An update to the most recent selections in this buffer.
280 UpdateSelections {
281 /// The selections.
282 selections: Arc<[Selection<Anchor>]>,
283 /// The buffer's lamport timestamp.
284 lamport_timestamp: clock::Lamport,
285 /// Whether the selections are in 'line mode'.
286 line_mode: bool,
287 /// The [`CursorShape`] associated with these selections.
288 cursor_shape: CursorShape,
289 },
290
291 /// An update to the characters that should trigger autocompletion
292 /// for this buffer.
293 UpdateCompletionTriggers {
294 /// The characters that trigger autocompletion.
295 triggers: Vec<String>,
296 /// The buffer's lamport timestamp.
297 lamport_timestamp: clock::Lamport,
298 },
299}
300
301/// An event that occurs in a buffer.
302#[derive(Clone, Debug, PartialEq)]
303pub enum Event {
304 /// The buffer was changed in a way that must be
305 /// propagated to its other replicas.
306 Operation(Operation),
307 /// The buffer was edited.
308 Edited,
309 /// The buffer's `dirty` bit changed.
310 DirtyChanged,
311 /// The buffer was saved.
312 Saved,
313 /// The buffer's file was changed on disk.
314 FileHandleChanged,
315 /// The buffer was reloaded.
316 Reloaded,
317 /// The buffer's diff_base changed.
318 DiffBaseChanged,
319 /// Buffer's excerpts for a certain diff base were recalculated.
320 DiffUpdated,
321 /// The buffer's language was changed.
322 LanguageChanged,
323 /// The buffer's syntax trees were updated.
324 Reparsed,
325 /// The buffer's diagnostics were updated.
326 DiagnosticsUpdated,
327 /// The buffer gained or lost editing capabilities.
328 CapabilityChanged,
329 /// The buffer was explicitly requested to close.
330 Closed,
331}
332
333/// The file associated with a buffer.
334pub trait File: Send + Sync {
335 /// Returns the [`LocalFile`] associated with this file, if the
336 /// file is local.
337 fn as_local(&self) -> Option<&dyn LocalFile>;
338
339 /// Returns whether this file is local.
340 fn is_local(&self) -> bool {
341 self.as_local().is_some()
342 }
343
344 /// Returns the file's mtime.
345 fn mtime(&self) -> Option<SystemTime>;
346
347 /// Returns the path of this file relative to the worktree's root directory.
348 fn path(&self) -> &Arc<Path>;
349
350 /// Returns the path of this file relative to the worktree's parent directory (this means it
351 /// includes the name of the worktree's root folder).
352 fn full_path(&self, cx: &AppContext) -> PathBuf;
353
354 /// Returns the last component of this handle's absolute path. If this handle refers to the root
355 /// of its worktree, then this method will return the name of the worktree itself.
356 fn file_name<'a>(&'a self, cx: &'a AppContext) -> &'a OsStr;
357
358 /// Returns the id of the worktree to which this file belongs.
359 ///
360 /// This is needed for looking up project-specific settings.
361 fn worktree_id(&self) -> usize;
362
363 /// Returns whether the file has been deleted.
364 fn is_deleted(&self) -> bool;
365
366 /// Returns whether the file existed on disk at one point.
367 fn is_created(&self) -> bool {
368 self.mtime().is_some()
369 }
370
371 /// Converts this file into an [`Any`] trait object.
372 fn as_any(&self) -> &dyn Any;
373
374 /// Converts this file into a protobuf message.
375 fn to_proto(&self, cx: &AppContext) -> rpc::proto::File;
376
377 /// Return whether Zed considers this to be a private file.
378 fn is_private(&self) -> bool;
379}
380
381/// The file associated with a buffer, in the case where the file is on the local disk.
382pub trait LocalFile: File {
383 /// Returns the absolute path of this file.
384 fn abs_path(&self, cx: &AppContext) -> PathBuf;
385
386 /// Loads the file's contents from disk.
387 fn load(&self, cx: &AppContext) -> Task<Result<String>>;
388
389 /// Returns true if the file should not be shared with collaborators.
390 fn is_private(&self, _: &AppContext) -> bool {
391 false
392 }
393}
394
395/// The auto-indent behavior associated with an editing operation.
396/// For some editing operations, each affected line of text has its
397/// indentation recomputed. For other operations, the entire block
398/// of edited text is adjusted uniformly.
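///
/// A minimal sketch of choosing a mode when editing (marked `ignore`; `buffer`,
/// `position`, `copied_text`, and the GPUI `cx` are assumed to exist in the caller):
///
/// ```ignore
/// // Re-indent every inserted line according to the language's indent rules.
/// buffer.edit(
///     [(position..position, "if x {\n    y\n}")],
///     Some(AutoindentMode::EachLine),
///     cx,
/// );
///
/// // Shift a pasted block uniformly, preserving its internal relative indentation.
/// buffer.edit(
///     [(position..position, copied_text)],
///     Some(AutoindentMode::Block { original_indent_columns: vec![4] }),
///     cx,
/// );
/// ```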
399#[derive(Clone, Debug)]
400pub enum AutoindentMode {
401 /// Indent each line of inserted text.
402 EachLine,
403 /// Apply the same indentation adjustment to all of the lines
404 /// in a given insertion.
405 Block {
406 /// The original indentation level of the first line of each
407 /// insertion, if it has been copied.
408 original_indent_columns: Vec<u32>,
409 },
410}
411
412#[derive(Clone)]
413struct AutoindentRequest {
414 before_edit: BufferSnapshot,
415 entries: Vec<AutoindentRequestEntry>,
416 is_block_mode: bool,
417}
418
419#[derive(Clone)]
420struct AutoindentRequestEntry {
421 /// A range of the buffer whose indentation should be adjusted.
422 range: Range<Anchor>,
423 /// Whether or not these lines should be considered brand new, for the
424 /// purpose of auto-indent. When text is not new, its indentation will
425 /// only be adjusted if the suggested indentation level has *changed*
426 /// since the edit was made.
427 first_line_is_new: bool,
428 indent_size: IndentSize,
429 original_indent_column: Option<u32>,
430}
431
432#[derive(Debug)]
433struct IndentSuggestion {
434 basis_row: u32,
435 delta: Ordering,
436 within_error: bool,
437}
438
439struct BufferChunkHighlights<'a> {
440 captures: SyntaxMapCaptures<'a>,
441 next_capture: Option<SyntaxMapCapture<'a>>,
442 stack: Vec<(usize, HighlightId)>,
443 highlight_maps: Vec<HighlightMap>,
444}
445
446/// An iterator that yields chunks of a buffer's text, along with their
447/// syntax highlights and diagnostic status.
448pub struct BufferChunks<'a> {
449 range: Range<usize>,
450 chunks: text::Chunks<'a>,
451 diagnostic_endpoints: Peekable<vec::IntoIter<DiagnosticEndpoint>>,
452 error_depth: usize,
453 warning_depth: usize,
454 information_depth: usize,
455 hint_depth: usize,
456 unnecessary_depth: usize,
457 highlights: Option<BufferChunkHighlights<'a>>,
458}
459
460/// A chunk of a buffer's text, along with its syntax highlight and
461/// diagnostic status.
462#[derive(Clone, Debug, Default)]
463pub struct Chunk<'a> {
464 /// The text of the chunk.
465 pub text: &'a str,
466 /// The syntax highlighting style of the chunk.
467 pub syntax_highlight_id: Option<HighlightId>,
468 /// The highlight style that has been applied to this chunk in
469 /// the editor.
470 pub highlight_style: Option<HighlightStyle>,
471 /// The severity of diagnostic associated with this chunk, if any.
472 pub diagnostic_severity: Option<DiagnosticSeverity>,
473 /// Whether this chunk of text is marked as unnecessary.
474 pub is_unnecessary: bool,
475 /// Whether this chunk of text was originally a tab character.
476 pub is_tab: bool,
477 /// An optional recipe for how the chunk should be presented.
478 pub renderer: Option<ChunkRenderer>,
479}
480
481/// A recipe for how the chunk should be presented.
482#[derive(Clone)]
483pub struct ChunkRenderer {
484 /// Creates a custom element to represent this chunk.
485 pub render: Arc<dyn Send + Sync + Fn(&mut WindowContext) -> AnyElement>,
486 /// If true, the element is constrained to the shaped width of the text.
487 pub constrain_width: bool,
488}
489
490impl fmt::Debug for ChunkRenderer {
491 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
492 f.debug_struct("ChunkRenderer")
493 .field("constrain_width", &self.constrain_width)
494 .finish()
495 }
496}
497
498/// A set of edits to a given version of a buffer, computed asynchronously.
499#[derive(Debug)]
500pub struct Diff {
501 pub(crate) base_version: clock::Global,
502 line_ending: LineEnding,
503 edits: Vec<(Range<usize>, Arc<str>)>,
504}
505
506#[derive(Clone, Copy)]
507pub(crate) struct DiagnosticEndpoint {
508 offset: usize,
509 is_start: bool,
510 severity: DiagnosticSeverity,
511 is_unnecessary: bool,
512}
513
514/// A class of characters, used for characterizing a run of text.
515#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
516pub enum CharKind {
517 /// Whitespace.
518 Whitespace,
519 /// Punctuation.
520 Punctuation,
521 /// Word.
522 Word,
523}
524
525/// A runnable is a set of data about a region that can be resolved into a task.
526pub struct Runnable {
527 pub tags: SmallVec<[RunnableTag; 1]>,
528 pub language: Arc<Language>,
529 pub buffer: BufferId,
530}
531
532#[derive(Clone, Debug, PartialEq)]
533pub struct IndentGuide {
534 pub buffer_id: BufferId,
535 pub start_row: BufferRow,
536 pub end_row: BufferRow,
537 pub depth: u32,
538 pub tab_size: u32,
539 pub settings: IndentGuideSettings,
540}
541
542impl IndentGuide {
543 pub fn indent_level(&self) -> u32 {
544 self.depth * self.tab_size
545 }
546}
547
548impl Buffer {
549 /// Create a new buffer with the given base text.
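 ///
 /// A minimal sketch (marked `ignore`, so it is not run as a doctest; assumes a GPUI
 /// `cx: &mut AppContext`):
 ///
 /// ```ignore
 /// let buffer = cx.new_model(|cx| Buffer::local("fn main() {}\n", cx));
 /// buffer.update(cx, |buffer, cx| {
 ///     buffer.edit([(0..0, "// hello\n")], None, cx);
 /// });
 /// ```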
550 pub fn local<T: Into<String>>(base_text: T, cx: &mut ModelContext<Self>) -> Self {
551 Self::build(
552 TextBuffer::new(0, cx.entity_id().as_non_zero_u64().into(), base_text.into()),
553 None,
554 None,
555 Capability::ReadWrite,
556 )
557 }
558
559 /// Create a new buffer with the given base text that has proper line endings and other normalization applied.
560 pub fn local_normalized(
561 base_text_normalized: Rope,
562 line_ending: LineEnding,
563 cx: &mut ModelContext<Self>,
564 ) -> Self {
565 Self::build(
566 TextBuffer::new_normalized(
567 0,
568 cx.entity_id().as_non_zero_u64().into(),
569 line_ending,
570 base_text_normalized,
571 ),
572 None,
573 None,
574 Capability::ReadWrite,
575 )
576 }
577
578 /// Create a new buffer that is a replica of a remote buffer.
579 pub fn remote(
580 remote_id: BufferId,
581 replica_id: ReplicaId,
582 capability: Capability,
583 base_text: impl Into<String>,
584 ) -> Self {
585 Self::build(
586 TextBuffer::new(replica_id, remote_id, base_text.into()),
587 None,
588 None,
589 capability,
590 )
591 }
592
593 /// Create a new buffer that is a replica of a remote buffer, populating its
594 /// state from the given protobuf message.
595 pub fn from_proto(
596 replica_id: ReplicaId,
597 capability: Capability,
598 message: proto::BufferState,
599 file: Option<Arc<dyn File>>,
600 ) -> Result<Self> {
601 let buffer_id = BufferId::new(message.id)
602 .with_context(|| anyhow!("Could not deserialize buffer_id"))?;
603 let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text);
604 let mut this = Self::build(buffer, message.diff_base, file, capability);
605 this.text.set_line_ending(proto::deserialize_line_ending(
606 rpc::proto::LineEnding::from_i32(message.line_ending)
607 .ok_or_else(|| anyhow!("missing line_ending"))?,
608 ));
609 this.saved_version = proto::deserialize_version(&message.saved_version);
610 this.saved_mtime = message.saved_mtime.map(|time| time.into());
611 Ok(this)
612 }
613
614 /// Serialize the buffer's state to a protobuf message.
615 pub fn to_proto(&self, cx: &AppContext) -> proto::BufferState {
616 proto::BufferState {
617 id: self.remote_id().into(),
618 file: self.file.as_ref().map(|f| f.to_proto(cx)),
619 base_text: self.base_text().to_string(),
620 diff_base: self.diff_base.as_ref().map(|h| h.to_string()),
621 line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
622 saved_version: proto::serialize_version(&self.saved_version),
623 saved_mtime: self.saved_mtime.map(|time| time.into()),
624 }
625 }
626
627 /// Serialize as protobufs all of the changes to the buffer since the given version.
628 pub fn serialize_ops(
629 &self,
630 since: Option<clock::Global>,
631 cx: &AppContext,
632 ) -> Task<Vec<proto::Operation>> {
633 let mut operations = Vec::new();
634 operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));
635
636 operations.extend(self.remote_selections.iter().map(|(_, set)| {
637 proto::serialize_operation(&Operation::UpdateSelections {
638 selections: set.selections.clone(),
639 lamport_timestamp: set.lamport_timestamp,
640 line_mode: set.line_mode,
641 cursor_shape: set.cursor_shape,
642 })
643 }));
644
645 for (server_id, diagnostics) in &self.diagnostics {
646 operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
647 lamport_timestamp: self.diagnostics_timestamp,
648 server_id: *server_id,
649 diagnostics: diagnostics.iter().cloned().collect(),
650 }));
651 }
652
653 operations.push(proto::serialize_operation(
654 &Operation::UpdateCompletionTriggers {
655 triggers: self.completion_triggers.clone(),
656 lamport_timestamp: self.completion_triggers_timestamp,
657 },
658 ));
659
660 let text_operations = self.text.operations().clone();
661 cx.background_executor().spawn(async move {
662 let since = since.unwrap_or_default();
663 operations.extend(
664 text_operations
665 .iter()
666 .filter(|(_, op)| !since.observed(op.timestamp()))
667 .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
668 );
669 operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
670 operations
671 })
672 }
673
674 /// Assign a language to the buffer, returning the buffer.
675 pub fn with_language(mut self, language: Arc<Language>, cx: &mut ModelContext<Self>) -> Self {
676 self.set_language(Some(language), cx);
677 self
678 }
679
680 /// Returns the [Capability] of this buffer.
681 pub fn capability(&self) -> Capability {
682 self.capability
683 }
684
685 /// Whether this buffer can only be read.
686 pub fn read_only(&self) -> bool {
687 self.capability == Capability::ReadOnly
688 }
689
690 /// Builds a [Buffer] with the given underlying [TextBuffer], diff base, [File] and [Capability].
691 pub fn build(
692 buffer: TextBuffer,
693 diff_base: Option<String>,
694 file: Option<Arc<dyn File>>,
695 capability: Capability,
696 ) -> Self {
697 let saved_mtime = file.as_ref().and_then(|file| file.mtime());
698
699 Self {
700 saved_mtime,
701 saved_version: buffer.version(),
702 reload_task: None,
703 transaction_depth: 0,
704 was_dirty_before_starting_transaction: None,
705 has_unsaved_edits: Cell::new((buffer.version(), false)),
706 text: buffer,
707 diff_base: diff_base
708 .map(|mut raw_diff_base| {
709 LineEnding::normalize(&mut raw_diff_base);
710 raw_diff_base
711 })
712 .map(Rope::from),
713 diff_base_version: 0,
714 git_diff: git::diff::BufferDiff::new(),
715 file,
716 capability,
717 syntax_map: Mutex::new(SyntaxMap::new()),
718 parsing_in_background: false,
719 non_text_state_update_count: 0,
720 sync_parse_timeout: Duration::from_millis(1),
721 parse_status: async_watch::channel(ParseStatus::Idle),
722 autoindent_requests: Default::default(),
723 pending_autoindent: Default::default(),
724 language: None,
725 remote_selections: Default::default(),
726 diagnostics: Default::default(),
727 diagnostics_timestamp: Default::default(),
728 completion_triggers: Default::default(),
729 completion_triggers_timestamp: Default::default(),
730 deferred_ops: OperationQueue::new(),
731 has_conflict: false,
732 }
733 }
734
735 /// Retrieve a snapshot of the buffer's current state. This is computationally
736 /// cheap, and allows reading from the buffer on a background thread.
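 ///
 /// A minimal sketch of reading on a background thread (marked `ignore`; assumes a
 /// `buffer: &Buffer` and a GPUI `cx` with access to the background executor):
 ///
 /// ```ignore
 /// let snapshot = buffer.snapshot();
 /// cx.background_executor()
 ///     .spawn(async move {
 ///         // The snapshot is immutable, so it can be read without further coordination.
 ///         snapshot.len()
 ///     })
 ///     .detach();
 /// ```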
737 pub fn snapshot(&self) -> BufferSnapshot {
738 let text = self.text.snapshot();
739 let mut syntax_map = self.syntax_map.lock();
740 syntax_map.interpolate(&text);
741 let syntax = syntax_map.snapshot();
742
743 BufferSnapshot {
744 text,
745 syntax,
746 git_diff: self.git_diff.clone(),
747 file: self.file.clone(),
748 remote_selections: self.remote_selections.clone(),
749 diagnostics: self.diagnostics.clone(),
750 language: self.language.clone(),
751 non_text_state_update_count: self.non_text_state_update_count,
752 }
753 }
754
755 #[cfg(test)]
756 pub(crate) fn as_text_snapshot(&self) -> &text::BufferSnapshot {
757 &self.text
758 }
759
760 /// Retrieve a snapshot of the buffer's raw text, without any
761 /// language-related state like the syntax tree or diagnostics.
762 pub fn text_snapshot(&self) -> text::BufferSnapshot {
763 self.text.snapshot()
764 }
765
766 /// The file associated with the buffer, if any.
767 pub fn file(&self) -> Option<&Arc<dyn File>> {
768 self.file.as_ref()
769 }
770
771 /// The version of the buffer that was last saved or reloaded from disk.
772 pub fn saved_version(&self) -> &clock::Global {
773 &self.saved_version
774 }
775
776 /// The mtime of the buffer's file when the buffer was last saved or reloaded from disk.
777 pub fn saved_mtime(&self) -> Option<SystemTime> {
778 self.saved_mtime
779 }
780
781 /// Assign a language to the buffer.
782 pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut ModelContext<Self>) {
783 self.non_text_state_update_count += 1;
784 self.syntax_map.lock().clear();
785 self.language = language;
786 self.reparse(cx);
787 cx.emit(Event::LanguageChanged);
788 }
789
790 /// Assign a language registry to the buffer. This allows the buffer to retrieve
791 /// other languages if parts of the buffer are written in different languages.
792 pub fn set_language_registry(&mut self, language_registry: Arc<LanguageRegistry>) {
793 self.syntax_map
794 .lock()
795 .set_language_registry(language_registry);
796 }
797
798 pub fn language_registry(&self) -> Option<Arc<LanguageRegistry>> {
799 self.syntax_map.lock().language_registry()
800 }
801
802 /// Assign the buffer a new [Capability].
803 pub fn set_capability(&mut self, capability: Capability, cx: &mut ModelContext<Self>) {
804 self.capability = capability;
805 cx.emit(Event::CapabilityChanged)
806 }
807
808 /// This method is called to signal that the buffer has been saved.
809 pub fn did_save(
810 &mut self,
811 version: clock::Global,
812 mtime: Option<SystemTime>,
813 cx: &mut ModelContext<Self>,
814 ) {
815 self.saved_version = version;
816 self.has_unsaved_edits
817 .set((self.saved_version().clone(), false));
818 self.has_conflict = false;
819 self.saved_mtime = mtime;
820 cx.emit(Event::Saved);
821 cx.notify();
822 }
823
824 /// Reloads the contents of the buffer from disk.
825 pub fn reload(
826 &mut self,
827 cx: &mut ModelContext<Self>,
828 ) -> oneshot::Receiver<Option<Transaction>> {
829 let (tx, rx) = futures::channel::oneshot::channel();
830 let prev_version = self.text.version();
831 self.reload_task = Some(cx.spawn(|this, mut cx| async move {
832 let Some((new_mtime, new_text)) = this.update(&mut cx, |this, cx| {
833 let file = this.file.as_ref()?.as_local()?;
834 Some((file.mtime(), file.load(cx)))
835 })?
836 else {
837 return Ok(());
838 };
839
840 let new_text = new_text.await?;
841 let diff = this
842 .update(&mut cx, |this, cx| this.diff(new_text.clone(), cx))?
843 .await;
844 this.update(&mut cx, |this, cx| {
845 if this.version() == diff.base_version {
846 this.finalize_last_transaction();
847 this.apply_diff(diff, cx);
848 tx.send(this.finalize_last_transaction().cloned()).ok();
849 this.has_conflict = false;
850 this.did_reload(this.version(), this.line_ending(), new_mtime, cx);
851 } else {
852 if !diff.edits.is_empty()
853 || this
854 .edits_since::<usize>(&diff.base_version)
855 .next()
856 .is_some()
857 {
858 this.has_conflict = true;
859 }
860
861 this.did_reload(prev_version, this.line_ending(), this.saved_mtime, cx);
862 }
863
864 this.reload_task.take();
865 })
866 }));
867 rx
868 }
869
870 /// This method is called to signal that the buffer has been reloaded.
871 pub fn did_reload(
872 &mut self,
873 version: clock::Global,
874 line_ending: LineEnding,
875 mtime: Option<SystemTime>,
876 cx: &mut ModelContext<Self>,
877 ) {
878 self.saved_version = version;
879 self.has_unsaved_edits
880 .set((self.saved_version.clone(), false));
881 self.text.set_line_ending(line_ending);
882 self.saved_mtime = mtime;
883 cx.emit(Event::Reloaded);
884 cx.notify();
885 }
886
887 /// Updates the [File] backing this buffer. This should be called when
888 /// the file has changed or has been deleted.
889 pub fn file_updated(&mut self, new_file: Arc<dyn File>, cx: &mut ModelContext<Self>) {
890 let mut file_changed = false;
891
892 if let Some(old_file) = self.file.as_ref() {
893 if new_file.path() != old_file.path() {
894 file_changed = true;
895 }
896
897 if new_file.is_deleted() {
898 if !old_file.is_deleted() {
899 file_changed = true;
900 if !self.is_dirty() {
901 cx.emit(Event::DirtyChanged);
902 }
903 }
904 } else {
905 let new_mtime = new_file.mtime();
906 if new_mtime != old_file.mtime() {
907 file_changed = true;
908
909 if !self.is_dirty() {
910 self.reload(cx).close();
911 }
912 }
913 }
914 } else {
915 file_changed = true;
916 };
917
918 self.file = Some(new_file);
919 if file_changed {
920 self.non_text_state_update_count += 1;
921 cx.emit(Event::FileHandleChanged);
922 cx.notify();
923 }
924 }
925
926 /// Returns the current diff base, see [Buffer::set_diff_base].
927 pub fn diff_base(&self) -> Option<&Rope> {
928 self.diff_base.as_ref()
929 }
930
931 /// Sets the text that will be used to compute a Git diff
932 /// against the buffer text.
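 ///
 /// A minimal sketch (marked `ignore`; `head_text` is assumed to be the file's contents
 /// at the relevant Git revision, loaded elsewhere):
 ///
 /// ```ignore
 /// buffer.set_diff_base(Some(head_text), cx);
 /// ```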
933 pub fn set_diff_base(&mut self, diff_base: Option<String>, cx: &mut ModelContext<Self>) {
934 self.diff_base = diff_base
935 .map(|mut raw_diff_base| {
936 LineEnding::normalize(&mut raw_diff_base);
937 raw_diff_base
938 })
939 .map(Rope::from);
940 self.diff_base_version += 1;
941 if let Some(recalc_task) = self.git_diff_recalc(cx) {
942 cx.spawn(|buffer, mut cx| async move {
943 recalc_task.await;
944 buffer
945 .update(&mut cx, |_, cx| {
946 cx.emit(Event::DiffBaseChanged);
947 })
948 .ok();
949 })
950 .detach();
951 }
952 }
953
954 /// Returns a number that changes each time a new diff base is set for the buffer.
955 pub fn diff_base_version(&self) -> usize {
956 self.diff_base_version
957 }
958
959 /// Recomputes the Git diff status.
960 pub fn git_diff_recalc(&mut self, cx: &mut ModelContext<Self>) -> Option<Task<()>> {
961 let diff_base = self.diff_base.clone()?;
962 let snapshot = self.snapshot();
963
964 let mut diff = self.git_diff.clone();
965 let diff = cx.background_executor().spawn(async move {
966 diff.update(&diff_base, &snapshot).await;
967 diff
968 });
969
970 Some(cx.spawn(|this, mut cx| async move {
971 let buffer_diff = diff.await;
972 this.update(&mut cx, |this, cx| {
973 this.git_diff = buffer_diff;
974 this.non_text_state_update_count += 1;
975 cx.emit(Event::DiffUpdated);
976 })
977 .ok();
978 }))
979 }
980
981 /// Returns the primary [Language] assigned to this [Buffer].
982 pub fn language(&self) -> Option<&Arc<Language>> {
983 self.language.as_ref()
984 }
985
986 /// Returns the [Language] at the given location.
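 ///
 /// A minimal sketch (marked `ignore`; `offset` is assumed to fall inside an injected
 /// region, such as a fenced code block in a Markdown buffer):
 ///
 /// ```ignore
 /// if let Some(language) = buffer.language_at(offset) {
 ///     println!("language at {offset}: {:?}", language.name());
 /// }
 /// ```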
987 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
988 let offset = position.to_offset(self);
989 self.syntax_map
990 .lock()
991 .layers_for_range(offset..offset, &self.text)
992 .last()
993 .map(|info| info.language.clone())
994 .or_else(|| self.language.clone())
995 }
996
997 /// An integer version number that accounts for all updates besides
998 /// the buffer's text itself (which is versioned via a version vector).
999 pub fn non_text_state_update_count(&self) -> usize {
1000 self.non_text_state_update_count
1001 }
1002
1003 /// Whether the buffer is being parsed in the background.
1004 #[cfg(any(test, feature = "test-support"))]
1005 pub fn is_parsing(&self) -> bool {
1006 self.parsing_in_background
1007 }
1008
1009 /// Indicates whether the buffer contains any regions that may be
1010 /// written in a language that hasn't been loaded yet.
1011 pub fn contains_unknown_injections(&self) -> bool {
1012 self.syntax_map.lock().contains_unknown_injections()
1013 }
1014
1015 #[cfg(test)]
1016 pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
1017 self.sync_parse_timeout = timeout;
1018 }
1019
1020 /// Called after an edit to synchronize the buffer's main parse tree with
1021 /// the buffer's new underlying state.
1022 ///
1023 /// Locks the syntax map and interpolates the edits since the last reparse
1024 /// into the foreground syntax tree.
1025 ///
1026 /// Then takes a stable snapshot of the syntax map before unlocking it.
1027 /// The snapshot with the interpolated edits is sent to a background thread,
1028 /// where we ask Tree-sitter to perform an incremental parse.
1029 ///
1030 /// Meanwhile, in the foreground, we block the main thread for up to 1ms
1031 /// waiting for the parse to complete. If it finishes within that timeout,
1032 /// we proceed synchronously.
1033 ///
1034 /// If we time out waiting on the parse, we spawn a second task that waits
1035 /// until the parse does complete, and we return with the interpolated tree
1036 /// still in the foreground. When the background parse completes, we call back
1037 /// into the main thread and assign the new parse state.
1038 ///
1039 /// If the buffer or grammar changed since the start of the background parse,
1040 /// initiate an additional reparse recursively. To avoid concurrent parses
1041 /// for the same buffer, we only initiate a new parse if we are not already
1042 /// parsing in the background.
1043 pub fn reparse(&mut self, cx: &mut ModelContext<Self>) {
1044 if self.parsing_in_background {
1045 return;
1046 }
1047 let language = if let Some(language) = self.language.clone() {
1048 language
1049 } else {
1050 return;
1051 };
1052
1053 let text = self.text_snapshot();
1054 let parsed_version = self.version();
1055
1056 let mut syntax_map = self.syntax_map.lock();
1057 syntax_map.interpolate(&text);
1058 let language_registry = syntax_map.language_registry();
1059 let mut syntax_snapshot = syntax_map.snapshot();
1060 drop(syntax_map);
1061
1062 let parse_task = cx.background_executor().spawn({
1063 let language = language.clone();
1064 let language_registry = language_registry.clone();
1065 async move {
1066 syntax_snapshot.reparse(&text, language_registry, language);
1067 syntax_snapshot
1068 }
1069 });
1070
1071 self.parse_status.0.send(ParseStatus::Parsing).unwrap();
1072 match cx
1073 .background_executor()
1074 .block_with_timeout(self.sync_parse_timeout, parse_task)
1075 {
1076 Ok(new_syntax_snapshot) => {
1077 self.did_finish_parsing(new_syntax_snapshot, cx);
1078 return;
1079 }
1080 Err(parse_task) => {
1081 self.parsing_in_background = true;
1082 cx.spawn(move |this, mut cx| async move {
1083 let new_syntax_map = parse_task.await;
1084 this.update(&mut cx, move |this, cx| {
1085 let grammar_changed =
1086 this.language.as_ref().map_or(true, |current_language| {
1087 !Arc::ptr_eq(&language, current_language)
1088 });
1089 let language_registry_changed = new_syntax_map
1090 .contains_unknown_injections()
1091 && language_registry.map_or(false, |registry| {
1092 registry.version() != new_syntax_map.language_registry_version()
1093 });
1094 let parse_again = language_registry_changed
1095 || grammar_changed
1096 || this.version.changed_since(&parsed_version);
1097 this.did_finish_parsing(new_syntax_map, cx);
1098 this.parsing_in_background = false;
1099 if parse_again {
1100 this.reparse(cx);
1101 }
1102 })
1103 .ok();
1104 })
1105 .detach();
1106 }
1107 }
1108 }
1109
1110 fn did_finish_parsing(&mut self, syntax_snapshot: SyntaxSnapshot, cx: &mut ModelContext<Self>) {
1111 self.non_text_state_update_count += 1;
1112 self.syntax_map.lock().did_parse(syntax_snapshot);
1113 self.request_autoindent(cx);
1114 self.parse_status.0.send(ParseStatus::Idle).unwrap();
1115 cx.emit(Event::Reparsed);
1116 cx.notify();
1117 }
1118
1119 pub fn parse_status(&self) -> watch::Receiver<ParseStatus> {
1120 self.parse_status.1.clone()
1121 }
1122
1123 /// Assign to the buffer a set of diagnostics created by a given language server.
1124 pub fn update_diagnostics(
1125 &mut self,
1126 server_id: LanguageServerId,
1127 diagnostics: DiagnosticSet,
1128 cx: &mut ModelContext<Self>,
1129 ) {
1130 let lamport_timestamp = self.text.lamport_clock.tick();
1131 let op = Operation::UpdateDiagnostics {
1132 server_id,
1133 diagnostics: diagnostics.iter().cloned().collect(),
1134 lamport_timestamp,
1135 };
1136 self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
1137 self.send_operation(op, cx);
1138 }
1139
1140 fn request_autoindent(&mut self, cx: &mut ModelContext<Self>) {
1141 if let Some(indent_sizes) = self.compute_autoindents() {
1142 let indent_sizes = cx.background_executor().spawn(indent_sizes);
1143 match cx
1144 .background_executor()
1145 .block_with_timeout(Duration::from_micros(500), indent_sizes)
1146 {
1147 Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
1148 Err(indent_sizes) => {
1149 self.pending_autoindent = Some(cx.spawn(|this, mut cx| async move {
1150 let indent_sizes = indent_sizes.await;
1151 this.update(&mut cx, |this, cx| {
1152 this.apply_autoindents(indent_sizes, cx);
1153 })
1154 .ok();
1155 }));
1156 }
1157 }
1158 } else {
1159 self.autoindent_requests.clear();
1160 }
1161 }
1162
1163 fn compute_autoindents(&self) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>>> {
1164 let max_rows_between_yields = 100;
1165 let snapshot = self.snapshot();
1166 if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
1167 return None;
1168 }
1169
1170 let autoindent_requests = self.autoindent_requests.clone();
1171 Some(async move {
1172 let mut indent_sizes = BTreeMap::new();
1173 for request in autoindent_requests {
1174 // Resolve each edited range to its row in the current buffer and in the
1175 // buffer before this batch of edits.
1176 let mut row_ranges = Vec::new();
1177 let mut old_to_new_rows = BTreeMap::new();
1178 let mut language_indent_sizes_by_new_row = Vec::new();
1179 for entry in &request.entries {
1180 let position = entry.range.start;
1181 let new_row = position.to_point(&snapshot).row;
1182 let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
1183 language_indent_sizes_by_new_row.push((new_row, entry.indent_size));
1184
1185 if !entry.first_line_is_new {
1186 let old_row = position.to_point(&request.before_edit).row;
1187 old_to_new_rows.insert(old_row, new_row);
1188 }
1189 row_ranges.push((new_row..new_end_row, entry.original_indent_column));
1190 }
1191
1192 // Build a map containing the suggested indentation for each of the edited lines
1193 // with respect to the state of the buffer before these edits. This map is keyed
1194 // by the rows for these lines in the current state of the buffer.
1195 let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
1196 let old_edited_ranges =
1197 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
1198 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1199 let mut language_indent_size = IndentSize::default();
1200 for old_edited_range in old_edited_ranges {
1201 let suggestions = request
1202 .before_edit
1203 .suggest_autoindents(old_edited_range.clone())
1204 .into_iter()
1205 .flatten();
1206 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
1207 if let Some(suggestion) = suggestion {
1208 let new_row = *old_to_new_rows.get(&old_row).unwrap();
1209
1210 // Find the indent size based on the language for this row.
1211 while let Some((row, size)) = language_indent_sizes.peek() {
1212 if *row > new_row {
1213 break;
1214 }
1215 language_indent_size = *size;
1216 language_indent_sizes.next();
1217 }
1218
1219 let suggested_indent = old_to_new_rows
1220 .get(&suggestion.basis_row)
1221 .and_then(|from_row| {
1222 Some(old_suggestions.get(from_row).copied()?.0)
1223 })
1224 .unwrap_or_else(|| {
1225 request
1226 .before_edit
1227 .indent_size_for_line(suggestion.basis_row)
1228 })
1229 .with_delta(suggestion.delta, language_indent_size);
1230 old_suggestions
1231 .insert(new_row, (suggested_indent, suggestion.within_error));
1232 }
1233 }
1234 yield_now().await;
1235 }
1236
1237 // In block mode, only compute indentation suggestions for the first line
1238 // of each insertion. Otherwise, compute suggestions for every inserted line.
1239 let new_edited_row_ranges = contiguous_ranges(
1240 row_ranges.iter().flat_map(|(range, _)| {
1241 if request.is_block_mode {
1242 range.start..range.start + 1
1243 } else {
1244 range.clone()
1245 }
1246 }),
1247 max_rows_between_yields,
1248 );
1249
1250 // Compute new suggestions for each line, but only include them in the result
1251 // if they differ from the old suggestion for that line.
1252 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1253 let mut language_indent_size = IndentSize::default();
1254 for new_edited_row_range in new_edited_row_ranges {
1255 let suggestions = snapshot
1256 .suggest_autoindents(new_edited_row_range.clone())
1257 .into_iter()
1258 .flatten();
1259 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
1260 if let Some(suggestion) = suggestion {
1261 // Find the indent size based on the language for this row.
1262 while let Some((row, size)) = language_indent_sizes.peek() {
1263 if *row > new_row {
1264 break;
1265 }
1266 language_indent_size = *size;
1267 language_indent_sizes.next();
1268 }
1269
1270 let suggested_indent = indent_sizes
1271 .get(&suggestion.basis_row)
1272 .copied()
1273 .unwrap_or_else(|| {
1274 snapshot.indent_size_for_line(suggestion.basis_row)
1275 })
1276 .with_delta(suggestion.delta, language_indent_size);
1277 if old_suggestions.get(&new_row).map_or(
1278 true,
1279 |(old_indentation, was_within_error)| {
1280 suggested_indent != *old_indentation
1281 && (!suggestion.within_error || *was_within_error)
1282 },
1283 ) {
1284 indent_sizes.insert(new_row, suggested_indent);
1285 }
1286 }
1287 }
1288 yield_now().await;
1289 }
1290
1291 // For each block of inserted text, adjust the indentation of the remaining
1292 // lines of the block by the same amount as the first line was adjusted.
1293 if request.is_block_mode {
1294 for (row_range, original_indent_column) in
1295 row_ranges
1296 .into_iter()
1297 .filter_map(|(range, original_indent_column)| {
1298 if range.len() > 1 {
1299 Some((range, original_indent_column?))
1300 } else {
1301 None
1302 }
1303 })
1304 {
1305 let new_indent = indent_sizes
1306 .get(&row_range.start)
1307 .copied()
1308 .unwrap_or_else(|| snapshot.indent_size_for_line(row_range.start));
1309 let delta = new_indent.len as i64 - original_indent_column as i64;
1310 if delta != 0 {
1311 for row in row_range.skip(1) {
1312 indent_sizes.entry(row).or_insert_with(|| {
1313 let mut size = snapshot.indent_size_for_line(row);
1314 if size.kind == new_indent.kind {
1315 match delta.cmp(&0) {
1316 Ordering::Greater => size.len += delta as u32,
1317 Ordering::Less => {
1318 size.len = size.len.saturating_sub(-delta as u32)
1319 }
1320 Ordering::Equal => {}
1321 }
1322 }
1323 size
1324 });
1325 }
1326 }
1327 }
1328 }
1329 }
1330
1331 indent_sizes
1332 })
1333 }
1334
1335 fn apply_autoindents(
1336 &mut self,
1337 indent_sizes: BTreeMap<u32, IndentSize>,
1338 cx: &mut ModelContext<Self>,
1339 ) {
1340 self.autoindent_requests.clear();
1341
1342 let edits: Vec<_> = indent_sizes
1343 .into_iter()
1344 .filter_map(|(row, indent_size)| {
1345 let current_size = indent_size_for_line(self, row);
1346 Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
1347 })
1348 .collect();
1349
1350 self.edit(edits, None, cx);
1351 }
1352
1353 /// Create a minimal edit that will cause the given row to be indented
1354 /// with the given size. After applying this edit, the length of the line
1355 /// will always be at least `new_size.len`.
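 ///
 /// A minimal sketch (marked `ignore`), derived from the rules below: growing a
 /// two-space indent to four spaces inserts two spaces at the start of the line.
 ///
 /// ```ignore
 /// let edit = Buffer::edit_for_indent_size_adjustment(
 ///     2,
 ///     IndentSize { len: 2, kind: IndentKind::Space },
 ///     IndentSize { len: 4, kind: IndentKind::Space },
 /// );
 /// assert_eq!(
 ///     edit,
 ///     Some((Point::new(2, 0)..Point::new(2, 0), "  ".to_string()))
 /// );
 /// ```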
1356 pub fn edit_for_indent_size_adjustment(
1357 row: u32,
1358 current_size: IndentSize,
1359 new_size: IndentSize,
1360 ) -> Option<(Range<Point>, String)> {
1361 if new_size.kind == current_size.kind {
1362 match new_size.len.cmp(&current_size.len) {
1363 Ordering::Greater => {
1364 let point = Point::new(row, 0);
1365 Some((
1366 point..point,
1367 iter::repeat(new_size.char())
1368 .take((new_size.len - current_size.len) as usize)
1369 .collect::<String>(),
1370 ))
1371 }
1372
1373 Ordering::Less => Some((
1374 Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
1375 String::new(),
1376 )),
1377
1378 Ordering::Equal => None,
1379 }
1380 } else {
1381 Some((
1382 Point::new(row, 0)..Point::new(row, current_size.len),
1383 iter::repeat(new_size.char())
1384 .take(new_size.len as usize)
1385 .collect::<String>(),
1386 ))
1387 }
1388 }
1389
1390 /// Spawns a background task that asynchronously computes a `Diff` between the buffer's text
1391 /// and the given new text.
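 ///
 /// A minimal sketch of the diff-then-apply pattern (marked `ignore`; assumes this runs
 /// inside a `ModelContext<Buffer>`, e.g. within `Model::update`, with `new_text: String`
 /// provided by the caller):
 ///
 /// ```ignore
 /// let diff = buffer.diff(new_text, cx);
 /// cx.spawn(|buffer, mut cx| async move {
 ///     let diff = diff.await;
 ///     buffer
 ///         .update(&mut cx, |buffer, cx| buffer.apply_diff(diff, cx))
 ///         .ok();
 /// })
 /// .detach();
 /// ```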
1392 pub fn diff(&self, mut new_text: String, cx: &AppContext) -> Task<Diff> {
1393 let old_text = self.as_rope().clone();
1394 let base_version = self.version();
1395 cx.background_executor()
1396 .spawn_labeled(*BUFFER_DIFF_TASK, async move {
1397 let old_text = old_text.to_string();
1398 let line_ending = LineEnding::detect(&new_text);
1399 LineEnding::normalize(&mut new_text);
1400
1401 let diff = TextDiff::from_chars(old_text.as_str(), new_text.as_str());
1402 let empty: Arc<str> = "".into();
1403
1404 let mut edits = Vec::new();
1405 let mut old_offset = 0;
1406 let mut new_offset = 0;
1407 let mut last_edit: Option<(Range<usize>, Range<usize>)> = None;
1408 for change in diff.iter_all_changes().map(Some).chain([None]) {
1409 if let Some(change) = &change {
1410 let len = change.value().len();
1411 match change.tag() {
1412 ChangeTag::Equal => {
1413 old_offset += len;
1414 new_offset += len;
1415 }
1416 ChangeTag::Delete => {
1417 let old_end_offset = old_offset + len;
1418 if let Some((last_old_range, _)) = &mut last_edit {
1419 last_old_range.end = old_end_offset;
1420 } else {
1421 last_edit =
1422 Some((old_offset..old_end_offset, new_offset..new_offset));
1423 }
1424 old_offset = old_end_offset;
1425 }
1426 ChangeTag::Insert => {
1427 let new_end_offset = new_offset + len;
1428 if let Some((_, last_new_range)) = &mut last_edit {
1429 last_new_range.end = new_end_offset;
1430 } else {
1431 last_edit =
1432 Some((old_offset..old_offset, new_offset..new_end_offset));
1433 }
1434 new_offset = new_end_offset;
1435 }
1436 }
1437 }
1438
1439 if let Some((old_range, new_range)) = &last_edit {
1440 if old_offset > old_range.end
1441 || new_offset > new_range.end
1442 || change.is_none()
1443 {
1444 let text = if new_range.is_empty() {
1445 empty.clone()
1446 } else {
1447 new_text[new_range.clone()].into()
1448 };
1449 edits.push((old_range.clone(), text));
1450 last_edit.take();
1451 }
1452 }
1453 }
1454
1455 Diff {
1456 base_version,
1457 line_ending,
1458 edits,
1459 }
1460 })
1461 }
1462
1463 /// Spawns a background task that searches the buffer for any whitespace
1464 /// at the ends of lines, and returns a `Diff` that removes that whitespace.
1465 pub fn remove_trailing_whitespace(&self, cx: &AppContext) -> Task<Diff> {
1466 let old_text = self.as_rope().clone();
1467 let line_ending = self.line_ending();
1468 let base_version = self.version();
1469 cx.background_executor().spawn(async move {
1470 let ranges = trailing_whitespace_ranges(&old_text);
1471 let empty = Arc::<str>::from("");
1472 Diff {
1473 base_version,
1474 line_ending,
1475 edits: ranges
1476 .into_iter()
1477 .map(|range| (range, empty.clone()))
1478 .collect(),
1479 }
1480 })
1481 }
1482
1483 /// Ensures that the buffer ends with a single newline character, and
1484 /// no other whitespace.
1485 pub fn ensure_final_newline(&mut self, cx: &mut ModelContext<Self>) {
1486 let len = self.len();
1487 let mut offset = len;
1488 for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
1489 let non_whitespace_len = chunk
1490 .trim_end_matches(|c: char| c.is_ascii_whitespace())
1491 .len();
1492 offset -= chunk.len();
1493 offset += non_whitespace_len;
1494 if non_whitespace_len != 0 {
1495 if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
1496 return;
1497 }
1498 break;
1499 }
1500 }
1501 self.edit([(offset..len, "\n")], None, cx);
1502 }
1503
1504 /// Applies a diff to the buffer. If the buffer has changed since the given diff was
1505 /// calculated, then adjust the diff to account for those changes, and discard any
1506 /// parts of the diff that conflict with those changes.
1507 pub fn apply_diff(&mut self, diff: Diff, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
1508 // Check for any edits to the buffer that have occurred since this diff
1509 // was computed.
1510 let snapshot = self.snapshot();
1511 let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
1512 let mut delta = 0;
1513 let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
1514 while let Some(edit_since) = edits_since.peek() {
1515 // If the edit occurs after a diff hunk, then it does not
1516 // affect that hunk.
1517 if edit_since.old.start > range.end {
1518 break;
1519 }
1520 // If the edit precedes the diff hunk, then adjust the hunk
1521 // to reflect the edit.
1522 else if edit_since.old.end < range.start {
1523 delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
1524 edits_since.next();
1525 }
1526 // If the edit intersects a diff hunk, then discard that hunk.
1527 else {
1528 return None;
1529 }
1530 }
1531
1532 let start = (range.start as i64 + delta) as usize;
1533 let end = (range.end as i64 + delta) as usize;
1534 Some((start..end, new_text))
1535 });
1536
1537 self.start_transaction();
1538 self.text.set_line_ending(diff.line_ending);
1539 self.edit(adjusted_edits, None, cx);
1540 self.end_transaction(cx)
1541 }
1542
1543 fn has_unsaved_edits(&self) -> bool {
1544 let (last_version, has_unsaved_edits) = self.has_unsaved_edits.take();
1545
1546 if last_version == self.version {
1547 self.has_unsaved_edits
1548 .set((last_version, has_unsaved_edits));
1549 return has_unsaved_edits;
1550 }
1551
1552 let has_edits = self.has_edits_since(&self.saved_version);
1553 self.has_unsaved_edits
1554 .set((self.version.clone(), has_edits));
1555 has_edits
1556 }
1557
1558 /// Checks if the buffer has unsaved changes.
1559 pub fn is_dirty(&self) -> bool {
1560 self.has_conflict
1561 || self.has_unsaved_edits()
1562 || self
1563 .file
1564 .as_ref()
1565 .map_or(false, |file| file.is_deleted() || !file.is_created())
1566 }
1567
1568 /// Checks if the buffer and its file have both changed since the buffer
1569 /// was last saved or reloaded.
1570 pub fn has_conflict(&self) -> bool {
1571 self.has_conflict
1572 || self.file.as_ref().map_or(false, |file| {
1573 file.mtime() > self.saved_mtime && self.has_unsaved_edits()
1574 })
1575 }
1576
1577 /// Gets a [`Subscription`] that tracks all of the changes to the buffer's text.
1578 pub fn subscribe(&mut self) -> Subscription {
1579 self.text.subscribe()
1580 }
1581
1582 /// Starts a transaction, if one is not already in-progress. When undoing or
1583 /// redoing edits, all of the edits performed within a transaction are undone
1584 /// or redone together.
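 ///
 /// A minimal sketch of grouping edits into one undo step (marked `ignore`; assumes a
 /// `buffer: &mut Buffer` and a GPUI `cx: &mut ModelContext<Buffer>`, e.g. inside
 /// `Model::update`):
 ///
 /// ```ignore
 /// buffer.start_transaction();
 /// buffer.edit([(0..0, "fn ")], None, cx);
 /// buffer.edit([(3..3, "main")], None, cx);
 /// buffer.end_transaction(cx);
 /// // A single undo now reverts both edits.
 /// ```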
1585 pub fn start_transaction(&mut self) -> Option<TransactionId> {
1586 self.start_transaction_at(Instant::now())
1587 }
1588
1589 /// Starts a transaction, providing the current time. Subsequent transactions
1590 /// that occur within a short period of time will be grouped together. This
1591 /// is controlled by the buffer's undo grouping duration.
1592 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
1593 self.transaction_depth += 1;
1594 if self.was_dirty_before_starting_transaction.is_none() {
1595 self.was_dirty_before_starting_transaction = Some(self.is_dirty());
1596 }
1597 self.text.start_transaction_at(now)
1598 }
1599
1600 /// Terminates the current transaction, if this is the outermost transaction.
1601 pub fn end_transaction(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
1602 self.end_transaction_at(Instant::now(), cx)
1603 }
1604
1605 /// Terminates the current transaction, providing the current time. Subsequent transactions
1606 /// that occur within a short period of time will be grouped together. This
1607 /// is controlled by the buffer's undo grouping duration.
1608 pub fn end_transaction_at(
1609 &mut self,
1610 now: Instant,
1611 cx: &mut ModelContext<Self>,
1612 ) -> Option<TransactionId> {
1613 assert!(self.transaction_depth > 0);
1614 self.transaction_depth -= 1;
1615 let was_dirty = if self.transaction_depth == 0 {
1616 self.was_dirty_before_starting_transaction.take().unwrap()
1617 } else {
1618 false
1619 };
1620 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
1621 self.did_edit(&start_version, was_dirty, cx);
1622 Some(transaction_id)
1623 } else {
1624 None
1625 }
1626 }
1627
1628 /// Manually add a transaction to the buffer's undo history.
1629 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
1630 self.text.push_transaction(transaction, now);
1631 }
1632
1633 /// Prevent the last transaction from being grouped with any subsequent transactions,
1634 /// even if they occur within the buffer's undo grouping duration.
1635 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
1636 self.text.finalize_last_transaction()
1637 }
1638
1639 /// Manually group all changes since a given transaction.
1640 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
1641 self.text.group_until_transaction(transaction_id);
1642 }
1643
1644 /// Manually remove a transaction from the buffer's undo history
1645 pub fn forget_transaction(&mut self, transaction_id: TransactionId) {
1646 self.text.forget_transaction(transaction_id);
1647 }
1648
1649 /// Manually merge two adjacent transactions in the buffer's undo history.
1650 pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
1651 self.text.merge_transactions(transaction, destination);
1652 }
1653
1654 /// Waits for the buffer to receive operations with the given timestamps.
1655 pub fn wait_for_edits(
1656 &mut self,
1657 edit_ids: impl IntoIterator<Item = clock::Lamport>,
1658 ) -> impl Future<Output = Result<()>> {
1659 self.text.wait_for_edits(edit_ids)
1660 }
1661
1662 /// Waits for the buffer to receive the operations necessary for resolving the given anchors.
1663 pub fn wait_for_anchors(
1664 &mut self,
1665 anchors: impl IntoIterator<Item = Anchor>,
1666 ) -> impl 'static + Future<Output = Result<()>> {
1667 self.text.wait_for_anchors(anchors)
1668 }
1669
1670 /// Waits for the buffer to receive operations up to the given version.
1671 pub fn wait_for_version(&mut self, version: clock::Global) -> impl Future<Output = Result<()>> {
1672 self.text.wait_for_version(version)
1673 }
1674
1675 /// Forces all futures returned by [`Buffer::wait_for_version`], [`Buffer::wait_for_edits`], or
1676 /// [`Buffer::wait_for_anchors`] to resolve with an error.
1677 pub fn give_up_waiting(&mut self) {
1678 self.text.give_up_waiting();
1679 }
1680
1681 /// Stores a set of selections that should be broadcasted to all of the buffer's replicas.
1682 pub fn set_active_selections(
1683 &mut self,
1684 selections: Arc<[Selection<Anchor>]>,
1685 line_mode: bool,
1686 cursor_shape: CursorShape,
1687 cx: &mut ModelContext<Self>,
1688 ) {
1689 let lamport_timestamp = self.text.lamport_clock.tick();
1690 self.remote_selections.insert(
1691 self.text.replica_id(),
1692 SelectionSet {
1693 selections: selections.clone(),
1694 lamport_timestamp,
1695 line_mode,
1696 cursor_shape,
1697 },
1698 );
1699 self.send_operation(
1700 Operation::UpdateSelections {
1701 selections,
1702 line_mode,
1703 lamport_timestamp,
1704 cursor_shape,
1705 },
1706 cx,
1707 );
1708 self.non_text_state_update_count += 1;
1709 cx.notify();
1710 }
1711
1712 /// Clears the selections, so that other replicas of the buffer do not see any selections for
1713 /// this replica.
1714 pub fn remove_active_selections(&mut self, cx: &mut ModelContext<Self>) {
1715 if self
1716 .remote_selections
1717 .get(&self.text.replica_id())
1718 .map_or(true, |set| !set.selections.is_empty())
1719 {
1720 self.set_active_selections(Arc::from([]), false, Default::default(), cx);
1721 }
1722 }
1723
1724 /// Replaces the buffer's entire text.
1725 pub fn set_text<T>(&mut self, text: T, cx: &mut ModelContext<Self>) -> Option<clock::Lamport>
1726 where
1727 T: Into<Arc<str>>,
1728 {
1729 self.autoindent_requests.clear();
1730 self.edit([(0..self.len(), text)], None, cx)
1731 }
1732
1733 /// Applies the given edits to the buffer. Each edit is specified as a range of text to
1734 /// delete, and a string of text to insert at that location.
1735 ///
1736 /// If an [`AutoindentMode`] is provided, then the buffer will enqueue an auto-indent
1737 /// request for the edited ranges, which will be processed when the buffer finishes
1738 /// parsing.
1739 ///
1740 /// Parsing takes place at the end of a transaction, and may be performed synchronously
1741 /// or asynchronously, depending on the changes.
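///
/// # Example
///
/// A minimal sketch of a call site (not a doctest; `buffer` and `cx` are assumed to come
/// from the surrounding gpui context, and the `EachLine` variant of [`AutoindentMode`] is
/// assumed for illustration):
///
/// ```ignore
/// // Replace the first four columns of row 1 and auto-indent the edited line.
/// buffer.edit(
///     [(Point::new(1, 0)..Point::new(1, 4), "let x = 1;")],
///     Some(AutoindentMode::EachLine),
///     cx,
/// );
/// ```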
1742 pub fn edit<I, S, T>(
1743 &mut self,
1744 edits_iter: I,
1745 autoindent_mode: Option<AutoindentMode>,
1746 cx: &mut ModelContext<Self>,
1747 ) -> Option<clock::Lamport>
1748 where
1749 I: IntoIterator<Item = (Range<S>, T)>,
1750 S: ToOffset,
1751 T: Into<Arc<str>>,
1752 {
1753 // Skip invalid edits and coalesce contiguous ones.
1754 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
1755 for (range, new_text) in edits_iter {
1756 let mut range = range.start.to_offset(self)..range.end.to_offset(self);
1757 if range.start > range.end {
1758 mem::swap(&mut range.start, &mut range.end);
1759 }
1760 let new_text = new_text.into();
1761 if !new_text.is_empty() || !range.is_empty() {
1762 if let Some((prev_range, prev_text)) = edits.last_mut() {
1763 if prev_range.end >= range.start {
1764 prev_range.end = cmp::max(prev_range.end, range.end);
1765 *prev_text = format!("{prev_text}{new_text}").into();
1766 } else {
1767 edits.push((range, new_text));
1768 }
1769 } else {
1770 edits.push((range, new_text));
1771 }
1772 }
1773 }
1774 if edits.is_empty() {
1775 return None;
1776 }
1777
1778 self.start_transaction();
1779 self.pending_autoindent.take();
1780 let autoindent_request = autoindent_mode
1781 .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
1782
1783 let edit_operation = self.text.edit(edits.iter().cloned());
1784 let edit_id = edit_operation.timestamp();
1785
1786 if let Some((before_edit, mode)) = autoindent_request {
1787 let mut delta = 0isize;
1788 let entries = edits
1789 .into_iter()
1790 .enumerate()
1791 .zip(&edit_operation.as_edit().unwrap().new_text)
1792 .map(|((ix, (range, _)), new_text)| {
1793 let new_text_length = new_text.len();
1794 let old_start = range.start.to_point(&before_edit);
1795 let new_start = (delta + range.start as isize) as usize;
1796 delta += new_text_length as isize - (range.end as isize - range.start as isize);
1797
1798 let mut range_of_insertion_to_indent = 0..new_text_length;
1799 let mut first_line_is_new = false;
1800 let mut original_indent_column = None;
1801
1802 // When inserting an entire line at the beginning of an existing line,
1803 // treat the insertion as new.
1804 if new_text.contains('\n')
1805 && old_start.column <= before_edit.indent_size_for_line(old_start.row).len
1806 {
1807 first_line_is_new = true;
1808 }
1809
1810 // When inserting text starting with a newline, avoid auto-indenting the
1811 // previous line.
1812 if new_text.starts_with('\n') {
1813 range_of_insertion_to_indent.start += 1;
1814 first_line_is_new = true;
1815 }
1816
1817 // Avoid auto-indenting after the insertion.
1818 if let AutoindentMode::Block {
1819 original_indent_columns,
1820 } = &mode
1821 {
1822 original_indent_column =
1823 Some(original_indent_columns.get(ix).copied().unwrap_or_else(|| {
1824 indent_size_for_text(
1825 new_text[range_of_insertion_to_indent.clone()].chars(),
1826 )
1827 .len
1828 }));
1829 if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
1830 range_of_insertion_to_indent.end -= 1;
1831 }
1832 }
1833
1834 AutoindentRequestEntry {
1835 first_line_is_new,
1836 original_indent_column,
1837 indent_size: before_edit.language_indent_size_at(range.start, cx),
1838 range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
1839 ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
1840 }
1841 })
1842 .collect();
1843
1844 self.autoindent_requests.push(Arc::new(AutoindentRequest {
1845 before_edit,
1846 entries,
1847 is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
1848 }));
1849 }
1850
1851 self.end_transaction(cx);
1852 self.send_operation(Operation::Buffer(edit_operation), cx);
1853 Some(edit_id)
1854 }
1855
1856 fn did_edit(
1857 &mut self,
1858 old_version: &clock::Global,
1859 was_dirty: bool,
1860 cx: &mut ModelContext<Self>,
1861 ) {
1862 if self.edits_since::<usize>(old_version).next().is_none() {
1863 return;
1864 }
1865
1866 self.reparse(cx);
1867
1868 cx.emit(Event::Edited);
1869 if was_dirty != self.is_dirty() {
1870 cx.emit(Event::DirtyChanged);
1871 }
1872 cx.notify();
1873 }
1874
1875 /// Applies the given remote operations to the buffer.
1876 pub fn apply_ops<I: IntoIterator<Item = Operation>>(
1877 &mut self,
1878 ops: I,
1879 cx: &mut ModelContext<Self>,
1880 ) -> Result<()> {
1881 self.pending_autoindent.take();
1882 let was_dirty = self.is_dirty();
1883 let old_version = self.version.clone();
1884 let mut deferred_ops = Vec::new();
1885 let buffer_ops = ops
1886 .into_iter()
1887 .filter_map(|op| match op {
1888 Operation::Buffer(op) => Some(op),
1889 _ => {
1890 if self.can_apply_op(&op) {
1891 self.apply_op(op, cx);
1892 } else {
1893 deferred_ops.push(op);
1894 }
1895 None
1896 }
1897 })
1898 .collect::<Vec<_>>();
1899 self.text.apply_ops(buffer_ops)?;
1900 self.deferred_ops.insert(deferred_ops);
1901 self.flush_deferred_ops(cx);
1902 self.did_edit(&old_version, was_dirty, cx);
1903 // Notify independently of whether the buffer was edited as the operations could include a
1904 // selection update.
1905 cx.notify();
1906 Ok(())
1907 }
1908
1909 fn flush_deferred_ops(&mut self, cx: &mut ModelContext<Self>) {
1910 let mut deferred_ops = Vec::new();
1911 for op in self.deferred_ops.drain().iter().cloned() {
1912 if self.can_apply_op(&op) {
1913 self.apply_op(op, cx);
1914 } else {
1915 deferred_ops.push(op);
1916 }
1917 }
1918 self.deferred_ops.insert(deferred_ops);
1919 }
1920
1921 pub fn has_deferred_ops(&self) -> bool {
1922 !self.deferred_ops.is_empty() || self.text.has_deferred_ops()
1923 }
1924
1925 fn can_apply_op(&self, operation: &Operation) -> bool {
1926 match operation {
1927 Operation::Buffer(_) => {
1928 unreachable!("buffer operations should never be applied at this layer")
1929 }
1930 Operation::UpdateDiagnostics {
1931 diagnostics: diagnostic_set,
1932 ..
1933 } => diagnostic_set.iter().all(|diagnostic| {
1934 self.text.can_resolve(&diagnostic.range.start)
1935 && self.text.can_resolve(&diagnostic.range.end)
1936 }),
1937 Operation::UpdateSelections { selections, .. } => selections
1938 .iter()
1939 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
1940 Operation::UpdateCompletionTriggers { .. } => true,
1941 }
1942 }
1943
1944 fn apply_op(&mut self, operation: Operation, cx: &mut ModelContext<Self>) {
1945 match operation {
1946 Operation::Buffer(_) => {
1947 unreachable!("buffer operations should never be applied at this layer")
1948 }
1949 Operation::UpdateDiagnostics {
1950 server_id,
1951 diagnostics: diagnostic_set,
1952 lamport_timestamp,
1953 } => {
1954 let snapshot = self.snapshot();
1955 self.apply_diagnostic_update(
1956 server_id,
1957 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
1958 lamport_timestamp,
1959 cx,
1960 );
1961 }
1962 Operation::UpdateSelections {
1963 selections,
1964 lamport_timestamp,
1965 line_mode,
1966 cursor_shape,
1967 } => {
1968 if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id) {
1969 if set.lamport_timestamp > lamport_timestamp {
1970 return;
1971 }
1972 }
1973
1974 self.remote_selections.insert(
1975 lamport_timestamp.replica_id,
1976 SelectionSet {
1977 selections,
1978 lamport_timestamp,
1979 line_mode,
1980 cursor_shape,
1981 },
1982 );
1983 self.text.lamport_clock.observe(lamport_timestamp);
1984 self.non_text_state_update_count += 1;
1985 }
1986 Operation::UpdateCompletionTriggers {
1987 triggers,
1988 lamport_timestamp,
1989 } => {
1990 self.completion_triggers = triggers;
1991 self.text.lamport_clock.observe(lamport_timestamp);
1992 }
1993 }
1994 }
1995
1996 fn apply_diagnostic_update(
1997 &mut self,
1998 server_id: LanguageServerId,
1999 diagnostics: DiagnosticSet,
2000 lamport_timestamp: clock::Lamport,
2001 cx: &mut ModelContext<Self>,
2002 ) {
2003 if lamport_timestamp > self.diagnostics_timestamp {
2004 let ix = self.diagnostics.binary_search_by_key(&server_id, |e| e.0);
2005 if diagnostics.len() == 0 {
2006 if let Ok(ix) = ix {
2007 self.diagnostics.remove(ix);
2008 }
2009 } else {
2010 match ix {
2011 Err(ix) => self.diagnostics.insert(ix, (server_id, diagnostics)),
2012 Ok(ix) => self.diagnostics[ix].1 = diagnostics,
2013 };
2014 }
2015 self.diagnostics_timestamp = lamport_timestamp;
2016 self.non_text_state_update_count += 1;
2017 self.text.lamport_clock.observe(lamport_timestamp);
2018 cx.notify();
2019 cx.emit(Event::DiagnosticsUpdated);
2020 }
2021 }
2022
2023 fn send_operation(&mut self, operation: Operation, cx: &mut ModelContext<Self>) {
2024 cx.emit(Event::Operation(operation));
2025 }
2026
2027 /// Removes the selections for a given peer.
2028 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut ModelContext<Self>) {
2029 self.remote_selections.remove(&replica_id);
2030 cx.notify();
2031 }
2032
2033 /// Undoes the most recent transaction.
2034 pub fn undo(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
2035 let was_dirty = self.is_dirty();
2036 let old_version = self.version.clone();
2037
2038 if let Some((transaction_id, operation)) = self.text.undo() {
2039 self.send_operation(Operation::Buffer(operation), cx);
2040 self.did_edit(&old_version, was_dirty, cx);
2041 Some(transaction_id)
2042 } else {
2043 None
2044 }
2045 }
2046
2047 /// Manually undoes a specific transaction in the buffer's undo history.
2048 pub fn undo_transaction(
2049 &mut self,
2050 transaction_id: TransactionId,
2051 cx: &mut ModelContext<Self>,
2052 ) -> bool {
2053 let was_dirty = self.is_dirty();
2054 let old_version = self.version.clone();
2055 if let Some(operation) = self.text.undo_transaction(transaction_id) {
2056 self.send_operation(Operation::Buffer(operation), cx);
2057 self.did_edit(&old_version, was_dirty, cx);
2058 true
2059 } else {
2060 false
2061 }
2062 }
2063
2064 /// Manually undoes all changes after a given transaction in the buffer's undo history.
2065 pub fn undo_to_transaction(
2066 &mut self,
2067 transaction_id: TransactionId,
2068 cx: &mut ModelContext<Self>,
2069 ) -> bool {
2070 let was_dirty = self.is_dirty();
2071 let old_version = self.version.clone();
2072
2073 let operations = self.text.undo_to_transaction(transaction_id);
2074 let undone = !operations.is_empty();
2075 for operation in operations {
2076 self.send_operation(Operation::Buffer(operation), cx);
2077 }
2078 if undone {
2079 self.did_edit(&old_version, was_dirty, cx)
2080 }
2081 undone
2082 }
2083
2084 /// Redoes the most recently undone transaction.
2085 pub fn redo(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
2086 let was_dirty = self.is_dirty();
2087 let old_version = self.version.clone();
2088
2089 if let Some((transaction_id, operation)) = self.text.redo() {
2090 self.send_operation(Operation::Buffer(operation), cx);
2091 self.did_edit(&old_version, was_dirty, cx);
2092 Some(transaction_id)
2093 } else {
2094 None
2095 }
2096 }
2097
2098 /// Manually redoes all changes up to a given transaction in the buffer's redo history.
2099 pub fn redo_to_transaction(
2100 &mut self,
2101 transaction_id: TransactionId,
2102 cx: &mut ModelContext<Self>,
2103 ) -> bool {
2104 let was_dirty = self.is_dirty();
2105 let old_version = self.version.clone();
2106
2107 let operations = self.text.redo_to_transaction(transaction_id);
2108 let redone = !operations.is_empty();
2109 for operation in operations {
2110 self.send_operation(Operation::Buffer(operation), cx);
2111 }
2112 if redone {
2113 self.did_edit(&old_version, was_dirty, cx)
2114 }
2115 redone
2116 }
2117
2118 /// Override current completion triggers with the user-provided completion triggers.
2119 pub fn set_completion_triggers(&mut self, triggers: Vec<String>, cx: &mut ModelContext<Self>) {
2120 self.completion_triggers.clone_from(&triggers);
2121 self.completion_triggers_timestamp = self.text.lamport_clock.tick();
2122 self.send_operation(
2123 Operation::UpdateCompletionTriggers {
2124 triggers,
2125 lamport_timestamp: self.completion_triggers_timestamp,
2126 },
2127 cx,
2128 );
2129 cx.notify();
2130 }
2131
2132 /// Returns a list of strings which trigger a completion menu for this language.
2133 /// Usually this is driven by the language server, which returns a list of trigger characters for completions.
2134 pub fn completion_triggers(&self) -> &[String] {
2135 &self.completion_triggers
2136 }
2137}
2138
2139#[doc(hidden)]
2140#[cfg(any(test, feature = "test-support"))]
2141impl Buffer {
2142 pub fn edit_via_marked_text(
2143 &mut self,
2144 marked_string: &str,
2145 autoindent_mode: Option<AutoindentMode>,
2146 cx: &mut ModelContext<Self>,
2147 ) {
2148 let edits = self.edits_for_marked_text(marked_string);
2149 self.edit(edits, autoindent_mode, cx);
2150 }
2151
2152 pub fn set_group_interval(&mut self, group_interval: Duration) {
2153 self.text.set_group_interval(group_interval);
2154 }
2155
2156 pub fn randomly_edit<T>(
2157 &mut self,
2158 rng: &mut T,
2159 old_range_count: usize,
2160 cx: &mut ModelContext<Self>,
2161 ) where
2162 T: rand::Rng,
2163 {
2164 let mut edits: Vec<(Range<usize>, String)> = Vec::new();
2165 let mut last_end = None;
2166 for _ in 0..old_range_count {
2167 if last_end.map_or(false, |last_end| last_end >= self.len()) {
2168 break;
2169 }
2170
2171 let new_start = last_end.map_or(0, |last_end| last_end + 1);
2172 let mut range = self.random_byte_range(new_start, rng);
2173 if rng.gen_bool(0.2) {
2174 mem::swap(&mut range.start, &mut range.end);
2175 }
2176 last_end = Some(range.end);
2177
2178 let new_text_len = rng.gen_range(0..10);
2179 let new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
2180
2181 edits.push((range, new_text));
2182 }
2183 log::info!("mutating buffer {} with {:?}", self.replica_id(), edits);
2184 self.edit(edits, None, cx);
2185 }
2186
2187 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut ModelContext<Self>) {
2188 let was_dirty = self.is_dirty();
2189 let old_version = self.version.clone();
2190
2191 let ops = self.text.randomly_undo_redo(rng);
2192 if !ops.is_empty() {
2193 for op in ops {
2194 self.send_operation(Operation::Buffer(op), cx);
2195 self.did_edit(&old_version, was_dirty, cx);
2196 }
2197 }
2198 }
2199}
2200
2201impl EventEmitter<Event> for Buffer {}
2202
2203impl Deref for Buffer {
2204 type Target = TextBuffer;
2205
2206 fn deref(&self) -> &Self::Target {
2207 &self.text
2208 }
2209}
2210
2211impl BufferSnapshot {
2212 /// Returns [`IndentSize`] for a given line that respects user settings and language preferences.
2213 pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
2214 indent_size_for_line(self, row)
2215 }
2216 /// Returns [`IndentSize`] for a given position that respects user settings
2217 /// and language preferences.
2218 pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &AppContext) -> IndentSize {
2219 let settings = language_settings(self.language_at(position), self.file(), cx);
2220 if settings.hard_tabs {
2221 IndentSize::tab()
2222 } else {
2223 IndentSize::spaces(settings.tab_size.get())
2224 }
2225 }
2226
2227 /// Retrieve the suggested indent size for all of the given rows. The unit of indentation
2228 /// is passed in as `single_indent_size`.
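///
/// A rough usage sketch (not a doctest; `buffer` is assumed to exist in the surrounding
/// context, and the rows and indent unit shown are illustrative):
///
/// ```ignore
/// let snapshot = buffer.snapshot();
/// // Suggest indents for rows 1 through 3, using 4 spaces as one indent unit.
/// let indents = snapshot.suggested_indents(1..=3, IndentSize::spaces(4));
/// for (row, indent) in indents {
///     println!("row {row}: {} columns", indent.len);
/// }
/// ```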
2229 pub fn suggested_indents(
2230 &self,
2231 rows: impl Iterator<Item = u32>,
2232 single_indent_size: IndentSize,
2233 ) -> BTreeMap<u32, IndentSize> {
2234 let mut result = BTreeMap::new();
2235
2236 for row_range in contiguous_ranges(rows, 10) {
2237 let suggestions = match self.suggest_autoindents(row_range.clone()) {
2238 Some(suggestions) => suggestions,
2239 _ => break,
2240 };
2241
2242 for (row, suggestion) in row_range.zip(suggestions) {
2243 let indent_size = if let Some(suggestion) = suggestion {
2244 result
2245 .get(&suggestion.basis_row)
2246 .copied()
2247 .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
2248 .with_delta(suggestion.delta, single_indent_size)
2249 } else {
2250 self.indent_size_for_line(row)
2251 };
2252
2253 result.insert(row, indent_size);
2254 }
2255 }
2256
2257 result
2258 }
2259
2260 fn suggest_autoindents(
2261 &self,
2262 row_range: Range<u32>,
2263 ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
2264 let config = &self.language.as_ref()?.config;
2265 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
2266
2267 // Find the suggested indentation ranges based on the syntax tree.
2268 let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
2269 let end = Point::new(row_range.end, 0);
2270 let range = (start..end).to_offset(&self.text);
2271 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2272 Some(&grammar.indents_config.as_ref()?.query)
2273 });
2274 let indent_configs = matches
2275 .grammars()
2276 .iter()
2277 .map(|grammar| grammar.indents_config.as_ref().unwrap())
2278 .collect::<Vec<_>>();
2279
2280 let mut indent_ranges = Vec::<Range<Point>>::new();
2281 let mut outdent_positions = Vec::<Point>::new();
2282 while let Some(mat) = matches.peek() {
2283 let mut start: Option<Point> = None;
2284 let mut end: Option<Point> = None;
2285
2286 let config = &indent_configs[mat.grammar_index];
2287 for capture in mat.captures {
2288 if capture.index == config.indent_capture_ix {
2289 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
2290 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
2291 } else if Some(capture.index) == config.start_capture_ix {
2292 start = Some(Point::from_ts_point(capture.node.end_position()));
2293 } else if Some(capture.index) == config.end_capture_ix {
2294 end = Some(Point::from_ts_point(capture.node.start_position()));
2295 } else if Some(capture.index) == config.outdent_capture_ix {
2296 outdent_positions.push(Point::from_ts_point(capture.node.start_position()));
2297 }
2298 }
2299
2300 matches.advance();
2301 if let Some((start, end)) = start.zip(end) {
2302 if start.row == end.row {
2303 continue;
2304 }
2305
2306 let range = start..end;
2307 match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
2308 Err(ix) => indent_ranges.insert(ix, range),
2309 Ok(ix) => {
2310 let prev_range = &mut indent_ranges[ix];
2311 prev_range.end = prev_range.end.max(range.end);
2312 }
2313 }
2314 }
2315 }
2316
2317 let mut error_ranges = Vec::<Range<Point>>::new();
2318 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2319 Some(&grammar.error_query)
2320 });
2321 while let Some(mat) = matches.peek() {
2322 let node = mat.captures[0].node;
2323 let start = Point::from_ts_point(node.start_position());
2324 let end = Point::from_ts_point(node.end_position());
2325 let range = start..end;
2326 let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
2327 Ok(ix) | Err(ix) => ix,
2328 };
2329 let mut end_ix = ix;
2330 while let Some(existing_range) = error_ranges.get(end_ix) {
2331 if existing_range.end < end {
2332 end_ix += 1;
2333 } else {
2334 break;
2335 }
2336 }
2337 error_ranges.splice(ix..end_ix, [range]);
2338 matches.advance();
2339 }
2340
2341 outdent_positions.sort();
2342 for outdent_position in outdent_positions {
2343 // Find the innermost indent range containing this outdent position
2344 // and set its end to the outdent position.
2345 if let Some(range_to_truncate) = indent_ranges
2346 .iter_mut()
2347 .filter(|indent_range| indent_range.contains(&outdent_position))
2348 .last()
2349 {
2350 range_to_truncate.end = outdent_position;
2351 }
2352 }
2353
2354 // Find the suggested indentation increases and decreases based on regexes.
2355 let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
2356 self.for_each_line(
2357 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
2358 ..Point::new(row_range.end, 0),
2359 |row, line| {
2360 if config
2361 .decrease_indent_pattern
2362 .as_ref()
2363 .map_or(false, |regex| regex.is_match(line))
2364 {
2365 indent_change_rows.push((row, Ordering::Less));
2366 }
2367 if config
2368 .increase_indent_pattern
2369 .as_ref()
2370 .map_or(false, |regex| regex.is_match(line))
2371 {
2372 indent_change_rows.push((row + 1, Ordering::Greater));
2373 }
2374 },
2375 );
2376
2377 let mut indent_changes = indent_change_rows.into_iter().peekable();
2378 let mut prev_row = if config.auto_indent_using_last_non_empty_line {
2379 prev_non_blank_row.unwrap_or(0)
2380 } else {
2381 row_range.start.saturating_sub(1)
2382 };
2383 let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
2384 Some(row_range.map(move |row| {
2385 let row_start = Point::new(row, self.indent_size_for_line(row).len);
2386
2387 let mut indent_from_prev_row = false;
2388 let mut outdent_from_prev_row = false;
2389 let mut outdent_to_row = u32::MAX;
2390
2391 while let Some((indent_row, delta)) = indent_changes.peek() {
2392 match indent_row.cmp(&row) {
2393 Ordering::Equal => match delta {
2394 Ordering::Less => outdent_from_prev_row = true,
2395 Ordering::Greater => indent_from_prev_row = true,
2396 _ => {}
2397 },
2398
2399 Ordering::Greater => break,
2400 Ordering::Less => {}
2401 }
2402
2403 indent_changes.next();
2404 }
2405
2406 for range in &indent_ranges {
2407 if range.start.row >= row {
2408 break;
2409 }
2410 if range.start.row == prev_row && range.end > row_start {
2411 indent_from_prev_row = true;
2412 }
2413 if range.end > prev_row_start && range.end <= row_start {
2414 outdent_to_row = outdent_to_row.min(range.start.row);
2415 }
2416 }
2417
2418 let within_error = error_ranges
2419 .iter()
2420 .any(|e| e.start.row < row && e.end > row_start);
2421
2422 let suggestion = if outdent_to_row == prev_row
2423 || (outdent_from_prev_row && indent_from_prev_row)
2424 {
2425 Some(IndentSuggestion {
2426 basis_row: prev_row,
2427 delta: Ordering::Equal,
2428 within_error,
2429 })
2430 } else if indent_from_prev_row {
2431 Some(IndentSuggestion {
2432 basis_row: prev_row,
2433 delta: Ordering::Greater,
2434 within_error,
2435 })
2436 } else if outdent_to_row < prev_row {
2437 Some(IndentSuggestion {
2438 basis_row: outdent_to_row,
2439 delta: Ordering::Equal,
2440 within_error,
2441 })
2442 } else if outdent_from_prev_row {
2443 Some(IndentSuggestion {
2444 basis_row: prev_row,
2445 delta: Ordering::Less,
2446 within_error,
2447 })
2448 } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
2449 {
2450 Some(IndentSuggestion {
2451 basis_row: prev_row,
2452 delta: Ordering::Equal,
2453 within_error,
2454 })
2455 } else {
2456 None
2457 };
2458
2459 prev_row = row;
2460 prev_row_start = row_start;
2461 suggestion
2462 }))
2463 }
2464
2465 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
2466 while row > 0 {
2467 row -= 1;
2468 if !self.is_line_blank(row) {
2469 return Some(row);
2470 }
2471 }
2472 None
2473 }
2474
2475 /// Iterates over chunks of text in the given range of the buffer. Text is chunked
2476 /// in an arbitrary way due to being stored in a [`Rope`](text::Rope). The text is also
2477 /// returned in chunks where each chunk has a single syntax highlighting style and
2478 /// diagnostic status.
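///
/// A minimal sketch of consuming the iterator (not a doctest; `snapshot` is assumed to be
/// a [`BufferSnapshot`]):
///
/// ```ignore
/// let mut text = String::new();
/// for chunk in snapshot.chunks(0..snapshot.len(), true) {
///     // Each chunk carries at most one syntax highlight id and one diagnostic status.
///     if let Some(highlight_id) = chunk.syntax_highlight_id {
///         // Resolve `highlight_id` against a `SyntaxTheme` to obtain a style.
///     }
///     text.push_str(chunk.text);
/// }
/// ```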
2479 pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks {
2480 let range = range.start.to_offset(self)..range.end.to_offset(self);
2481
2482 let mut syntax = None;
2483 let mut diagnostic_endpoints = Vec::new();
2484 if language_aware {
2485 let captures = self.syntax.captures(range.clone(), &self.text, |grammar| {
2486 grammar.highlights_query.as_ref()
2487 });
2488 let highlight_maps = captures
2489 .grammars()
2490 .into_iter()
2491 .map(|grammar| grammar.highlight_map())
2492 .collect();
2493 syntax = Some((captures, highlight_maps));
2494 for entry in self.diagnostics_in_range::<_, usize>(range.clone(), false) {
2495 diagnostic_endpoints.push(DiagnosticEndpoint {
2496 offset: entry.range.start,
2497 is_start: true,
2498 severity: entry.diagnostic.severity,
2499 is_unnecessary: entry.diagnostic.is_unnecessary,
2500 });
2501 diagnostic_endpoints.push(DiagnosticEndpoint {
2502 offset: entry.range.end,
2503 is_start: false,
2504 severity: entry.diagnostic.severity,
2505 is_unnecessary: entry.diagnostic.is_unnecessary,
2506 });
2507 }
2508 diagnostic_endpoints
2509 .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
2510 }
2511
2512 BufferChunks::new(self.text.as_rope(), range, syntax, diagnostic_endpoints)
2513 }
2514
2515 /// Invokes the given callback for each line of text in the given range of the buffer.
2516 /// Uses callback to avoid allocating a string for each line.
2517 fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
2518 let mut line = String::new();
2519 let mut row = range.start.row;
2520 for chunk in self
2521 .as_rope()
2522 .chunks_in_range(range.to_offset(self))
2523 .chain(["\n"])
2524 {
2525 for (newline_ix, text) in chunk.split('\n').enumerate() {
2526 if newline_ix > 0 {
2527 callback(row, &line);
2528 row += 1;
2529 line.clear();
2530 }
2531 line.push_str(text);
2532 }
2533 }
2534 }
2535
2536 /// Iterates over every [`SyntaxLayer`] in the buffer.
2537 pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayer> + '_ {
2538 self.syntax.layers_for_range(0..self.len(), &self.text)
2539 }
2540
2541 pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayer> {
2542 let offset = position.to_offset(self);
2543 self.syntax
2544 .layers_for_range(offset..offset, &self.text)
2545 .filter(|l| l.node().end_byte() > offset)
2546 .last()
2547 }
2548
2549 /// Returns the main [Language] of the buffer.
2550 pub fn language(&self) -> Option<&Arc<Language>> {
2551 self.language.as_ref()
2552 }
2553
2554 /// Returns the [Language] at the given location.
2555 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
2556 self.syntax_layer_at(position)
2557 .map(|info| info.language)
2558 .or(self.language.as_ref())
2559 }
2560
2561 /// Returns the settings for the language at the given location.
2562 pub fn settings_at<'a, D: ToOffset>(
2563 &self,
2564 position: D,
2565 cx: &'a AppContext,
2566 ) -> &'a LanguageSettings {
2567 language_settings(self.language_at(position), self.file.as_ref(), cx)
2568 }
2569
2570 /// Returns the [LanguageScope] at the given location.
2571 pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
2572 let offset = position.to_offset(self);
2573 let mut scope = None;
2574 let mut smallest_range: Option<Range<usize>> = None;
2575
2576 // Use the layer that has the smallest node intersecting the given point.
2577 for layer in self.syntax.layers_for_range(offset..offset, &self.text) {
2578 let mut cursor = layer.node().walk();
2579
2580 let mut range = None;
2581 loop {
2582 let child_range = cursor.node().byte_range();
2583 if !child_range.to_inclusive().contains(&offset) {
2584 break;
2585 }
2586
2587 range = Some(child_range);
2588 if cursor.goto_first_child_for_byte(offset).is_none() {
2589 break;
2590 }
2591 }
2592
2593 if let Some(range) = range {
2594 if smallest_range
2595 .as_ref()
2596 .map_or(true, |smallest_range| range.len() < smallest_range.len())
2597 {
2598 smallest_range = Some(range);
2599 scope = Some(LanguageScope {
2600 language: layer.language.clone(),
2601 override_id: layer.override_id(offset, &self.text),
2602 });
2603 }
2604 }
2605 }
2606
2607 scope.or_else(|| {
2608 self.language.clone().map(|language| LanguageScope {
2609 language,
2610 override_id: None,
2611 })
2612 })
2613 }
2614
2615 /// Returns a tuple of the range and character kind of the word
2616 /// surrounding the given position.
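///
/// For example (a sketch; `offset` is an assumed cursor position within the buffer):
///
/// ```ignore
/// let (range, kind) = snapshot.surrounding_word(offset);
/// let word: String = snapshot.text_for_range(range).collect();
/// // `kind` reports the character class of the surrounding word, when one exists.
/// ```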
2617 pub fn surrounding_word<T: ToOffset>(&self, start: T) -> (Range<usize>, Option<CharKind>) {
2618 let mut start = start.to_offset(self);
2619 let mut end = start;
2620 let mut next_chars = self.chars_at(start).peekable();
2621 let mut prev_chars = self.reversed_chars_at(start).peekable();
2622
2623 let scope = self.language_scope_at(start);
2624 let kind = |c| char_kind(&scope, c);
2625 let word_kind = cmp::max(
2626 prev_chars.peek().copied().map(kind),
2627 next_chars.peek().copied().map(kind),
2628 );
2629
2630 for ch in prev_chars {
2631 if Some(kind(ch)) == word_kind && ch != '\n' {
2632 start -= ch.len_utf8();
2633 } else {
2634 break;
2635 }
2636 }
2637
2638 for ch in next_chars {
2639 if Some(kind(ch)) == word_kind && ch != '\n' {
2640 end += ch.len_utf8();
2641 } else {
2642 break;
2643 }
2644 }
2645
2646 (start..end, word_kind)
2647 }
2648
2649 /// Returns the range for the closest syntax node enclosing the given range.
2650 pub fn range_for_syntax_ancestor<T: ToOffset>(&self, range: Range<T>) -> Option<Range<usize>> {
2651 let range = range.start.to_offset(self)..range.end.to_offset(self);
2652 let mut result: Option<Range<usize>> = None;
2653 'outer: for layer in self.syntax.layers_for_range(range.clone(), &self.text) {
2654 let mut cursor = layer.node().walk();
2655
2656 // Descend to the first leaf that touches the start of the range,
2657 // and if the range is non-empty, extends beyond the start.
2658 while cursor.goto_first_child_for_byte(range.start).is_some() {
2659 if !range.is_empty() && cursor.node().end_byte() == range.start {
2660 cursor.goto_next_sibling();
2661 }
2662 }
2663
2664 // Ascend to the smallest ancestor that strictly contains the range.
2665 loop {
2666 let node_range = cursor.node().byte_range();
2667 if node_range.start <= range.start
2668 && node_range.end >= range.end
2669 && node_range.len() > range.len()
2670 {
2671 break;
2672 }
2673 if !cursor.goto_parent() {
2674 continue 'outer;
2675 }
2676 }
2677
2678 let left_node = cursor.node();
2679 let mut layer_result = left_node.byte_range();
2680
2681 // For an empty range, try to find another node immediately to the right of the range.
2682 if left_node.end_byte() == range.start {
2683 let mut right_node = None;
2684 while !cursor.goto_next_sibling() {
2685 if !cursor.goto_parent() {
2686 break;
2687 }
2688 }
2689
2690 while cursor.node().start_byte() == range.start {
2691 right_node = Some(cursor.node());
2692 if !cursor.goto_first_child() {
2693 break;
2694 }
2695 }
2696
2697 // If there is a candidate node on both sides of the (empty) range, then
2698 // decide between the two by favoring a named node over an anonymous token.
2699 // If both nodes are the same in that regard, favor the right one.
2700 if let Some(right_node) = right_node {
2701 if right_node.is_named() || !left_node.is_named() {
2702 layer_result = right_node.byte_range();
2703 }
2704 }
2705 }
2706
2707 if let Some(previous_result) = &result {
2708 if previous_result.len() < layer_result.len() {
2709 continue;
2710 }
2711 }
2712 result = Some(layer_result);
2713 }
2714
2715 result
2716 }
2717
2718 /// Returns the outline for the buffer.
2719 ///
2720 /// This method allows passing an optional [SyntaxTheme] to
2721 /// syntax-highlight the returned symbols.
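///
/// A sketch of rendering the outline as plain text (assuming [`Outline`] exposes its
/// `items`; no theme is passed, so no highlight styles are attached):
///
/// ```ignore
/// if let Some(outline) = snapshot.outline(None) {
///     for item in &outline.items {
///         println!("{}{}", "  ".repeat(item.depth), item.text);
///     }
/// }
/// ```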
2722 pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Option<Outline<Anchor>> {
2723 self.outline_items_containing(0..self.len(), true, theme)
2724 .map(Outline::new)
2725 }
2726
2727 /// Returns all the symbols that contain the given position.
2728 ///
2729 /// This method allows passing an optional [SyntaxTheme] to
2730 /// syntax-highlight the returned symbols.
2731 pub fn symbols_containing<T: ToOffset>(
2732 &self,
2733 position: T,
2734 theme: Option<&SyntaxTheme>,
2735 ) -> Option<Vec<OutlineItem<Anchor>>> {
2736 let position = position.to_offset(self);
2737 let mut items = self.outline_items_containing(
2738 position.saturating_sub(1)..self.len().min(position + 1),
2739 false,
2740 theme,
2741 )?;
2742 let mut prev_depth = None;
2743 items.retain(|item| {
2744 let result = prev_depth.map_or(true, |prev_depth| item.depth > prev_depth);
2745 prev_depth = Some(item.depth);
2746 result
2747 });
2748 Some(items)
2749 }
2750
2751 pub fn outline_items_containing<T: ToOffset>(
2752 &self,
2753 range: Range<T>,
2754 include_extra_context: bool,
2755 theme: Option<&SyntaxTheme>,
2756 ) -> Option<Vec<OutlineItem<Anchor>>> {
2757 let range = range.to_offset(self);
2758 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2759 grammar.outline_config.as_ref().map(|c| &c.query)
2760 });
2761 let configs = matches
2762 .grammars()
2763 .iter()
2764 .map(|g| g.outline_config.as_ref().unwrap())
2765 .collect::<Vec<_>>();
2766
2767 let mut items = Vec::new();
2768 while let Some(mat) = matches.peek() {
2769 let config = &configs[mat.grammar_index];
2770 let item_node = mat.captures.iter().find_map(|cap| {
2771 if cap.index == config.item_capture_ix {
2772 Some(cap.node)
2773 } else {
2774 None
2775 }
2776 })?;
2777
2778 let item_range = item_node.byte_range();
2779 if item_range.end < range.start || item_range.start > range.end {
2780 matches.advance();
2781 continue;
2782 }
2783
2784 let mut open_index = None;
2785 let mut close_index = None;
2786
2787 let mut buffer_ranges = Vec::new();
2788 for capture in mat.captures {
2789 let node_is_name;
2790 if capture.index == config.name_capture_ix {
2791 node_is_name = true;
2792 } else if Some(capture.index) == config.context_capture_ix
2793 || (Some(capture.index) == config.extra_context_capture_ix
2794 && include_extra_context)
2795 {
2796 node_is_name = false;
2797 } else {
2798 if Some(capture.index) == config.open_capture_ix {
2799 open_index = Some(capture.node.end_byte());
2800 } else if Some(capture.index) == config.close_capture_ix {
2801 close_index = Some(capture.node.start_byte());
2802 }
2803
2804 continue;
2805 }
2806
2807 let mut range = capture.node.start_byte()..capture.node.end_byte();
2808 let start = capture.node.start_position();
2809 if capture.node.end_position().row > start.row {
2810 range.end =
2811 range.start + self.line_len(start.row as u32) as usize - start.column;
2812 }
2813
2814 if !range.is_empty() {
2815 buffer_ranges.push((range, node_is_name));
2816 }
2817 }
2818
2819 if buffer_ranges.is_empty() {
2820 matches.advance();
2821 continue;
2822 }
2823
2824 let mut text = String::new();
2825 let mut highlight_ranges = Vec::new();
2826 let mut name_ranges = Vec::new();
2827 let mut chunks = self.chunks(
2828 buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end,
2829 true,
2830 );
2831 let mut last_buffer_range_end = 0;
2832 for (buffer_range, is_name) in buffer_ranges {
2833 if !text.is_empty() && buffer_range.start > last_buffer_range_end {
2834 text.push(' ');
2835 }
2836 last_buffer_range_end = buffer_range.end;
2837 if is_name {
2838 let mut start = text.len();
2839 let end = start + buffer_range.len();
2840
2841 // When multiple names are captured, the matchable text
2842 // includes the whitespace in between the names.
2843 if !name_ranges.is_empty() {
2844 start -= 1;
2845 }
2846
2847 name_ranges.push(start..end);
2848 }
2849
2850 let mut offset = buffer_range.start;
2851 chunks.seek(offset);
2852 for mut chunk in chunks.by_ref() {
2853 if chunk.text.len() > buffer_range.end - offset {
2854 chunk.text = &chunk.text[0..(buffer_range.end - offset)];
2855 offset = buffer_range.end;
2856 } else {
2857 offset += chunk.text.len();
2858 }
2859 let style = chunk
2860 .syntax_highlight_id
2861 .zip(theme)
2862 .and_then(|(highlight, theme)| highlight.style(theme));
2863 if let Some(style) = style {
2864 let start = text.len();
2865 let end = start + chunk.text.len();
2866 highlight_ranges.push((start..end, style));
2867 }
2868 text.push_str(chunk.text);
2869 if offset >= buffer_range.end {
2870 break;
2871 }
2872 }
2873 }
2874
2875 matches.advance();
2876
2877 items.push(OutlineItem {
2878 depth: 0, // We'll calculate the depth later
2879 range: item_range,
2880 text,
2881 highlight_ranges,
2882 name_ranges,
2883 body_range: open_index.zip(close_index).map(|(start, end)| start..end),
2884 });
2885 }
2886
2887 items.sort_by_key(|item| (item.range.start, Reverse(item.range.end)));
2888
2889 // Assign depths based on containment relationships and convert to anchors.
2890 let mut item_ends_stack = Vec::<usize>::new();
2891 let mut anchor_items = Vec::new();
2892 for item in items {
2893 while let Some(last_end) = item_ends_stack.last().copied() {
2894 if last_end < item.range.end {
2895 item_ends_stack.pop();
2896 } else {
2897 break;
2898 }
2899 }
2900
2901 anchor_items.push(OutlineItem {
2902 depth: item_ends_stack.len(),
2903 range: self.anchor_after(item.range.start)..self.anchor_before(item.range.end),
2904 text: item.text,
2905 highlight_ranges: item.highlight_ranges,
2906 name_ranges: item.name_ranges,
2907 body_range: item.body_range.map(|body_range| {
2908 self.anchor_after(body_range.start)..self.anchor_before(body_range.end)
2909 }),
2910 });
2911 item_ends_stack.push(item.range.end);
2912 }
2913
2914 Some(anchor_items)
2915 }
2916
2917 /// For each grammar in the language, runs the provided
2918 /// [tree_sitter::Query] against the given range.
2919 pub fn matches(
2920 &self,
2921 range: Range<usize>,
2922 query: fn(&Grammar) -> Option<&tree_sitter::Query>,
2923 ) -> SyntaxMapMatches {
2924 self.syntax.matches(range, self, query)
2925 }
2926
2927 /// Returns bracket range pairs overlapping or adjacent to `range`.
2928 pub fn bracket_ranges<T: ToOffset>(
2929 &self,
2930 range: Range<T>,
2931 ) -> impl Iterator<Item = (Range<usize>, Range<usize>)> + '_ {
2932 // Find bracket pairs that *inclusively* contain the given range.
2933 let range = range.start.to_offset(self).saturating_sub(1)
2934 ..self.len().min(range.end.to_offset(self) + 1);
2935
2936 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2937 grammar.brackets_config.as_ref().map(|c| &c.query)
2938 });
2939 let configs = matches
2940 .grammars()
2941 .iter()
2942 .map(|grammar| grammar.brackets_config.as_ref().unwrap())
2943 .collect::<Vec<_>>();
2944
2945 iter::from_fn(move || {
2946 while let Some(mat) = matches.peek() {
2947 let mut open = None;
2948 let mut close = None;
2949 let config = &configs[mat.grammar_index];
2950 for capture in mat.captures {
2951 if capture.index == config.open_capture_ix {
2952 open = Some(capture.node.byte_range());
2953 } else if capture.index == config.close_capture_ix {
2954 close = Some(capture.node.byte_range());
2955 }
2956 }
2957
2958 matches.advance();
2959
2960 let Some((open, close)) = open.zip(close) else {
2961 continue;
2962 };
2963
2964 let bracket_range = open.start..=close.end;
2965 if !bracket_range.overlaps(&range) {
2966 continue;
2967 }
2968
2969 return Some((open, close));
2970 }
2971 None
2972 })
2973 }
2974
2975 /// Returns enclosing bracket ranges containing the given range
2976 pub fn enclosing_bracket_ranges<T: ToOffset>(
2977 &self,
2978 range: Range<T>,
2979 ) -> impl Iterator<Item = (Range<usize>, Range<usize>)> + '_ {
2980 let range = range.start.to_offset(self)..range.end.to_offset(self);
2981
2982 self.bracket_ranges(range.clone())
2983 .filter(move |(open, close)| open.start <= range.start && close.end >= range.end)
2984 }
2985
2986 /// Returns the smallest enclosing bracket ranges containing the given range, or `None` if no brackets contain the range.
2987 ///
2988 /// A `range_filter` can optionally be passed to filter the bracket ranges to consider.
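///
/// A sketch of how a filter might be supplied (`selection` is an assumed offset range):
///
/// ```ignore
/// // Ignore bracket pairs whose open and close ranges touch, e.g. `()`.
/// let filter: &dyn Fn(Range<usize>, Range<usize>) -> bool =
///     &|open, close| close.start > open.end;
/// let innermost = snapshot.innermost_enclosing_bracket_ranges(selection, Some(filter));
/// ```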
2989 pub fn innermost_enclosing_bracket_ranges<T: ToOffset>(
2990 &self,
2991 range: Range<T>,
2992 range_filter: Option<&dyn Fn(Range<usize>, Range<usize>) -> bool>,
2993 ) -> Option<(Range<usize>, Range<usize>)> {
2994 let range = range.start.to_offset(self)..range.end.to_offset(self);
2995
2996 // Get the ranges of the innermost pair of brackets.
2997 let mut result: Option<(Range<usize>, Range<usize>)> = None;
2998
2999 for (open, close) in self.enclosing_bracket_ranges(range.clone()) {
3000 if let Some(range_filter) = range_filter {
3001 if !range_filter(open.clone(), close.clone()) {
3002 continue;
3003 }
3004 }
3005
3006 let len = close.end - open.start;
3007
3008 if let Some((existing_open, existing_close)) = &result {
3009 let existing_len = existing_close.end - existing_open.start;
3010 if len > existing_len {
3011 continue;
3012 }
3013 }
3014
3015 result = Some((open, close));
3016 }
3017
3018 result
3019 }
3020
3021 /// Returns anchor ranges for any matches of the redaction query.
3022 /// The buffer can be associated with multiple languages, and the redaction query associated with each
3023 /// will be run on the relevant section of the buffer.
3024 pub fn redacted_ranges<T: ToOffset>(
3025 &self,
3026 range: Range<T>,
3027 ) -> impl Iterator<Item = Range<usize>> + '_ {
3028 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
3029 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
3030 grammar
3031 .redactions_config
3032 .as_ref()
3033 .map(|config| &config.query)
3034 });
3035
3036 let configs = syntax_matches
3037 .grammars()
3038 .iter()
3039 .map(|grammar| grammar.redactions_config.as_ref())
3040 .collect::<Vec<_>>();
3041
3042 iter::from_fn(move || {
3043 let redacted_range = syntax_matches
3044 .peek()
3045 .and_then(|mat| {
3046 configs[mat.grammar_index].and_then(|config| {
3047 mat.captures
3048 .iter()
3049 .find(|capture| capture.index == config.redaction_capture_ix)
3050 })
3051 })
3052 .map(|mat| mat.node.byte_range());
3053 syntax_matches.advance();
3054 redacted_range
3055 })
3056 }
3057
3058 pub fn runnable_ranges(
3059 &self,
3060 range: Range<Anchor>,
3061 ) -> impl Iterator<Item = RunnableRange> + '_ {
3062 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
3063
3064 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
3065 grammar.runnable_config.as_ref().map(|config| &config.query)
3066 });
3067
3068 let test_configs = syntax_matches
3069 .grammars()
3070 .iter()
3071 .map(|grammar| grammar.runnable_config.as_ref())
3072 .collect::<Vec<_>>();
3073
3074 iter::from_fn(move || loop {
3075 let mat = syntax_matches.peek()?;
3076
3077 let test_range = test_configs[mat.grammar_index].and_then(|test_configs| {
3078 let mut run_range = None;
3079 let full_range = mat.captures.iter().fold(
3080 Range {
3081 start: usize::MAX,
3082 end: 0,
3083 },
3084 |mut acc, next| {
3085 let byte_range = next.node.byte_range();
3086 if acc.start > byte_range.start {
3087 acc.start = byte_range.start;
3088 }
3089 if acc.end < byte_range.end {
3090 acc.end = byte_range.end;
3091 }
3092 acc
3093 },
3094 );
3095 if full_range.start > full_range.end {
3096 // We did not find a full spanning range of this match.
3097 return None;
3098 }
3099 let extra_captures: SmallVec<[_; 1]> =
3100 SmallVec::from_iter(mat.captures.iter().filter_map(|capture| {
3101 test_configs
3102 .extra_captures
3103 .get(capture.index as usize)
3104 .cloned()
3105 .and_then(|tag_name| match tag_name {
3106 RunnableCapture::Named(name) => {
3107 Some((capture.node.byte_range(), name))
3108 }
3109 RunnableCapture::Run => {
3110 let _ = run_range.insert(capture.node.byte_range());
3111 None
3112 }
3113 })
3114 }));
3115 let run_range = run_range?;
3116 let tags = test_configs
3117 .query
3118 .property_settings(mat.pattern_index)
3119 .iter()
3120 .filter_map(|property| {
3121 if *property.key == *"tag" {
3122 property
3123 .value
3124 .as_ref()
3125 .map(|value| RunnableTag(value.to_string().into()))
3126 } else {
3127 None
3128 }
3129 })
3130 .collect();
3131 let extra_captures = extra_captures
3132 .into_iter()
3133 .map(|(range, name)| {
3134 (
3135 name.to_string(),
3136 self.text_for_range(range.clone()).collect::<String>(),
3137 )
3138 })
3139 .collect();
3140 // All tags should have the same range.
3141 Some(RunnableRange {
3142 run_range,
3143 full_range,
3144 runnable: Runnable {
3145 tags,
3146 language: mat.language,
3147 buffer: self.remote_id(),
3148 },
3149 extra_captures,
3150 buffer_id: self.remote_id(),
3151 })
3152 });
3153
3154 syntax_matches.advance();
3155 if test_range.is_some() {
3156 // It's fine for us to short-circuit on .peek()? returning None. We don't want to return None from this iter if we
3157 // had a capture that did not contain a run marker, hence we'll just loop around for the next capture.
3158 return test_range;
3159 }
3160 })
3161 }
3162
3163 pub fn indent_guides_in_range(
3164 &self,
3165 range: Range<Anchor>,
3166 ignore_disabled_for_language: bool,
3167 cx: &AppContext,
3168 ) -> Vec<IndentGuide> {
3169 let language_settings = language_settings(self.language(), self.file.as_ref(), cx);
3170 let settings = language_settings.indent_guides;
3171 if !ignore_disabled_for_language && !settings.enabled {
3172 return Vec::new();
3173 }
3174 let tab_size = language_settings.tab_size.get() as u32;
3175
3176 let start_row = range.start.to_point(self).row;
3177 let end_row = range.end.to_point(self).row;
3178 let row_range = start_row..end_row + 1;
3179
3180 let mut row_indents = self.line_indents_in_row_range(row_range.clone());
3181
3182 let mut result_vec = Vec::new();
3183 let mut indent_stack = SmallVec::<[IndentGuide; 8]>::new();
3184
3185 while let Some((first_row, mut line_indent)) = row_indents.next() {
3186 let current_depth = indent_stack.len() as u32;
3187
3188 // When encountering an empty line, continue until a line with a useful indent is found,
3189 // then add to the indent stack with the depth found.
3190 let mut found_indent = false;
3191 let mut last_row = first_row;
3192 if line_indent.is_line_empty() {
3193 let mut trailing_row = end_row;
3194 while !found_indent {
3195 let (target_row, new_line_indent) =
3196 if let Some(display_row) = row_indents.next() {
3197 display_row
3198 } else {
3199 // This means we reached the end of the given range and found empty lines at the end.
3200 // We need to traverse further until we find a non-empty line to know if we need to add
3201 // an indent guide for the last visible indent.
3202 trailing_row += 1;
3203
3204 const TRAILING_ROW_SEARCH_LIMIT: u32 = 25;
3205 if trailing_row > self.max_point().row
3206 || trailing_row > end_row + TRAILING_ROW_SEARCH_LIMIT
3207 {
3208 break;
3209 }
3210 let new_line_indent = self.line_indent_for_row(trailing_row);
3211 (trailing_row, new_line_indent)
3212 };
3213
3214 if new_line_indent.is_line_empty() {
3215 continue;
3216 }
3217 last_row = target_row.min(end_row);
3218 line_indent = new_line_indent;
3219 found_indent = true;
3220 break;
3221 }
3222 } else {
3223 found_indent = true
3224 }
3225
3226 let depth = if found_indent {
3227 line_indent.len(tab_size) / tab_size
3228 + ((line_indent.len(tab_size) % tab_size) > 0) as u32
3229 } else {
3230 current_depth
3231 };
3232
3233 if depth < current_depth {
3234 for _ in 0..(current_depth - depth) {
3235 let mut indent = indent_stack.pop().unwrap();
3236 if last_row != first_row {
3237 // In this case, we landed on an empty row, had to seek forward,
3238 // and discovered that the indent we were on is ending.
3239 // This means that the last display row must
3240 // be on line that ends this indent range, so we
3241 // should display the range up to the first non-empty line
3242 indent.end_row = first_row.saturating_sub(1);
3243 }
3244
3245 result_vec.push(indent)
3246 }
3247 } else if depth > current_depth {
3248 for next_depth in current_depth..depth {
3249 indent_stack.push(IndentGuide {
3250 buffer_id: self.remote_id(),
3251 start_row: first_row,
3252 end_row: last_row,
3253 depth: next_depth,
3254 tab_size,
3255 settings,
3256 });
3257 }
3258 }
3259
3260 for indent in indent_stack.iter_mut() {
3261 indent.end_row = last_row;
3262 }
3263 }
3264
3265 result_vec.extend(indent_stack);
3266
3267 result_vec
3268 }
3269
3270 pub async fn enclosing_indent(
3271 &self,
3272 mut buffer_row: BufferRow,
3273 ) -> Option<(Range<BufferRow>, LineIndent)> {
3274 let max_row = self.max_point().row;
3275 if buffer_row >= max_row {
3276 return None;
3277 }
3278
3279 let mut target_indent = self.line_indent_for_row(buffer_row);
3280
3281 // If the current row is at the start of an indented block, we want to return this
3282 // block as the enclosing indent.
3283 if !target_indent.is_line_empty() && buffer_row < max_row {
3284 let next_line_indent = self.line_indent_for_row(buffer_row + 1);
3285 if !next_line_indent.is_line_empty()
3286 && target_indent.raw_len() < next_line_indent.raw_len()
3287 {
3288 target_indent = next_line_indent;
3289 buffer_row += 1;
3290 }
3291 }
3292
3293 const SEARCH_ROW_LIMIT: u32 = 25000;
3294 const SEARCH_WHITESPACE_ROW_LIMIT: u32 = 2500;
3295 const YIELD_INTERVAL: u32 = 100;
3296
3297 let mut accessed_row_counter = 0;
3298
3299 // If the current row is blank, search for the nearest non-empty lines above and below
3300 if target_indent.is_line_empty() {
3301 let start = buffer_row.saturating_sub(SEARCH_WHITESPACE_ROW_LIMIT);
3302 let end = (max_row + 1).min(buffer_row + SEARCH_WHITESPACE_ROW_LIMIT);
3303
3304 let mut non_empty_line_above = None;
3305 for (row, indent) in self
3306 .text
3307 .reversed_line_indents_in_row_range(start..buffer_row)
3308 {
3309 accessed_row_counter += 1;
3310 if accessed_row_counter == YIELD_INTERVAL {
3311 accessed_row_counter = 0;
3312 yield_now().await;
3313 }
3314 if !indent.is_line_empty() {
3315 non_empty_line_above = Some((row, indent));
3316 break;
3317 }
3318 }
3319
3320 let mut non_empty_line_below = None;
3321 for (row, indent) in self.text.line_indents_in_row_range((buffer_row + 1)..end) {
3322 accessed_row_counter += 1;
3323 if accessed_row_counter == YIELD_INTERVAL {
3324 accessed_row_counter = 0;
3325 yield_now().await;
3326 }
3327 if !indent.is_line_empty() {
3328 non_empty_line_below = Some((row, indent));
3329 break;
3330 }
3331 }
3332
3333 let (row, indent) = match (non_empty_line_above, non_empty_line_below) {
3334 (Some((above_row, above_indent)), Some((below_row, below_indent))) => {
3335 if above_indent.raw_len() >= below_indent.raw_len() {
3336 (above_row, above_indent)
3337 } else {
3338 (below_row, below_indent)
3339 }
3340 }
3341 (Some(above), None) => above,
3342 (None, Some(below)) => below,
3343 _ => return None,
3344 };
3345
3346 target_indent = indent;
3347 buffer_row = row;
3348 }
3349
3350 let start = buffer_row.saturating_sub(SEARCH_ROW_LIMIT);
3351 let end = (max_row + 1).min(buffer_row + SEARCH_ROW_LIMIT);
3352
3353 let mut start_indent = None;
3354 for (row, indent) in self
3355 .text
3356 .reversed_line_indents_in_row_range(start..buffer_row)
3357 {
3358 accessed_row_counter += 1;
3359 if accessed_row_counter == YIELD_INTERVAL {
3360 accessed_row_counter = 0;
3361 yield_now().await;
3362 }
3363 if !indent.is_line_empty() && indent.raw_len() < target_indent.raw_len() {
3364 start_indent = Some((row, indent));
3365 break;
3366 }
3367 }
3368 let (start_row, start_indent_size) = start_indent?;
3369
3370 let mut end_indent = (end, None);
3371 for (row, indent) in self.text.line_indents_in_row_range((buffer_row + 1)..end) {
3372 accessed_row_counter += 1;
3373 if accessed_row_counter == YIELD_INTERVAL {
3374 accessed_row_counter = 0;
3375 yield_now().await;
3376 }
3377 if !indent.is_line_empty() && indent.raw_len() < target_indent.raw_len() {
3378 end_indent = (row.saturating_sub(1), Some(indent));
3379 break;
3380 }
3381 }
3382 let (end_row, end_indent_size) = end_indent;
3383
3384 let indent = if let Some(end_indent_size) = end_indent_size {
3385 if start_indent_size.raw_len() > end_indent_size.raw_len() {
3386 start_indent_size
3387 } else {
3388 end_indent_size
3389 }
3390 } else {
3391 start_indent_size
3392 };
3393
3394 Some((start_row..end_row, indent))
3395 }
3396
3397 /// Returns selections for remote peers intersecting the given range.
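///
/// A sketch of iterating every remote selection in the buffer (assuming the range
/// `Anchor::MIN..Anchor::MAX` covers the whole buffer):
///
/// ```ignore
/// for (replica_id, line_mode, cursor_shape, selections) in
///     snapshot.selections_in_range(Anchor::MIN..Anchor::MAX, false)
/// {
///     for selection in selections {
///         let range = selection.start.to_offset(&snapshot)..selection.end.to_offset(&snapshot);
///         // Render `range` for `replica_id`, honoring `line_mode` and `cursor_shape`.
///     }
/// }
/// ```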
3398 #[allow(clippy::type_complexity)]
3399 pub fn selections_in_range(
3400 &self,
3401 range: Range<Anchor>,
3402 include_local: bool,
3403 ) -> impl Iterator<
3404 Item = (
3405 ReplicaId,
3406 bool,
3407 CursorShape,
3408 impl Iterator<Item = &Selection<Anchor>> + '_,
3409 ),
3410 > + '_ {
3411 self.remote_selections
3412 .iter()
3413 .filter(move |(replica_id, set)| {
3414 (include_local || **replica_id != self.text.replica_id())
3415 && !set.selections.is_empty()
3416 })
3417 .map(move |(replica_id, set)| {
3418 let start_ix = match set.selections.binary_search_by(|probe| {
3419 probe.end.cmp(&range.start, self).then(Ordering::Greater)
3420 }) {
3421 Ok(ix) | Err(ix) => ix,
3422 };
3423 let end_ix = match set.selections.binary_search_by(|probe| {
3424 probe.start.cmp(&range.end, self).then(Ordering::Less)
3425 }) {
3426 Ok(ix) | Err(ix) => ix,
3427 };
3428
3429 (
3430 *replica_id,
3431 set.line_mode,
3432 set.cursor_shape,
3433 set.selections[start_ix..end_ix].iter(),
3434 )
3435 })
3436 }
3437
3438 /// Whether the buffer contains any git changes.
3439 pub fn has_git_diff(&self) -> bool {
3440 !self.git_diff.is_empty()
3441 }
3442
3443 /// Returns all the Git diff hunks intersecting the given
3444 /// row range.
3445 pub fn git_diff_hunks_in_row_range(
3446 &self,
3447 range: Range<BufferRow>,
3448 ) -> impl '_ + Iterator<Item = git::diff::DiffHunk<u32>> {
3449 self.git_diff.hunks_in_row_range(range, self)
3450 }
3451
3452 /// Returns all the Git diff hunks intersecting the given
3453 /// range.
3454 pub fn git_diff_hunks_intersecting_range(
3455 &self,
3456 range: Range<Anchor>,
3457 ) -> impl '_ + Iterator<Item = git::diff::DiffHunk<u32>> {
3458 self.git_diff.hunks_intersecting_range(range, self)
3459 }
3460
3461 /// Returns all the Git diff hunks intersecting the given
3462 /// range, in reverse order.
3463 pub fn git_diff_hunks_intersecting_range_rev(
3464 &self,
3465 range: Range<Anchor>,
3466 ) -> impl '_ + Iterator<Item = git::diff::DiffHunk<u32>> {
3467 self.git_diff.hunks_intersecting_range_rev(range, self)
3468 }
3469
3470 /// Returns whether the buffer contains any diagnostics.
3471 pub fn has_diagnostics(&self) -> bool {
3472 !self.diagnostics.is_empty()
3473 }
3474
3475 /// Returns all the diagnostics intersecting the given range.
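///
/// For example, counting errors over the whole buffer (a sketch; byte offsets are used
/// both for the query range and for the resolved entries):
///
/// ```ignore
/// let error_count = snapshot
///     .diagnostics_in_range::<_, usize>(0..snapshot.len(), false)
///     .filter(|entry| entry.diagnostic.severity == DiagnosticSeverity::ERROR)
///     .count();
/// ```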
3476 pub fn diagnostics_in_range<'a, T, O>(
3477 &'a self,
3478 search_range: Range<T>,
3479 reversed: bool,
3480 ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
3481 where
3482 T: 'a + Clone + ToOffset,
3483 O: 'a + FromAnchor + Ord,
3484 {
3485 let mut iterators: Vec<_> = self
3486 .diagnostics
3487 .iter()
3488 .map(|(_, collection)| {
3489 collection
3490 .range::<T, O>(search_range.clone(), self, true, reversed)
3491 .peekable()
3492 })
3493 .collect();
3494
3495 std::iter::from_fn(move || {
3496 let (next_ix, _) = iterators
3497 .iter_mut()
3498 .enumerate()
3499 .flat_map(|(ix, iter)| Some((ix, iter.peek()?)))
3500 .min_by(|(_, a), (_, b)| {
3501 let cmp = a
3502 .range
3503 .start
3504 .cmp(&b.range.start)
3505 // when range is equal, sort by diagnostic severity
3506 .then(a.diagnostic.severity.cmp(&b.diagnostic.severity))
3507 // and stabilize order with group_id
3508 .then(a.diagnostic.group_id.cmp(&b.diagnostic.group_id));
3509 if reversed {
3510 cmp.reverse()
3511 } else {
3512 cmp
3513 }
3514 })?;
3515 iterators[next_ix].next()
3516 })
3517 }
3518
3519 /// Returns all the diagnostic groups associated with the given
3520 /// language server id. If no language server id is provided,
3521 /// all diagnostic groups are returned.
    pub fn diagnostic_groups(
        &self,
        language_server_id: Option<LanguageServerId>,
    ) -> Vec<(LanguageServerId, DiagnosticGroup<Anchor>)> {
        let mut groups = Vec::new();

        if let Some(language_server_id) = language_server_id {
            if let Ok(ix) = self
                .diagnostics
                .binary_search_by_key(&language_server_id, |e| e.0)
            {
                self.diagnostics[ix]
                    .1
                    .groups(language_server_id, &mut groups, self);
            }
        } else {
            for (language_server_id, diagnostics) in self.diagnostics.iter() {
                diagnostics.groups(*language_server_id, &mut groups, self);
            }
        }

        groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
            let a_start = &group_a.entries[group_a.primary_ix].range.start;
            let b_start = &group_b.entries[group_b.primary_ix].range.start;
            a_start.cmp(b_start, self).then_with(|| id_a.cmp(id_b))
        });

        groups
    }

    /// Returns an iterator over the diagnostics for the given group.
    pub fn diagnostic_group<'a, O>(
        &'a self,
        group_id: usize,
    ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
    where
        O: 'a + FromAnchor,
    {
        self.diagnostics
            .iter()
            .flat_map(move |(_, set)| set.group(group_id, self))
    }

    /// An integer version number that accounts for all updates besides
    /// the buffer's text itself (which is versioned via a version vector).
    pub fn non_text_state_update_count(&self) -> usize {
        self.non_text_state_update_count
    }

    /// Returns a snapshot of the underlying file.
    pub fn file(&self) -> Option<&Arc<dyn File>> {
        self.file.as_ref()
    }

    /// Resolves the path of the underlying file, relative to its worktree.
    ///
    /// The returned path includes the worktree's root name when `include_root`
    /// is true, or when the underlying file is the worktree root itself.
    pub fn resolve_file_path(&self, cx: &AppContext, include_root: bool) -> Option<PathBuf> {
        if let Some(file) = self.file() {
            if file.path().file_name().is_none() || include_root {
                Some(file.full_path(cx))
            } else {
                Some(file.path().to_path_buf())
            }
        } else {
            None
        }
    }
}

/// Returns the indentation of the line at the given row in the given buffer.
fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
    indent_size_for_text(text.chars_at(Point::new(row, 0)))
}

/// Computes an [IndentSize] from a sequence of characters, counting leading
/// spaces or tabs and stopping at the first character that is neither. The
/// indent's kind is taken from the first indentation character encountered.
fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
    let mut result = IndentSize::spaces(0);
    for c in text {
        let kind = match c {
            ' ' => IndentKind::Space,
            '\t' => IndentKind::Tab,
            _ => break,
        };
        if result.len == 0 {
            result.kind = kind;
        }
        result.len += 1;
    }
    result
}
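
// A usage sketch of the helper above (hypothetical inputs, shown as a comment):
//
//     let spaces = indent_size_for_text("    let x = 1;".chars());
//     // spaces.kind == IndentKind::Space, spaces.len == 4
//     let tabs = indent_size_for_text("\t\tfoo".chars());
//     // tabs.kind == IndentKind::Tab, tabs.len == 2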

impl Clone for BufferSnapshot {
    fn clone(&self) -> Self {
        Self {
            text: self.text.clone(),
            git_diff: self.git_diff.clone(),
            syntax: self.syntax.clone(),
            file: self.file.clone(),
            remote_selections: self.remote_selections.clone(),
            diagnostics: self.diagnostics.clone(),
            language: self.language.clone(),
            non_text_state_update_count: self.non_text_state_update_count,
        }
    }
}

impl Deref for BufferSnapshot {
    type Target = text::BufferSnapshot;

    fn deref(&self) -> &Self::Target {
        &self.text
    }
}

unsafe impl<'a> Send for BufferChunks<'a> {}

impl<'a> BufferChunks<'a> {
    /// Creates an iterator over the chunks of `text` that lie within `range`,
    /// annotating each chunk with syntax highlighting and diagnostic information.
    pub(crate) fn new(
        text: &'a Rope,
        range: Range<usize>,
        syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
        diagnostic_endpoints: Vec<DiagnosticEndpoint>,
    ) -> Self {
        let mut highlights = None;
        if let Some((captures, highlight_maps)) = syntax {
            highlights = Some(BufferChunkHighlights {
                captures,
                next_capture: None,
                stack: Default::default(),
                highlight_maps,
            })
        }

        let diagnostic_endpoints = diagnostic_endpoints.into_iter().peekable();
        let chunks = text.chunks_in_range(range.clone());

        BufferChunks {
            range,
            chunks,
            diagnostic_endpoints,
            error_depth: 0,
            warning_depth: 0,
            information_depth: 0,
            hint_depth: 0,
            unnecessary_depth: 0,
            highlights,
        }
    }

    /// Seeks to the given byte offset in the buffer.
    pub fn seek(&mut self, offset: usize) {
        self.range.start = offset;
        self.chunks.seek(self.range.start);
        if let Some(highlights) = self.highlights.as_mut() {
            highlights
                .stack
                .retain(|(end_offset, _)| *end_offset > offset);
            if let Some(capture) = &highlights.next_capture {
                if offset >= capture.node.start_byte() {
                    let next_capture_end = capture.node.end_byte();
                    if offset < next_capture_end {
                        highlights.stack.push((
                            next_capture_end,
                            highlights.highlight_maps[capture.grammar_index].get(capture.index),
                        ));
                    }
                    highlights.next_capture.take();
                }
            }
            highlights.captures.set_byte_range(self.range.clone());
        }
    }

    /// The current byte offset in the buffer.
    pub fn offset(&self) -> usize {
        self.range.start
    }

    fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
        let depth = match endpoint.severity {
            DiagnosticSeverity::ERROR => &mut self.error_depth,
            DiagnosticSeverity::WARNING => &mut self.warning_depth,
            DiagnosticSeverity::INFORMATION => &mut self.information_depth,
            DiagnosticSeverity::HINT => &mut self.hint_depth,
            _ => return,
        };
        if endpoint.is_start {
            *depth += 1;
        } else {
            *depth -= 1;
        }

        if endpoint.is_unnecessary {
            if endpoint.is_start {
                self.unnecessary_depth += 1;
            } else {
                self.unnecessary_depth -= 1;
            }
        }
    }

    fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
        if self.error_depth > 0 {
            Some(DiagnosticSeverity::ERROR)
        } else if self.warning_depth > 0 {
            Some(DiagnosticSeverity::WARNING)
        } else if self.information_depth > 0 {
            Some(DiagnosticSeverity::INFORMATION)
        } else if self.hint_depth > 0 {
            Some(DiagnosticSeverity::HINT)
        } else {
            None
        }
    }

    fn current_code_is_unnecessary(&self) -> bool {
        self.unnecessary_depth > 0
    }
}

impl<'a> Iterator for BufferChunks<'a> {
    type Item = Chunk<'a>;

    fn next(&mut self) -> Option<Self::Item> {
        let mut next_capture_start = usize::MAX;
        let mut next_diagnostic_endpoint = usize::MAX;

        // Advance the syntax highlighting state: pop scopes that ended before the
        // current offset, then pull captures forward until one starts beyond it.
        if let Some(highlights) = self.highlights.as_mut() {
            while let Some((parent_capture_end, _)) = highlights.stack.last() {
                if *parent_capture_end <= self.range.start {
                    highlights.stack.pop();
                } else {
                    break;
                }
            }

            if highlights.next_capture.is_none() {
                highlights.next_capture = highlights.captures.next();
            }

            while let Some(capture) = highlights.next_capture.as_ref() {
                if self.range.start < capture.node.start_byte() {
                    next_capture_start = capture.node.start_byte();
                    break;
                } else {
                    let highlight_id =
                        highlights.highlight_maps[capture.grammar_index].get(capture.index);
                    highlights
                        .stack
                        .push((capture.node.end_byte(), highlight_id));
                    highlights.next_capture = highlights.captures.next();
                }
            }
        }

        // Consume any diagnostic endpoints at or before the current offset,
        // updating the per-severity nesting depths.
        while let Some(endpoint) = self.diagnostic_endpoints.peek().copied() {
            if endpoint.offset <= self.range.start {
                self.update_diagnostic_depths(endpoint);
                self.diagnostic_endpoints.next();
            } else {
                next_diagnostic_endpoint = endpoint.offset;
                break;
            }
        }

        // Emit text up to the nearest boundary: the next capture start, the next
        // diagnostic endpoint, the end of the innermost highlight scope, or the
        // end of the current rope chunk, whichever comes first.
        if let Some(chunk) = self.chunks.peek() {
            let chunk_start = self.range.start;
            let mut chunk_end = (self.chunks.offset() + chunk.len())
                .min(next_capture_start)
                .min(next_diagnostic_endpoint);
            let mut highlight_id = None;
            if let Some(highlights) = self.highlights.as_ref() {
                if let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last() {
                    chunk_end = chunk_end.min(*parent_capture_end);
                    highlight_id = Some(*parent_highlight_id);
                }
            }

            let slice =
                &chunk[chunk_start - self.chunks.offset()..chunk_end - self.chunks.offset()];
            self.range.start = chunk_end;
            if self.range.start == self.chunks.offset() + chunk.len() {
                self.chunks.next().unwrap();
            }

            Some(Chunk {
                text: slice,
                syntax_highlight_id: highlight_id,
                diagnostic_severity: self.current_diagnostic_severity(),
                is_unnecessary: self.current_code_is_unnecessary(),
                ..Default::default()
            })
        } else {
            None
        }
    }
}

impl operation_queue::Operation for Operation {
    fn lamport_timestamp(&self) -> clock::Lamport {
        match self {
            Operation::Buffer(_) => {
                unreachable!("buffer operations should never be deferred at this layer")
            }
            Operation::UpdateDiagnostics {
                lamport_timestamp, ..
            }
            | Operation::UpdateSelections {
                lamport_timestamp, ..
            }
            | Operation::UpdateCompletionTriggers {
                lamport_timestamp, ..
            } => *lamport_timestamp,
        }
    }
}

impl Default for Diagnostic {
    fn default() -> Self {
        Self {
            source: Default::default(),
            code: None,
            severity: DiagnosticSeverity::ERROR,
            message: Default::default(),
            group_id: 0,
            is_primary: false,
            is_disk_based: false,
            is_unnecessary: false,
        }
    }
}

impl IndentSize {
    /// Returns an [IndentSize] representing the given number of spaces.
    pub fn spaces(len: u32) -> Self {
        Self {
            len,
            kind: IndentKind::Space,
        }
    }

    /// Returns an [IndentSize] representing a tab.
    pub fn tab() -> Self {
        Self {
            len: 1,
            kind: IndentKind::Tab,
        }
    }

    /// An iterator over the characters represented by this [IndentSize].
    pub fn chars(&self) -> impl Iterator<Item = char> {
        iter::repeat(self.char()).take(self.len as usize)
    }

    /// The character representation of this [IndentSize].
    pub fn char(&self) -> char {
        match self.kind {
            IndentKind::Space => ' ',
            IndentKind::Tab => '\t',
        }
    }

    /// Consumes the current [IndentSize] and returns a new one that has
    /// been shrunk or enlarged by the given size along the given direction.
    pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
        match direction {
            Ordering::Less => {
                if self.kind == size.kind && self.len >= size.len {
                    self.len -= size.len;
                }
            }
            Ordering::Equal => {}
            Ordering::Greater => {
                if self.len == 0 {
                    self = size;
                } else if self.kind == size.kind {
                    self.len += size.len;
                }
            }
        }
        self
    }
}
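
// A sketch of how `IndentSize::with_delta` behaves (hypothetical values, shown
// as a comment):
//
//     // Outdenting four spaces by a two-space step leaves two spaces.
//     let outdented = IndentSize::spaces(4).with_delta(Ordering::Less, IndentSize::spaces(2));
//     // outdented.len == 2, outdented.kind == IndentKind::Space
//
//     // Indenting an empty indent adopts the delta's kind and length.
//     let indented = IndentSize::spaces(0).with_delta(Ordering::Greater, IndentSize::tab());
//     // indented.len == 1, indented.kind == IndentKind::Tab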

#[cfg(any(test, feature = "test-support"))]
pub struct TestFile {
    pub path: Arc<Path>,
    pub root_name: String,
}

#[cfg(any(test, feature = "test-support"))]
impl File for TestFile {
    fn path(&self) -> &Arc<Path> {
        &self.path
    }

    fn full_path(&self, _: &gpui::AppContext) -> PathBuf {
        PathBuf::from(&self.root_name).join(self.path.as_ref())
    }

    fn as_local(&self) -> Option<&dyn LocalFile> {
        None
    }

    fn mtime(&self) -> Option<SystemTime> {
        unimplemented!()
    }

    fn file_name<'a>(&'a self, _: &'a gpui::AppContext) -> &'a std::ffi::OsStr {
        self.path().file_name().unwrap_or(self.root_name.as_ref())
    }

    fn worktree_id(&self) -> usize {
        0
    }

    fn is_deleted(&self) -> bool {
        unimplemented!()
    }

    fn as_any(&self) -> &dyn std::any::Any {
        unimplemented!()
    }

    fn to_proto(&self, _: &AppContext) -> rpc::proto::File {
        unimplemented!()
    }

    fn is_private(&self) -> bool {
        false
    }
}

/// Coalesces consecutive values from the given iterator into contiguous half-open
/// ranges, starting a new range whenever a value is not adjacent to the previous
/// one or the current range already contains `max_len` values.
pub(crate) fn contiguous_ranges(
    values: impl Iterator<Item = u32>,
    max_len: usize,
) -> impl Iterator<Item = Range<u32>> {
    let mut values = values;
    let mut current_range: Option<Range<u32>> = None;
    std::iter::from_fn(move || loop {
        if let Some(value) = values.next() {
            if let Some(range) = &mut current_range {
                if value == range.end && range.len() < max_len {
                    range.end += 1;
                    continue;
                }
            }

            let prev_range = current_range.clone();
            current_range = Some(value..(value + 1));
            if prev_range.is_some() {
                return prev_range;
            }
        } else {
            return current_range.take();
        }
    })
}
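
// A usage sketch (hypothetical input, shown as a comment):
//
//     let rows = [1, 2, 3, 5, 6, 7, 9].into_iter();
//     let ranges: Vec<_> = contiguous_ranges(rows, 100).collect();
//     // ranges == [1..4, 5..8, 9..10]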

/// Returns the [CharKind] for the given character. When a scope is provided,
/// the function checks if the character is considered a word character
/// based on the language scope's word character settings.
pub fn char_kind(scope: &Option<LanguageScope>, c: char) -> CharKind {
    if c.is_whitespace() {
        return CharKind::Whitespace;
    } else if c.is_alphanumeric() || c == '_' {
        return CharKind::Word;
    }

    if let Some(scope) = scope {
        if let Some(characters) = scope.word_characters() {
            if characters.contains(&c) {
                return CharKind::Word;
            }
        }
    }

    CharKind::Punctuation
}
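
// A usage sketch with no language scope (shown as a comment):
//
//     assert!(matches!(char_kind(&None, 'a'), CharKind::Word));
//     assert!(matches!(char_kind(&None, ' '), CharKind::Whitespace));
//     assert!(matches!(char_kind(&None, '-'), CharKind::Punctuation));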

/// Find all of the ranges of whitespace that occur at the ends of lines
/// in the given rope.
///
/// This could also be done with a regex search, but this implementation
/// avoids copying text.
pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
    let mut ranges = Vec::new();

    let mut offset = 0;
    let mut prev_chunk_trailing_whitespace_range = 0..0;
    for chunk in rope.chunks() {
        let mut prev_line_trailing_whitespace_range = 0..0;
        for (i, line) in chunk.split('\n').enumerate() {
            let line_end_offset = offset + line.len();
            let trimmed_line_len = line.trim_end_matches(|c| matches!(c, ' ' | '\t')).len();
            let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;

            if i == 0 && trimmed_line_len == 0 {
                trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
            }
            if !prev_line_trailing_whitespace_range.is_empty() {
                ranges.push(prev_line_trailing_whitespace_range);
            }

            offset = line_end_offset + 1;
            prev_line_trailing_whitespace_range = trailing_whitespace_range;
        }

        offset -= 1;
        prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
    }

    if !prev_chunk_trailing_whitespace_range.is_empty() {
        ranges.push(prev_chunk_trailing_whitespace_range);
    }

    ranges
}
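
// A usage sketch (hypothetical input; assumes `Rope` implements `From<&str>`,
// shown as a comment):
//
//     let rope = Rope::from("fn main() {  \n    body\t\n}");
//     let ranges = trailing_whitespace_ranges(&rope);
//     // ranges == [11..13, 22..23]: the two spaces after `{` and the tab after `body`.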