1pub use crate::{
2 diagnostic_set::DiagnosticSet,
3 highlight_map::{HighlightId, HighlightMap},
4 markdown::ParsedMarkdown,
5 proto, Grammar, Language, LanguageRegistry,
6};
7use crate::{
8 diagnostic_set::{DiagnosticEntry, DiagnosticGroup},
9 language_settings::{language_settings, IndentGuideSettings, LanguageSettings},
10 markdown::parse_markdown,
11 outline::OutlineItem,
12 syntax_map::{
13 SyntaxLayer, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures, SyntaxMapMatches,
14 SyntaxSnapshot, ToTreeSitterPoint,
15 },
16 task_context::RunnableRange,
17 LanguageScope, Outline, RunnableCapture, RunnableTag,
18};
19use anyhow::{anyhow, Context, Result};
20pub use clock::ReplicaId;
21use futures::channel::oneshot;
22use gpui::{
23 AnyElement, AppContext, EventEmitter, HighlightStyle, ModelContext, Task, TaskLabel,
24 WindowContext,
25};
26use lazy_static::lazy_static;
27use lsp::LanguageServerId;
28use parking_lot::Mutex;
29use similar::{ChangeTag, TextDiff};
30use smallvec::SmallVec;
31use smol::future::yield_now;
32use std::{
33 any::Any,
34 cell::Cell,
35 cmp::{self, Ordering},
36 collections::BTreeMap,
37 ffi::OsStr,
38 fmt,
39 future::Future,
40 iter::{self, Iterator, Peekable},
41 mem,
42 ops::{Deref, Range},
43 path::{Path, PathBuf},
44 str,
45 sync::Arc,
46 time::{Duration, Instant, SystemTime},
47 vec,
48};
49use sum_tree::TreeMap;
50use text::operation_queue::OperationQueue;
51use text::*;
52pub use text::{
53 Anchor, Bias, Buffer as TextBuffer, BufferId, BufferSnapshot as TextBufferSnapshot, Edit,
54 OffsetRangeExt, OffsetUtf16, Patch, Point, PointUtf16, Rope, Selection, SelectionGoal,
55 Subscription, TextDimension, TextSummary, ToOffset, ToOffsetUtf16, ToPoint, ToPointUtf16,
56 Transaction, TransactionId, Unclipped,
57};
58use theme::SyntaxTheme;
59#[cfg(any(test, feature = "test-support"))]
60use util::RandomCharIter;
61use util::RangeExt;
62
63#[cfg(any(test, feature = "test-support"))]
64pub use {tree_sitter_rust, tree_sitter_typescript};
65
66pub use lsp::DiagnosticSeverity;
67
68lazy_static! {
69 /// A label for the background task spawned by the buffer to compute
70 /// a diff against the contents of its file.
71 pub static ref BUFFER_DIFF_TASK: TaskLabel = TaskLabel::new();
72}
73
/// Indicates whether a [Buffer] has permission to edit.
75#[derive(PartialEq, Clone, Copy, Debug)]
76pub enum Capability {
77 /// The buffer is a mutable replica.
78 ReadWrite,
79 /// The buffer is a read-only replica.
80 ReadOnly,
81}
82
83pub type BufferRow = u32;
84
85/// An in-memory representation of a source code file, including its text,
86/// syntax trees, git status, and diagnostics.
87pub struct Buffer {
88 text: TextBuffer,
89 diff_base: Option<Rope>,
90 git_diff: git::diff::BufferDiff,
91 file: Option<Arc<dyn File>>,
92 /// The mtime of the file when this buffer was last loaded from
93 /// or saved to disk.
94 saved_mtime: Option<SystemTime>,
95 /// The version vector when this buffer was last loaded from
96 /// or saved to disk.
97 saved_version: clock::Global,
98 transaction_depth: usize,
99 was_dirty_before_starting_transaction: Option<bool>,
100 reload_task: Option<Task<Result<()>>>,
101 language: Option<Arc<Language>>,
102 autoindent_requests: Vec<Arc<AutoindentRequest>>,
103 pending_autoindent: Option<Task<()>>,
104 sync_parse_timeout: Duration,
105 syntax_map: Mutex<SyntaxMap>,
106 parsing_in_background: bool,
107 non_text_state_update_count: usize,
108 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
109 remote_selections: TreeMap<ReplicaId, SelectionSet>,
110 diagnostics_timestamp: clock::Lamport,
111 completion_triggers: Vec<String>,
112 completion_triggers_timestamp: clock::Lamport,
113 deferred_ops: OperationQueue<Operation>,
114 capability: Capability,
115 has_conflict: bool,
116 diff_base_version: usize,
    /// Memoizes calls to `has_changes_since(saved_version)`.
    /// The contents of the cell are `(self.version, has_changes)` at the time of the last call.
119 has_unsaved_edits: Cell<(clock::Global, bool)>,
120}
121
122/// An immutable, cheaply cloneable representation of a fixed
123/// state of a buffer.
124pub struct BufferSnapshot {
125 text: text::BufferSnapshot,
126 git_diff: git::diff::BufferDiff,
127 pub(crate) syntax: SyntaxSnapshot,
128 file: Option<Arc<dyn File>>,
129 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
130 remote_selections: TreeMap<ReplicaId, SelectionSet>,
131 language: Option<Arc<Language>>,
132 non_text_state_update_count: usize,
133}
134
135/// The kind and amount of indentation in a particular line. For now,
136/// assumes that indentation is all the same character.
137#[derive(Clone, Copy, Debug, PartialEq, Eq, Default)]
138pub struct IndentSize {
139 /// The number of bytes that comprise the indentation.
140 pub len: u32,
141 /// The kind of whitespace used for indentation.
142 pub kind: IndentKind,
143}
144
145/// A whitespace character that's used for indentation.
146#[derive(Clone, Copy, Debug, PartialEq, Eq, Default)]
147pub enum IndentKind {
148 /// An ASCII space character.
149 #[default]
150 Space,
151 /// An ASCII tab character.
152 Tab,
153}
154
155/// The shape of a selection cursor.
156#[derive(Copy, Clone, PartialEq, Eq, Debug, Default)]
157pub enum CursorShape {
158 /// A vertical bar
159 #[default]
160 Bar,
161 /// A block that surrounds the following character
162 Block,
163 /// An underline that runs along the following character
164 Underscore,
165 /// A box drawn around the following character
166 Hollow,
167}
168
169#[derive(Clone, Debug)]
170struct SelectionSet {
171 line_mode: bool,
172 cursor_shape: CursorShape,
173 selections: Arc<[Selection<Anchor>]>,
174 lamport_timestamp: clock::Lamport,
175}
176
177/// A diagnostic associated with a certain range of a buffer.
178#[derive(Clone, Debug, PartialEq, Eq)]
179pub struct Diagnostic {
180 /// The name of the service that produced this diagnostic.
181 pub source: Option<String>,
182 /// A machine-readable code that identifies this diagnostic.
183 pub code: Option<String>,
184 /// Whether this diagnostic is a hint, warning, or error.
185 pub severity: DiagnosticSeverity,
186 /// The human-readable message associated with this diagnostic.
187 pub message: String,
188 /// An id that identifies the group to which this diagnostic belongs.
189 ///
190 /// When a language server produces a diagnostic with
191 /// one or more associated diagnostics, those diagnostics are all
192 /// assigned a single group id.
193 pub group_id: usize,
194 /// Whether this diagnostic is the primary diagnostic for its group.
195 ///
196 /// In a given group, the primary diagnostic is the top-level diagnostic
197 /// returned by the language server. The non-primary diagnostics are the
198 /// associated diagnostics.
199 pub is_primary: bool,
200 /// Whether this diagnostic is considered to originate from an analysis of
201 /// files on disk, as opposed to any unsaved buffer contents. This is a
202 /// property of a given diagnostic source, and is configured for a given
203 /// language server via the [`LspAdapter::disk_based_diagnostic_sources`](crate::LspAdapter::disk_based_diagnostic_sources) method
204 /// for the language server.
205 pub is_disk_based: bool,
206 /// Whether this diagnostic marks unnecessary code.
207 pub is_unnecessary: bool,
208}
209
210/// TODO - move this into the `project` crate and make it private.
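/// Converts LSP completion documentation into a [`Documentation`] value,
/// parsing Markdown when the language server provides it.
///
/// A rough usage sketch (not compiled as a doctest); assumes the caller already
/// holds a `language_registry: Arc<LanguageRegistry>`:
///
/// ```ignore
/// let docs = prepare_completion_documentation(
///     &lsp::Documentation::String("Adds two numbers.".into()),
///     &language_registry,
///     None,
/// )
/// .await;
/// // A single-line string becomes `Documentation::SingleLine`.
/// assert!(matches!(docs, Documentation::SingleLine(_)));
/// ```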
211pub async fn prepare_completion_documentation(
212 documentation: &lsp::Documentation,
213 language_registry: &Arc<LanguageRegistry>,
214 language: Option<Arc<Language>>,
215) -> Documentation {
216 match documentation {
217 lsp::Documentation::String(text) => {
218 if text.lines().count() <= 1 {
219 Documentation::SingleLine(text.clone())
220 } else {
221 Documentation::MultiLinePlainText(text.clone())
222 }
223 }
224
225 lsp::Documentation::MarkupContent(lsp::MarkupContent { kind, value }) => match kind {
226 lsp::MarkupKind::PlainText => {
227 if value.lines().count() <= 1 {
228 Documentation::SingleLine(value.clone())
229 } else {
230 Documentation::MultiLinePlainText(value.clone())
231 }
232 }
233
234 lsp::MarkupKind::Markdown => {
235 let parsed = parse_markdown(value, language_registry, language).await;
236 Documentation::MultiLineMarkdown(parsed)
237 }
238 },
239 }
240}
241
242/// Documentation associated with a [`Completion`].
243#[derive(Clone, Debug)]
244pub enum Documentation {
245 /// There is no documentation for this completion.
246 Undocumented,
247 /// A single line of documentation.
248 SingleLine(String),
249 /// Multiple lines of plain text documentation.
250 MultiLinePlainText(String),
251 /// Markdown documentation.
252 MultiLineMarkdown(ParsedMarkdown),
253}
254
255/// An operation used to synchronize this buffer with its other replicas.
256#[derive(Clone, Debug, PartialEq)]
257pub enum Operation {
258 /// A text operation.
259 Buffer(text::Operation),
260
261 /// An update to the buffer's diagnostics.
262 UpdateDiagnostics {
263 /// The id of the language server that produced the new diagnostics.
264 server_id: LanguageServerId,
265 /// The diagnostics.
266 diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
267 /// The buffer's lamport timestamp.
268 lamport_timestamp: clock::Lamport,
269 },
270
271 /// An update to the most recent selections in this buffer.
272 UpdateSelections {
273 /// The selections.
274 selections: Arc<[Selection<Anchor>]>,
275 /// The buffer's lamport timestamp.
276 lamport_timestamp: clock::Lamport,
277 /// Whether the selections are in 'line mode'.
278 line_mode: bool,
279 /// The [`CursorShape`] associated with these selections.
280 cursor_shape: CursorShape,
281 },
282
283 /// An update to the characters that should trigger autocompletion
284 /// for this buffer.
285 UpdateCompletionTriggers {
286 /// The characters that trigger autocompletion.
287 triggers: Vec<String>,
288 /// The buffer's lamport timestamp.
289 lamport_timestamp: clock::Lamport,
290 },
291}
292
293/// An event that occurs in a buffer.
294#[derive(Clone, Debug, PartialEq)]
295pub enum Event {
296 /// The buffer was changed in a way that must be
297 /// propagated to its other replicas.
298 Operation(Operation),
299 /// The buffer was edited.
300 Edited,
301 /// The buffer's `dirty` bit changed.
302 DirtyChanged,
303 /// The buffer was saved.
304 Saved,
305 /// The buffer's file was changed on disk.
306 FileHandleChanged,
307 /// The buffer was reloaded.
308 Reloaded,
309 /// The buffer's diff_base changed.
310 DiffBaseChanged,
311 /// Buffer's excerpts for a certain diff base were recalculated.
312 DiffUpdated,
313 /// The buffer's language was changed.
314 LanguageChanged,
315 /// The buffer's syntax trees were updated.
316 Reparsed,
317 /// The buffer's diagnostics were updated.
318 DiagnosticsUpdated,
319 /// The buffer gained or lost editing capabilities.
320 CapabilityChanged,
321 /// The buffer was explicitly requested to close.
322 Closed,
323}
324
325/// The file associated with a buffer.
326pub trait File: Send + Sync {
327 /// Returns the [`LocalFile`] associated with this file, if the
328 /// file is local.
329 fn as_local(&self) -> Option<&dyn LocalFile>;
330
331 /// Returns whether this file is local.
332 fn is_local(&self) -> bool {
333 self.as_local().is_some()
334 }
335
336 /// Returns the file's mtime.
337 fn mtime(&self) -> Option<SystemTime>;
338
339 /// Returns the path of this file relative to the worktree's root directory.
340 fn path(&self) -> &Arc<Path>;
341
342 /// Returns the path of this file relative to the worktree's parent directory (this means it
343 /// includes the name of the worktree's root folder).
344 fn full_path(&self, cx: &AppContext) -> PathBuf;
345
346 /// Returns the last component of this handle's absolute path. If this handle refers to the root
347 /// of its worktree, then this method will return the name of the worktree itself.
348 fn file_name<'a>(&'a self, cx: &'a AppContext) -> &'a OsStr;
349
350 /// Returns the id of the worktree to which this file belongs.
351 ///
352 /// This is needed for looking up project-specific settings.
353 fn worktree_id(&self) -> usize;
354
355 /// Returns whether the file has been deleted.
356 fn is_deleted(&self) -> bool;
357
    /// Returns whether the file has existed on disk at some point.
359 fn is_created(&self) -> bool {
360 self.mtime().is_some()
361 }
362
363 /// Converts this file into an [`Any`] trait object.
364 fn as_any(&self) -> &dyn Any;
365
366 /// Converts this file into a protobuf message.
367 fn to_proto(&self) -> rpc::proto::File;
368
    /// Returns whether Zed considers this to be a private file.
370 fn is_private(&self) -> bool;
371}
372
373/// The file associated with a buffer, in the case where the file is on the local disk.
374pub trait LocalFile: File {
375 /// Returns the absolute path of this file.
376 fn abs_path(&self, cx: &AppContext) -> PathBuf;
377
378 /// Loads the file's contents from disk.
379 fn load(&self, cx: &AppContext) -> Task<Result<String>>;
380
381 /// Returns true if the file should not be shared with collaborators.
382 fn is_private(&self, _: &AppContext) -> bool {
383 false
384 }
385}
386
387/// The auto-indent behavior associated with an editing operation.
388/// For some editing operations, each affected line of text has its
389/// indentation recomputed. For other operations, the entire block
390/// of edited text is adjusted uniformly.
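///
/// A rough sketch of pasting a copied block with [`AutoindentMode::Block`]
/// (not compiled here); `cursor` and `copied_text` are hypothetical values from
/// the caller, and `4` stands in for the copied block's original first-line
/// indentation:
///
/// ```ignore
/// buffer.edit(
///     [(cursor..cursor, copied_text)],
///     Some(AutoindentMode::Block {
///         original_indent_columns: vec![4],
///     }),
///     cx,
/// );
/// ```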
391#[derive(Clone, Debug)]
392pub enum AutoindentMode {
393 /// Indent each line of inserted text.
394 EachLine,
395 /// Apply the same indentation adjustment to all of the lines
396 /// in a given insertion.
397 Block {
398 /// The original indentation level of the first line of each
399 /// insertion, if it has been copied.
400 original_indent_columns: Vec<u32>,
401 },
402}
403
404#[derive(Clone)]
405struct AutoindentRequest {
406 before_edit: BufferSnapshot,
407 entries: Vec<AutoindentRequestEntry>,
408 is_block_mode: bool,
409}
410
411#[derive(Clone)]
412struct AutoindentRequestEntry {
413 /// A range of the buffer whose indentation should be adjusted.
414 range: Range<Anchor>,
415 /// Whether or not these lines should be considered brand new, for the
416 /// purpose of auto-indent. When text is not new, its indentation will
417 /// only be adjusted if the suggested indentation level has *changed*
418 /// since the edit was made.
419 first_line_is_new: bool,
420 indent_size: IndentSize,
421 original_indent_column: Option<u32>,
422}
423
424#[derive(Debug)]
425struct IndentSuggestion {
426 basis_row: u32,
427 delta: Ordering,
428 within_error: bool,
429}
430
431struct BufferChunkHighlights<'a> {
432 captures: SyntaxMapCaptures<'a>,
433 next_capture: Option<SyntaxMapCapture<'a>>,
434 stack: Vec<(usize, HighlightId)>,
435 highlight_maps: Vec<HighlightMap>,
436}
437
438/// An iterator that yields chunks of a buffer's text, along with their
439/// syntax highlights and diagnostic status.
440pub struct BufferChunks<'a> {
441 range: Range<usize>,
442 chunks: text::Chunks<'a>,
443 diagnostic_endpoints: Peekable<vec::IntoIter<DiagnosticEndpoint>>,
444 error_depth: usize,
445 warning_depth: usize,
446 information_depth: usize,
447 hint_depth: usize,
448 unnecessary_depth: usize,
449 highlights: Option<BufferChunkHighlights<'a>>,
450}
451
452/// A chunk of a buffer's text, along with its syntax highlight and
453/// diagnostic status.
454#[derive(Clone, Debug, Default)]
455pub struct Chunk<'a> {
456 /// The text of the chunk.
457 pub text: &'a str,
458 /// The syntax highlighting style of the chunk.
459 pub syntax_highlight_id: Option<HighlightId>,
460 /// The highlight style that has been applied to this chunk in
461 /// the editor.
462 pub highlight_style: Option<HighlightStyle>,
463 /// The severity of diagnostic associated with this chunk, if any.
464 pub diagnostic_severity: Option<DiagnosticSeverity>,
465 /// Whether this chunk of text is marked as unnecessary.
466 pub is_unnecessary: bool,
467 /// Whether this chunk of text was originally a tab character.
468 pub is_tab: bool,
469 /// An optional recipe for how the chunk should be presented.
470 pub renderer: Option<ChunkRenderer>,
471}
472
473/// A recipe for how the chunk should be presented.
474#[derive(Clone)]
475pub struct ChunkRenderer {
    /// Creates a custom element to represent this chunk.
477 pub render: Arc<dyn Send + Sync + Fn(&mut WindowContext) -> AnyElement>,
478 /// If true, the element is constrained to the shaped width of the text.
479 pub constrain_width: bool,
480}
481
482impl fmt::Debug for ChunkRenderer {
483 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
484 f.debug_struct("ChunkRenderer")
485 .field("constrain_width", &self.constrain_width)
486 .finish()
487 }
488}
489
490/// A set of edits to a given version of a buffer, computed asynchronously.
491#[derive(Debug)]
492pub struct Diff {
493 pub(crate) base_version: clock::Global,
494 line_ending: LineEnding,
495 edits: Vec<(Range<usize>, Arc<str>)>,
496}
497
498#[derive(Clone, Copy)]
499pub(crate) struct DiagnosticEndpoint {
500 offset: usize,
501 is_start: bool,
502 severity: DiagnosticSeverity,
503 is_unnecessary: bool,
504}
505
506/// A class of characters, used for characterizing a run of text.
507#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
508pub enum CharKind {
509 /// Whitespace.
510 Whitespace,
511 /// Punctuation.
512 Punctuation,
513 /// Word.
514 Word,
515}
516
/// A runnable is a set of data about a buffer region that can be resolved into a task.
518pub struct Runnable {
519 pub tags: SmallVec<[RunnableTag; 1]>,
520 pub language: Arc<Language>,
521 pub buffer: BufferId,
522}
523
524#[derive(Clone, Debug, PartialEq)]
525pub struct IndentGuide {
526 pub buffer_id: BufferId,
527 pub start_row: BufferRow,
528 pub end_row: BufferRow,
529 pub depth: u32,
530 pub tab_size: u32,
531 pub settings: IndentGuideSettings,
532}
533
534impl IndentGuide {
535 pub fn indent_level(&self) -> u32 {
536 self.depth * self.tab_size
537 }
538}
539
540impl Buffer {
541 /// Create a new buffer with the given base text.
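    ///
    /// A rough construction sketch (not compiled here); assumes a gpui
    /// `AppContext` with `new_model` available to the caller:
    ///
    /// ```ignore
    /// let buffer = cx.new_model(|cx| Buffer::local("fn main() {}\n", cx));
    /// ```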
542 pub fn local<T: Into<String>>(base_text: T, cx: &mut ModelContext<Self>) -> Self {
543 Self::build(
544 TextBuffer::new(0, cx.entity_id().as_non_zero_u64().into(), base_text.into()),
545 None,
546 None,
547 Capability::ReadWrite,
548 )
549 }
550
551 /// Create a new buffer with the given base text that has proper line endings and other normalization applied.
552 pub fn local_normalized(
553 base_text_normalized: Rope,
554 line_ending: LineEnding,
555 cx: &mut ModelContext<Self>,
556 ) -> Self {
557 Self::build(
558 TextBuffer::new_normalized(
559 0,
560 cx.entity_id().as_non_zero_u64().into(),
561 line_ending,
562 base_text_normalized,
563 ),
564 None,
565 None,
566 Capability::ReadWrite,
567 )
568 }
569
570 /// Create a new buffer that is a replica of a remote buffer.
571 pub fn remote(
572 remote_id: BufferId,
573 replica_id: ReplicaId,
574 capability: Capability,
575 base_text: impl Into<String>,
576 ) -> Self {
577 Self::build(
578 TextBuffer::new(replica_id, remote_id, base_text.into()),
579 None,
580 None,
581 capability,
582 )
583 }
584
585 /// Create a new buffer that is a replica of a remote buffer, populating its
586 /// state from the given protobuf message.
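    ///
    /// A rough sketch of reconstructing a replica on a remote peer (not
    /// compiled here); `state` and `ops` are assumed to have been produced by
    /// [`Buffer::to_proto`] and [`Buffer::serialize_ops`] on the host:
    ///
    /// ```ignore
    /// let buffer = cx.new_model(|cx| {
    ///     let mut buffer =
    ///         Buffer::from_proto(replica_id, Capability::ReadWrite, state, None).unwrap();
    ///     let ops = ops
    ///         .into_iter()
    ///         .map(proto::deserialize_operation)
    ///         .collect::<Result<Vec<_>>>()
    ///         .unwrap();
    ///     buffer.apply_ops(ops, cx).unwrap();
    ///     buffer
    /// });
    /// ```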
587 pub fn from_proto(
588 replica_id: ReplicaId,
589 capability: Capability,
590 message: proto::BufferState,
591 file: Option<Arc<dyn File>>,
592 ) -> Result<Self> {
593 let buffer_id = BufferId::new(message.id)
594 .with_context(|| anyhow!("Could not deserialize buffer_id"))?;
595 let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text);
596 let mut this = Self::build(buffer, message.diff_base, file, capability);
597 this.text.set_line_ending(proto::deserialize_line_ending(
598 rpc::proto::LineEnding::from_i32(message.line_ending)
599 .ok_or_else(|| anyhow!("missing line_ending"))?,
600 ));
601 this.saved_version = proto::deserialize_version(&message.saved_version);
602 this.saved_mtime = message.saved_mtime.map(|time| time.into());
603 Ok(this)
604 }
605
606 /// Serialize the buffer's state to a protobuf message.
607 pub fn to_proto(&self) -> proto::BufferState {
608 proto::BufferState {
609 id: self.remote_id().into(),
610 file: self.file.as_ref().map(|f| f.to_proto()),
611 base_text: self.base_text().to_string(),
612 diff_base: self.diff_base.as_ref().map(|h| h.to_string()),
613 line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
614 saved_version: proto::serialize_version(&self.saved_version),
615 saved_mtime: self.saved_mtime.map(|time| time.into()),
616 }
617 }
618
619 /// Serialize as protobufs all of the changes to the buffer since the given version.
620 pub fn serialize_ops(
621 &self,
622 since: Option<clock::Global>,
623 cx: &AppContext,
624 ) -> Task<Vec<proto::Operation>> {
625 let mut operations = Vec::new();
626 operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));
627
628 operations.extend(self.remote_selections.iter().map(|(_, set)| {
629 proto::serialize_operation(&Operation::UpdateSelections {
630 selections: set.selections.clone(),
631 lamport_timestamp: set.lamport_timestamp,
632 line_mode: set.line_mode,
633 cursor_shape: set.cursor_shape,
634 })
635 }));
636
637 for (server_id, diagnostics) in &self.diagnostics {
638 operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
639 lamport_timestamp: self.diagnostics_timestamp,
640 server_id: *server_id,
641 diagnostics: diagnostics.iter().cloned().collect(),
642 }));
643 }
644
645 operations.push(proto::serialize_operation(
646 &Operation::UpdateCompletionTriggers {
647 triggers: self.completion_triggers.clone(),
648 lamport_timestamp: self.completion_triggers_timestamp,
649 },
650 ));
651
652 let text_operations = self.text.operations().clone();
653 cx.background_executor().spawn(async move {
654 let since = since.unwrap_or_default();
655 operations.extend(
656 text_operations
657 .iter()
658 .filter(|(_, op)| !since.observed(op.timestamp()))
659 .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
660 );
661 operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
662 operations
663 })
664 }
665
666 /// Assign a language to the buffer, returning the buffer.
667 pub fn with_language(mut self, language: Arc<Language>, cx: &mut ModelContext<Self>) -> Self {
668 self.set_language(Some(language), cx);
669 self
670 }
671
672 /// Returns the [Capability] of this buffer.
673 pub fn capability(&self) -> Capability {
674 self.capability
675 }
676
677 /// Whether this buffer can only be read.
678 pub fn read_only(&self) -> bool {
679 self.capability == Capability::ReadOnly
680 }
681
682 /// Builds a [Buffer] with the given underlying [TextBuffer], diff base, [File] and [Capability].
683 pub fn build(
684 buffer: TextBuffer,
685 diff_base: Option<String>,
686 file: Option<Arc<dyn File>>,
687 capability: Capability,
688 ) -> Self {
689 let saved_mtime = file.as_ref().and_then(|file| file.mtime());
690
691 Self {
692 saved_mtime,
693 saved_version: buffer.version(),
694 reload_task: None,
695 transaction_depth: 0,
696 was_dirty_before_starting_transaction: None,
697 has_unsaved_edits: Cell::new((buffer.version(), false)),
698 text: buffer,
699 diff_base: diff_base
700 .map(|mut raw_diff_base| {
701 LineEnding::normalize(&mut raw_diff_base);
702 raw_diff_base
703 })
704 .map(Rope::from),
705 diff_base_version: 0,
706 git_diff: git::diff::BufferDiff::new(),
707 file,
708 capability,
709 syntax_map: Mutex::new(SyntaxMap::new()),
710 parsing_in_background: false,
711 non_text_state_update_count: 0,
712 sync_parse_timeout: Duration::from_millis(1),
713 autoindent_requests: Default::default(),
714 pending_autoindent: Default::default(),
715 language: None,
716 remote_selections: Default::default(),
717 diagnostics: Default::default(),
718 diagnostics_timestamp: Default::default(),
719 completion_triggers: Default::default(),
720 completion_triggers_timestamp: Default::default(),
721 deferred_ops: OperationQueue::new(),
722 has_conflict: false,
723 }
724 }
725
726 /// Retrieve a snapshot of the buffer's current state. This is computationally
727 /// cheap, and allows reading from the buffer on a background thread.
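    ///
    /// A rough sketch of reading from the snapshot on a background thread
    /// (not compiled here):
    ///
    /// ```ignore
    /// let snapshot = buffer.snapshot();
    /// let row_count = cx
    ///     .background_executor()
    ///     .spawn(async move { snapshot.max_point().row + 1 })
    ///     .await;
    /// ```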
728 pub fn snapshot(&self) -> BufferSnapshot {
729 let text = self.text.snapshot();
730 let mut syntax_map = self.syntax_map.lock();
731 syntax_map.interpolate(&text);
732 let syntax = syntax_map.snapshot();
733
734 BufferSnapshot {
735 text,
736 syntax,
737 git_diff: self.git_diff.clone(),
738 file: self.file.clone(),
739 remote_selections: self.remote_selections.clone(),
740 diagnostics: self.diagnostics.clone(),
741 language: self.language.clone(),
742 non_text_state_update_count: self.non_text_state_update_count,
743 }
744 }
745
746 #[cfg(test)]
747 pub(crate) fn as_text_snapshot(&self) -> &text::BufferSnapshot {
748 &self.text
749 }
750
751 /// Retrieve a snapshot of the buffer's raw text, without any
752 /// language-related state like the syntax tree or diagnostics.
753 pub fn text_snapshot(&self) -> text::BufferSnapshot {
754 self.text.snapshot()
755 }
756
757 /// The file associated with the buffer, if any.
758 pub fn file(&self) -> Option<&Arc<dyn File>> {
759 self.file.as_ref()
760 }
761
762 /// The version of the buffer that was last saved or reloaded from disk.
763 pub fn saved_version(&self) -> &clock::Global {
764 &self.saved_version
765 }
766
767 /// The mtime of the buffer's file when the buffer was last saved or reloaded from disk.
768 pub fn saved_mtime(&self) -> Option<SystemTime> {
769 self.saved_mtime
770 }
771
772 /// Assign a language to the buffer.
773 pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut ModelContext<Self>) {
774 self.non_text_state_update_count += 1;
775 self.syntax_map.lock().clear();
776 self.language = language;
777 self.reparse(cx);
778 cx.emit(Event::LanguageChanged);
779 }
780
781 /// Assign a language registry to the buffer. This allows the buffer to retrieve
782 /// other languages if parts of the buffer are written in different languages.
783 pub fn set_language_registry(&mut self, language_registry: Arc<LanguageRegistry>) {
784 self.syntax_map
785 .lock()
786 .set_language_registry(language_registry);
787 }
788
789 pub fn language_registry(&self) -> Option<Arc<LanguageRegistry>> {
790 self.syntax_map.lock().language_registry()
791 }
792
793 /// Assign the buffer a new [Capability].
794 pub fn set_capability(&mut self, capability: Capability, cx: &mut ModelContext<Self>) {
795 self.capability = capability;
796 cx.emit(Event::CapabilityChanged)
797 }
798
799 /// This method is called to signal that the buffer has been saved.
800 pub fn did_save(
801 &mut self,
802 version: clock::Global,
803 mtime: Option<SystemTime>,
804 cx: &mut ModelContext<Self>,
805 ) {
806 self.saved_version = version;
807 self.has_unsaved_edits
808 .set((self.saved_version().clone(), false));
809 self.has_conflict = false;
810 self.saved_mtime = mtime;
811 cx.emit(Event::Saved);
812 cx.notify();
813 }
814
815 /// Reloads the contents of the buffer from disk.
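    ///
    /// A rough sketch (not compiled here); the receiver resolves once the
    /// reload finishes, yielding the reload transaction if one was applied:
    ///
    /// ```ignore
    /// let reloaded = buffer.update(cx, |buffer, cx| buffer.reload(cx));
    /// let transaction: Option<Transaction> = reloaded.await.ok().flatten();
    /// ```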
816 pub fn reload(
817 &mut self,
818 cx: &mut ModelContext<Self>,
819 ) -> oneshot::Receiver<Option<Transaction>> {
820 let (tx, rx) = futures::channel::oneshot::channel();
821 let prev_version = self.text.version();
822 self.reload_task = Some(cx.spawn(|this, mut cx| async move {
823 let Some((new_mtime, new_text)) = this.update(&mut cx, |this, cx| {
824 let file = this.file.as_ref()?.as_local()?;
825 Some((file.mtime(), file.load(cx)))
826 })?
827 else {
828 return Ok(());
829 };
830
831 let new_text = new_text.await?;
832 let diff = this
833 .update(&mut cx, |this, cx| this.diff(new_text.clone(), cx))?
834 .await;
835 this.update(&mut cx, |this, cx| {
836 if this.version() == diff.base_version {
837 this.finalize_last_transaction();
838 this.apply_diff(diff, cx);
839 tx.send(this.finalize_last_transaction().cloned()).ok();
840 this.has_conflict = false;
841 this.did_reload(this.version(), this.line_ending(), new_mtime, cx);
842 } else {
843 if !diff.edits.is_empty()
844 || this
845 .edits_since::<usize>(&diff.base_version)
846 .next()
847 .is_some()
848 {
849 this.has_conflict = true;
850 }
851
852 this.did_reload(prev_version, this.line_ending(), this.saved_mtime, cx);
853 }
854
855 this.reload_task.take();
856 })
857 }));
858 rx
859 }
860
861 /// This method is called to signal that the buffer has been reloaded.
862 pub fn did_reload(
863 &mut self,
864 version: clock::Global,
865 line_ending: LineEnding,
866 mtime: Option<SystemTime>,
867 cx: &mut ModelContext<Self>,
868 ) {
869 self.saved_version = version;
870 self.has_unsaved_edits
871 .set((self.saved_version.clone(), false));
872 self.text.set_line_ending(line_ending);
873 self.saved_mtime = mtime;
874 cx.emit(Event::Reloaded);
875 cx.notify();
876 }
877
878 /// Updates the [File] backing this buffer. This should be called when
879 /// the file has changed or has been deleted.
880 pub fn file_updated(&mut self, new_file: Arc<dyn File>, cx: &mut ModelContext<Self>) {
881 let mut file_changed = false;
882
883 if let Some(old_file) = self.file.as_ref() {
884 if new_file.path() != old_file.path() {
885 file_changed = true;
886 }
887
888 if new_file.is_deleted() {
889 if !old_file.is_deleted() {
890 file_changed = true;
891 if !self.is_dirty() {
892 cx.emit(Event::DirtyChanged);
893 }
894 }
895 } else {
896 let new_mtime = new_file.mtime();
897 if new_mtime != old_file.mtime() {
898 file_changed = true;
899
900 if !self.is_dirty() {
901 self.reload(cx).close();
902 }
903 }
904 }
905 } else {
906 file_changed = true;
907 };
908
909 self.file = Some(new_file);
910 if file_changed {
911 self.non_text_state_update_count += 1;
912 cx.emit(Event::FileHandleChanged);
913 cx.notify();
914 }
915 }
916
    /// Returns the current diff base; see [Buffer::set_diff_base].
918 pub fn diff_base(&self) -> Option<&Rope> {
919 self.diff_base.as_ref()
920 }
921
922 /// Sets the text that will be used to compute a Git diff
923 /// against the buffer text.
924 pub fn set_diff_base(&mut self, diff_base: Option<String>, cx: &mut ModelContext<Self>) {
925 self.diff_base = diff_base
926 .map(|mut raw_diff_base| {
927 LineEnding::normalize(&mut raw_diff_base);
928 raw_diff_base
929 })
930 .map(Rope::from);
931 self.diff_base_version += 1;
932 if let Some(recalc_task) = self.git_diff_recalc(cx) {
933 cx.spawn(|buffer, mut cx| async move {
934 recalc_task.await;
935 buffer
936 .update(&mut cx, |_, cx| {
937 cx.emit(Event::DiffBaseChanged);
938 })
939 .ok();
940 })
941 .detach();
942 }
943 }
944
    /// Returns a number that is incremented each time a new diff base is set on the buffer.
946 pub fn diff_base_version(&self) -> usize {
947 self.diff_base_version
948 }
949
950 /// Recomputes the Git diff status.
951 pub fn git_diff_recalc(&mut self, cx: &mut ModelContext<Self>) -> Option<Task<()>> {
952 let diff_base = self.diff_base.clone()?;
953 let snapshot = self.snapshot();
954
955 let mut diff = self.git_diff.clone();
956 let diff = cx.background_executor().spawn(async move {
957 diff.update(&diff_base, &snapshot).await;
958 diff
959 });
960
961 Some(cx.spawn(|this, mut cx| async move {
962 let buffer_diff = diff.await;
963 this.update(&mut cx, |this, cx| {
964 this.git_diff = buffer_diff;
965 this.non_text_state_update_count += 1;
966 cx.emit(Event::DiffUpdated);
967 })
968 .ok();
969 }))
970 }
971
972 /// Returns the primary [Language] assigned to this [Buffer].
973 pub fn language(&self) -> Option<&Arc<Language>> {
974 self.language.as_ref()
975 }
976
977 /// Returns the [Language] at the given location.
978 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
979 let offset = position.to_offset(self);
980 self.syntax_map
981 .lock()
982 .layers_for_range(offset..offset, &self.text)
983 .last()
984 .map(|info| info.language.clone())
985 .or_else(|| self.language.clone())
986 }
987
988 /// An integer version number that accounts for all updates besides
989 /// the buffer's text itself (which is versioned via a version vector).
990 pub fn non_text_state_update_count(&self) -> usize {
991 self.non_text_state_update_count
992 }
993
994 /// Whether the buffer is being parsed in the background.
995 #[cfg(any(test, feature = "test-support"))]
996 pub fn is_parsing(&self) -> bool {
997 self.parsing_in_background
998 }
999
1000 /// Indicates whether the buffer contains any regions that may be
1001 /// written in a language that hasn't been loaded yet.
1002 pub fn contains_unknown_injections(&self) -> bool {
1003 self.syntax_map.lock().contains_unknown_injections()
1004 }
1005
1006 #[cfg(test)]
1007 pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
1008 self.sync_parse_timeout = timeout;
1009 }
1010
1011 /// Called after an edit to synchronize the buffer's main parse tree with
1012 /// the buffer's new underlying state.
1013 ///
1014 /// Locks the syntax map and interpolates the edits since the last reparse
1015 /// into the foreground syntax tree.
1016 ///
1017 /// Then takes a stable snapshot of the syntax map before unlocking it.
1018 /// The snapshot with the interpolated edits is sent to a background thread,
1019 /// where we ask Tree-sitter to perform an incremental parse.
1020 ///
1021 /// Meanwhile, in the foreground, we block the main thread for up to 1ms
1022 /// waiting on the parse to complete. As soon as it completes, we proceed
1023 /// synchronously, unless a 1ms timeout elapses.
1024 ///
1025 /// If we time out waiting on the parse, we spawn a second task waiting
1026 /// until the parse does complete and return with the interpolated tree still
1027 /// in the foreground. When the background parse completes, call back into
1028 /// the main thread and assign the foreground parse state.
1029 ///
1030 /// If the buffer or grammar changed since the start of the background parse,
1031 /// initiate an additional reparse recursively. To avoid concurrent parses
1032 /// for the same buffer, we only initiate a new parse if we are not already
1033 /// parsing in the background.
1034 pub fn reparse(&mut self, cx: &mut ModelContext<Self>) {
1035 if self.parsing_in_background {
1036 return;
1037 }
1038 let language = if let Some(language) = self.language.clone() {
1039 language
1040 } else {
1041 return;
1042 };
1043
1044 let text = self.text_snapshot();
1045 let parsed_version = self.version();
1046
1047 let mut syntax_map = self.syntax_map.lock();
1048 syntax_map.interpolate(&text);
1049 let language_registry = syntax_map.language_registry();
1050 let mut syntax_snapshot = syntax_map.snapshot();
1051 drop(syntax_map);
1052
1053 let parse_task = cx.background_executor().spawn({
1054 let language = language.clone();
1055 let language_registry = language_registry.clone();
1056 async move {
1057 syntax_snapshot.reparse(&text, language_registry, language);
1058 syntax_snapshot
1059 }
1060 });
1061
1062 match cx
1063 .background_executor()
1064 .block_with_timeout(self.sync_parse_timeout, parse_task)
1065 {
1066 Ok(new_syntax_snapshot) => {
1067 self.did_finish_parsing(new_syntax_snapshot, cx);
1068 return;
1069 }
1070 Err(parse_task) => {
1071 self.parsing_in_background = true;
1072 cx.spawn(move |this, mut cx| async move {
1073 let new_syntax_map = parse_task.await;
1074 this.update(&mut cx, move |this, cx| {
1075 let grammar_changed =
1076 this.language.as_ref().map_or(true, |current_language| {
1077 !Arc::ptr_eq(&language, current_language)
1078 });
1079 let language_registry_changed = new_syntax_map
1080 .contains_unknown_injections()
1081 && language_registry.map_or(false, |registry| {
1082 registry.version() != new_syntax_map.language_registry_version()
1083 });
1084 let parse_again = language_registry_changed
1085 || grammar_changed
1086 || this.version.changed_since(&parsed_version);
1087 this.did_finish_parsing(new_syntax_map, cx);
1088 this.parsing_in_background = false;
1089 if parse_again {
1090 this.reparse(cx);
1091 }
1092 })
1093 .ok();
1094 })
1095 .detach();
1096 }
1097 }
1098 }
1099
1100 fn did_finish_parsing(&mut self, syntax_snapshot: SyntaxSnapshot, cx: &mut ModelContext<Self>) {
1101 self.non_text_state_update_count += 1;
1102 self.syntax_map.lock().did_parse(syntax_snapshot);
1103 self.request_autoindent(cx);
1104 cx.emit(Event::Reparsed);
1105 cx.notify();
1106 }
1107
1108 /// Assign to the buffer a set of diagnostics created by a given language server.
1109 pub fn update_diagnostics(
1110 &mut self,
1111 server_id: LanguageServerId,
1112 diagnostics: DiagnosticSet,
1113 cx: &mut ModelContext<Self>,
1114 ) {
1115 let lamport_timestamp = self.text.lamport_clock.tick();
1116 let op = Operation::UpdateDiagnostics {
1117 server_id,
1118 diagnostics: diagnostics.iter().cloned().collect(),
1119 lamport_timestamp,
1120 };
1121 self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
1122 self.send_operation(op, cx);
1123 }
1124
1125 fn request_autoindent(&mut self, cx: &mut ModelContext<Self>) {
1126 if let Some(indent_sizes) = self.compute_autoindents() {
1127 let indent_sizes = cx.background_executor().spawn(indent_sizes);
1128 match cx
1129 .background_executor()
1130 .block_with_timeout(Duration::from_micros(500), indent_sizes)
1131 {
1132 Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
1133 Err(indent_sizes) => {
1134 self.pending_autoindent = Some(cx.spawn(|this, mut cx| async move {
1135 let indent_sizes = indent_sizes.await;
1136 this.update(&mut cx, |this, cx| {
1137 this.apply_autoindents(indent_sizes, cx);
1138 })
1139 .ok();
1140 }));
1141 }
1142 }
1143 } else {
1144 self.autoindent_requests.clear();
1145 }
1146 }
1147
1148 fn compute_autoindents(&self) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>>> {
1149 let max_rows_between_yields = 100;
1150 let snapshot = self.snapshot();
1151 if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
1152 return None;
1153 }
1154
1155 let autoindent_requests = self.autoindent_requests.clone();
1156 Some(async move {
1157 let mut indent_sizes = BTreeMap::new();
1158 for request in autoindent_requests {
1159 // Resolve each edited range to its row in the current buffer and in the
1160 // buffer before this batch of edits.
1161 let mut row_ranges = Vec::new();
1162 let mut old_to_new_rows = BTreeMap::new();
1163 let mut language_indent_sizes_by_new_row = Vec::new();
1164 for entry in &request.entries {
1165 let position = entry.range.start;
1166 let new_row = position.to_point(&snapshot).row;
1167 let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
1168 language_indent_sizes_by_new_row.push((new_row, entry.indent_size));
1169
1170 if !entry.first_line_is_new {
1171 let old_row = position.to_point(&request.before_edit).row;
1172 old_to_new_rows.insert(old_row, new_row);
1173 }
1174 row_ranges.push((new_row..new_end_row, entry.original_indent_column));
1175 }
1176
1177 // Build a map containing the suggested indentation for each of the edited lines
1178 // with respect to the state of the buffer before these edits. This map is keyed
1179 // by the rows for these lines in the current state of the buffer.
1180 let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
1181 let old_edited_ranges =
1182 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
1183 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1184 let mut language_indent_size = IndentSize::default();
1185 for old_edited_range in old_edited_ranges {
1186 let suggestions = request
1187 .before_edit
1188 .suggest_autoindents(old_edited_range.clone())
1189 .into_iter()
1190 .flatten();
1191 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
1192 if let Some(suggestion) = suggestion {
1193 let new_row = *old_to_new_rows.get(&old_row).unwrap();
1194
1195 // Find the indent size based on the language for this row.
1196 while let Some((row, size)) = language_indent_sizes.peek() {
1197 if *row > new_row {
1198 break;
1199 }
1200 language_indent_size = *size;
1201 language_indent_sizes.next();
1202 }
1203
1204 let suggested_indent = old_to_new_rows
1205 .get(&suggestion.basis_row)
1206 .and_then(|from_row| {
1207 Some(old_suggestions.get(from_row).copied()?.0)
1208 })
1209 .unwrap_or_else(|| {
1210 request
1211 .before_edit
1212 .indent_size_for_line(suggestion.basis_row)
1213 })
1214 .with_delta(suggestion.delta, language_indent_size);
1215 old_suggestions
1216 .insert(new_row, (suggested_indent, suggestion.within_error));
1217 }
1218 }
1219 yield_now().await;
1220 }
1221
1222 // In block mode, only compute indentation suggestions for the first line
1223 // of each insertion. Otherwise, compute suggestions for every inserted line.
1224 let new_edited_row_ranges = contiguous_ranges(
1225 row_ranges.iter().flat_map(|(range, _)| {
1226 if request.is_block_mode {
1227 range.start..range.start + 1
1228 } else {
1229 range.clone()
1230 }
1231 }),
1232 max_rows_between_yields,
1233 );
1234
1235 // Compute new suggestions for each line, but only include them in the result
1236 // if they differ from the old suggestion for that line.
1237 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1238 let mut language_indent_size = IndentSize::default();
1239 for new_edited_row_range in new_edited_row_ranges {
1240 let suggestions = snapshot
1241 .suggest_autoindents(new_edited_row_range.clone())
1242 .into_iter()
1243 .flatten();
1244 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
1245 if let Some(suggestion) = suggestion {
1246 // Find the indent size based on the language for this row.
1247 while let Some((row, size)) = language_indent_sizes.peek() {
1248 if *row > new_row {
1249 break;
1250 }
1251 language_indent_size = *size;
1252 language_indent_sizes.next();
1253 }
1254
1255 let suggested_indent = indent_sizes
1256 .get(&suggestion.basis_row)
1257 .copied()
1258 .unwrap_or_else(|| {
1259 snapshot.indent_size_for_line(suggestion.basis_row)
1260 })
1261 .with_delta(suggestion.delta, language_indent_size);
1262 if old_suggestions.get(&new_row).map_or(
1263 true,
1264 |(old_indentation, was_within_error)| {
1265 suggested_indent != *old_indentation
1266 && (!suggestion.within_error || *was_within_error)
1267 },
1268 ) {
1269 indent_sizes.insert(new_row, suggested_indent);
1270 }
1271 }
1272 }
1273 yield_now().await;
1274 }
1275
1276 // For each block of inserted text, adjust the indentation of the remaining
1277 // lines of the block by the same amount as the first line was adjusted.
1278 if request.is_block_mode {
1279 for (row_range, original_indent_column) in
1280 row_ranges
1281 .into_iter()
1282 .filter_map(|(range, original_indent_column)| {
1283 if range.len() > 1 {
1284 Some((range, original_indent_column?))
1285 } else {
1286 None
1287 }
1288 })
1289 {
1290 let new_indent = indent_sizes
1291 .get(&row_range.start)
1292 .copied()
1293 .unwrap_or_else(|| snapshot.indent_size_for_line(row_range.start));
1294 let delta = new_indent.len as i64 - original_indent_column as i64;
1295 if delta != 0 {
1296 for row in row_range.skip(1) {
1297 indent_sizes.entry(row).or_insert_with(|| {
1298 let mut size = snapshot.indent_size_for_line(row);
1299 if size.kind == new_indent.kind {
1300 match delta.cmp(&0) {
1301 Ordering::Greater => size.len += delta as u32,
1302 Ordering::Less => {
1303 size.len = size.len.saturating_sub(-delta as u32)
1304 }
1305 Ordering::Equal => {}
1306 }
1307 }
1308 size
1309 });
1310 }
1311 }
1312 }
1313 }
1314 }
1315
1316 indent_sizes
1317 })
1318 }
1319
1320 fn apply_autoindents(
1321 &mut self,
1322 indent_sizes: BTreeMap<u32, IndentSize>,
1323 cx: &mut ModelContext<Self>,
1324 ) {
1325 self.autoindent_requests.clear();
1326
1327 let edits: Vec<_> = indent_sizes
1328 .into_iter()
1329 .filter_map(|(row, indent_size)| {
1330 let current_size = indent_size_for_line(self, row);
1331 Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
1332 })
1333 .collect();
1334
1335 self.edit(edits, None, cx);
1336 }
1337
1338 /// Create a minimal edit that will cause the given row to be indented
1339 /// with the given size. After applying this edit, the length of the line
1340 /// will always be at least `new_size.len`.
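    ///
    /// A rough sketch of the expected output (not compiled as a doctest):
    ///
    /// ```ignore
    /// // Row 3 currently has two spaces of indentation; we want six.
    /// let edit = Buffer::edit_for_indent_size_adjustment(
    ///     3,
    ///     IndentSize { len: 2, kind: IndentKind::Space },
    ///     IndentSize { len: 6, kind: IndentKind::Space },
    /// );
    /// // Inserts four spaces at the start of row 3.
    /// assert_eq!(
    ///     edit,
    ///     Some((Point::new(3, 0)..Point::new(3, 0), "    ".to_string()))
    /// );
    /// ```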
1341 pub fn edit_for_indent_size_adjustment(
1342 row: u32,
1343 current_size: IndentSize,
1344 new_size: IndentSize,
1345 ) -> Option<(Range<Point>, String)> {
1346 if new_size.kind == current_size.kind {
            match new_size.len.cmp(&current_size.len) {
1348 Ordering::Greater => {
1349 let point = Point::new(row, 0);
1350 Some((
1351 point..point,
1352 iter::repeat(new_size.char())
1353 .take((new_size.len - current_size.len) as usize)
1354 .collect::<String>(),
1355 ))
1356 }
1357
1358 Ordering::Less => Some((
1359 Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
1360 String::new(),
1361 )),
1362
1363 Ordering::Equal => None,
1364 }
1365 } else {
1366 Some((
1367 Point::new(row, 0)..Point::new(row, current_size.len),
1368 iter::repeat(new_size.char())
1369 .take(new_size.len as usize)
1370 .collect::<String>(),
1371 ))
1372 }
1373 }
1374
1375 /// Spawns a background task that asynchronously computes a `Diff` between the buffer's text
1376 /// and the given new text.
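    ///
    /// A rough sketch of computing a diff and applying it back to the buffer
    /// (not compiled here); `new_text` is a hypothetical replacement string:
    ///
    /// ```ignore
    /// let diff = buffer.read(cx).diff(new_text, cx);
    /// let diff = diff.await;
    /// buffer.update(cx, |buffer, cx| buffer.apply_diff(diff, cx));
    /// ```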
1377 pub fn diff(&self, mut new_text: String, cx: &AppContext) -> Task<Diff> {
1378 let old_text = self.as_rope().clone();
1379 let base_version = self.version();
1380 cx.background_executor()
1381 .spawn_labeled(*BUFFER_DIFF_TASK, async move {
1382 let old_text = old_text.to_string();
1383 let line_ending = LineEnding::detect(&new_text);
1384 LineEnding::normalize(&mut new_text);
1385
1386 let diff = TextDiff::from_chars(old_text.as_str(), new_text.as_str());
1387 let empty: Arc<str> = "".into();
1388
1389 let mut edits = Vec::new();
1390 let mut old_offset = 0;
1391 let mut new_offset = 0;
1392 let mut last_edit: Option<(Range<usize>, Range<usize>)> = None;
1393 for change in diff.iter_all_changes().map(Some).chain([None]) {
1394 if let Some(change) = &change {
1395 let len = change.value().len();
1396 match change.tag() {
1397 ChangeTag::Equal => {
1398 old_offset += len;
1399 new_offset += len;
1400 }
1401 ChangeTag::Delete => {
1402 let old_end_offset = old_offset + len;
1403 if let Some((last_old_range, _)) = &mut last_edit {
1404 last_old_range.end = old_end_offset;
1405 } else {
1406 last_edit =
1407 Some((old_offset..old_end_offset, new_offset..new_offset));
1408 }
1409 old_offset = old_end_offset;
1410 }
1411 ChangeTag::Insert => {
1412 let new_end_offset = new_offset + len;
1413 if let Some((_, last_new_range)) = &mut last_edit {
1414 last_new_range.end = new_end_offset;
1415 } else {
1416 last_edit =
1417 Some((old_offset..old_offset, new_offset..new_end_offset));
1418 }
1419 new_offset = new_end_offset;
1420 }
1421 }
1422 }
1423
1424 if let Some((old_range, new_range)) = &last_edit {
1425 if old_offset > old_range.end
1426 || new_offset > new_range.end
1427 || change.is_none()
1428 {
1429 let text = if new_range.is_empty() {
1430 empty.clone()
1431 } else {
1432 new_text[new_range.clone()].into()
1433 };
1434 edits.push((old_range.clone(), text));
1435 last_edit.take();
1436 }
1437 }
1438 }
1439
1440 Diff {
1441 base_version,
1442 line_ending,
1443 edits,
1444 }
1445 })
1446 }
1447
1448 /// Spawns a background task that searches the buffer for any whitespace
    /// at the ends of lines, and returns a `Diff` that removes that whitespace.
1450 pub fn remove_trailing_whitespace(&self, cx: &AppContext) -> Task<Diff> {
1451 let old_text = self.as_rope().clone();
1452 let line_ending = self.line_ending();
1453 let base_version = self.version();
1454 cx.background_executor().spawn(async move {
1455 let ranges = trailing_whitespace_ranges(&old_text);
1456 let empty = Arc::<str>::from("");
1457 Diff {
1458 base_version,
1459 line_ending,
1460 edits: ranges
1461 .into_iter()
1462 .map(|range| (range, empty.clone()))
1463 .collect(),
1464 }
1465 })
1466 }
1467
1468 /// Ensures that the buffer ends with a single newline character, and
1469 /// no other whitespace.
1470 pub fn ensure_final_newline(&mut self, cx: &mut ModelContext<Self>) {
1471 let len = self.len();
1472 let mut offset = len;
1473 for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
1474 let non_whitespace_len = chunk
1475 .trim_end_matches(|c: char| c.is_ascii_whitespace())
1476 .len();
1477 offset -= chunk.len();
1478 offset += non_whitespace_len;
1479 if non_whitespace_len != 0 {
1480 if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
1481 return;
1482 }
1483 break;
1484 }
1485 }
1486 self.edit([(offset..len, "\n")], None, cx);
1487 }
1488
    /// Applies a diff to the buffer. If the buffer has changed since the given diff was
    /// calculated, the diff is adjusted to account for those changes, and any parts of the
    /// diff that conflict with those changes are discarded.
1492 pub fn apply_diff(&mut self, diff: Diff, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
1493 // Check for any edits to the buffer that have occurred since this diff
1494 // was computed.
1495 let snapshot = self.snapshot();
1496 let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
1497 let mut delta = 0;
1498 let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
1499 while let Some(edit_since) = edits_since.peek() {
1500 // If the edit occurs after a diff hunk, then it does not
1501 // affect that hunk.
1502 if edit_since.old.start > range.end {
1503 break;
1504 }
1505 // If the edit precedes the diff hunk, then adjust the hunk
1506 // to reflect the edit.
1507 else if edit_since.old.end < range.start {
1508 delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
1509 edits_since.next();
1510 }
1511 // If the edit intersects a diff hunk, then discard that hunk.
1512 else {
1513 return None;
1514 }
1515 }
1516
1517 let start = (range.start as i64 + delta) as usize;
1518 let end = (range.end as i64 + delta) as usize;
1519 Some((start..end, new_text))
1520 });
1521
1522 self.start_transaction();
1523 self.text.set_line_ending(diff.line_ending);
1524 self.edit(adjusted_edits, None, cx);
1525 self.end_transaction(cx)
1526 }
1527
1528 fn has_unsaved_edits(&self) -> bool {
1529 let (last_version, has_unsaved_edits) = self.has_unsaved_edits.take();
1530
1531 if last_version == self.version {
1532 self.has_unsaved_edits
1533 .set((last_version, has_unsaved_edits));
1534 return has_unsaved_edits;
1535 }
1536
1537 let has_edits = self.has_edits_since(&self.saved_version);
1538 self.has_unsaved_edits
1539 .set((self.version.clone(), has_edits));
1540 has_edits
1541 }
1542
1543 /// Checks if the buffer has unsaved changes.
1544 pub fn is_dirty(&self) -> bool {
1545 self.has_conflict
1546 || self.has_unsaved_edits()
1547 || self
1548 .file
1549 .as_ref()
1550 .map_or(false, |file| file.is_deleted() || !file.is_created())
1551 }
1552
1553 /// Checks if the buffer and its file have both changed since the buffer
1554 /// was last saved or reloaded.
1555 pub fn has_conflict(&self) -> bool {
1556 self.has_conflict
1557 || self.file.as_ref().map_or(false, |file| {
1558 file.mtime() > self.saved_mtime && self.has_unsaved_edits()
1559 })
1560 }
1561
1562 /// Gets a [`Subscription`] that tracks all of the changes to the buffer's text.
1563 pub fn subscribe(&mut self) -> Subscription {
1564 self.text.subscribe()
1565 }
1566
1567 /// Starts a transaction, if one is not already in-progress. When undoing or
1568 /// redoing edits, all of the edits performed within a transaction are undone
1569 /// or redone together.
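    ///
    /// A rough sketch of grouping two edits into a single undoable transaction
    /// (not compiled here):
    ///
    /// ```ignore
    /// buffer.start_transaction();
    /// buffer.edit([(0..0, "fn ")], None, cx);
    /// buffer.edit([(3..3, "main")], None, cx);
    /// buffer.end_transaction(cx);
    /// ```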
1570 pub fn start_transaction(&mut self) -> Option<TransactionId> {
1571 self.start_transaction_at(Instant::now())
1572 }
1573
1574 /// Starts a transaction, providing the current time. Subsequent transactions
1575 /// that occur within a short period of time will be grouped together. This
1576 /// is controlled by the buffer's undo grouping duration.
1577 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
1578 self.transaction_depth += 1;
1579 if self.was_dirty_before_starting_transaction.is_none() {
1580 self.was_dirty_before_starting_transaction = Some(self.is_dirty());
1581 }
1582 self.text.start_transaction_at(now)
1583 }
1584
1585 /// Terminates the current transaction, if this is the outermost transaction.
1586 pub fn end_transaction(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
1587 self.end_transaction_at(Instant::now(), cx)
1588 }
1589
1590 /// Terminates the current transaction, providing the current time. Subsequent transactions
1591 /// that occur within a short period of time will be grouped together. This
1592 /// is controlled by the buffer's undo grouping duration.
1593 pub fn end_transaction_at(
1594 &mut self,
1595 now: Instant,
1596 cx: &mut ModelContext<Self>,
1597 ) -> Option<TransactionId> {
1598 assert!(self.transaction_depth > 0);
1599 self.transaction_depth -= 1;
1600 let was_dirty = if self.transaction_depth == 0 {
1601 self.was_dirty_before_starting_transaction.take().unwrap()
1602 } else {
1603 false
1604 };
1605 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
1606 self.did_edit(&start_version, was_dirty, cx);
1607 Some(transaction_id)
1608 } else {
1609 None
1610 }
1611 }
1612
1613 /// Manually add a transaction to the buffer's undo history.
1614 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
1615 self.text.push_transaction(transaction, now);
1616 }
1617
1618 /// Prevent the last transaction from being grouped with any subsequent transactions,
    /// even if they occur within the buffer's undo grouping duration.
1620 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
1621 self.text.finalize_last_transaction()
1622 }
1623
1624 /// Manually group all changes since a given transaction.
1625 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
1626 self.text.group_until_transaction(transaction_id);
1627 }
1628
    /// Manually remove a transaction from the buffer's undo history.
1630 pub fn forget_transaction(&mut self, transaction_id: TransactionId) {
1631 self.text.forget_transaction(transaction_id);
1632 }
1633
1634 /// Manually merge two adjacent transactions in the buffer's undo history.
1635 pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
1636 self.text.merge_transactions(transaction, destination);
1637 }
1638
1639 /// Waits for the buffer to receive operations with the given timestamps.
1640 pub fn wait_for_edits(
1641 &mut self,
1642 edit_ids: impl IntoIterator<Item = clock::Lamport>,
1643 ) -> impl Future<Output = Result<()>> {
1644 self.text.wait_for_edits(edit_ids)
1645 }
1646
1647 /// Waits for the buffer to receive the operations necessary for resolving the given anchors.
1648 pub fn wait_for_anchors(
1649 &mut self,
1650 anchors: impl IntoIterator<Item = Anchor>,
1651 ) -> impl 'static + Future<Output = Result<()>> {
1652 self.text.wait_for_anchors(anchors)
1653 }
1654
1655 /// Waits for the buffer to receive operations up to the given version.
1656 pub fn wait_for_version(&mut self, version: clock::Global) -> impl Future<Output = Result<()>> {
1657 self.text.wait_for_version(version)
1658 }
1659
1660 /// Forces all futures returned by [`Buffer::wait_for_version`], [`Buffer::wait_for_edits`], or
    /// [`Buffer::wait_for_anchors`] to resolve with an error.
1662 pub fn give_up_waiting(&mut self) {
1663 self.text.give_up_waiting();
1664 }
1665
1666 /// Stores a set of selections that should be broadcasted to all of the buffer's replicas.
1667 pub fn set_active_selections(
1668 &mut self,
1669 selections: Arc<[Selection<Anchor>]>,
1670 line_mode: bool,
1671 cursor_shape: CursorShape,
1672 cx: &mut ModelContext<Self>,
1673 ) {
1674 let lamport_timestamp = self.text.lamport_clock.tick();
1675 self.remote_selections.insert(
1676 self.text.replica_id(),
1677 SelectionSet {
1678 selections: selections.clone(),
1679 lamport_timestamp,
1680 line_mode,
1681 cursor_shape,
1682 },
1683 );
1684 self.send_operation(
1685 Operation::UpdateSelections {
1686 selections,
1687 line_mode,
1688 lamport_timestamp,
1689 cursor_shape,
1690 },
1691 cx,
1692 );
1693 self.non_text_state_update_count += 1;
1694 cx.notify();
1695 }
1696
1697 /// Clears the selections, so that other replicas of the buffer do not see any selections for
1698 /// this replica.
1699 pub fn remove_active_selections(&mut self, cx: &mut ModelContext<Self>) {
1700 if self
1701 .remote_selections
1702 .get(&self.text.replica_id())
1703 .map_or(true, |set| !set.selections.is_empty())
1704 {
1705 self.set_active_selections(Arc::from([]), false, Default::default(), cx);
1706 }
1707 }
1708
1709 /// Replaces the buffer's entire text.
1710 pub fn set_text<T>(&mut self, text: T, cx: &mut ModelContext<Self>) -> Option<clock::Lamport>
1711 where
1712 T: Into<Arc<str>>,
1713 {
1714 self.autoindent_requests.clear();
1715 self.edit([(0..self.len(), text)], None, cx)
1716 }
1717
1718 /// Applies the given edits to the buffer. Each edit is specified as a range of text to
1719 /// delete, and a string of text to insert at that location.
1720 ///
1721 /// If an [`AutoindentMode`] is provided, then the buffer will enqueue an auto-indent
1722 /// request for the edited ranges, which will be processed when the buffer finishes
1723 /// parsing.
1724 ///
    /// Parsing takes place at the end of a transaction, and may run synchronously
    /// or asynchronously, depending on the changes.
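    ///
    /// # Example
    ///
    /// A simplified, compile-ignored sketch. It assumes `cx` is a `ModelContext<Buffer>`
    /// provided by gpui and that the buffer currently contains the text "hello world":
    ///
    /// ```ignore
    /// // Replace the first word and append a character, without auto-indenting.
    /// buffer.edit([(0..5, "goodbye"), (11..11, "!")], None, cx);
    /// assert_eq!(buffer.text(), "goodbye world!");
    /// ```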
1727 pub fn edit<I, S, T>(
1728 &mut self,
1729 edits_iter: I,
1730 autoindent_mode: Option<AutoindentMode>,
1731 cx: &mut ModelContext<Self>,
1732 ) -> Option<clock::Lamport>
1733 where
1734 I: IntoIterator<Item = (Range<S>, T)>,
1735 S: ToOffset,
1736 T: Into<Arc<str>>,
1737 {
1738 // Skip invalid edits and coalesce contiguous ones.
1739 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
1740 for (range, new_text) in edits_iter {
1741 let mut range = range.start.to_offset(self)..range.end.to_offset(self);
1742 if range.start > range.end {
1743 mem::swap(&mut range.start, &mut range.end);
1744 }
1745 let new_text = new_text.into();
1746 if !new_text.is_empty() || !range.is_empty() {
1747 if let Some((prev_range, prev_text)) = edits.last_mut() {
1748 if prev_range.end >= range.start {
1749 prev_range.end = cmp::max(prev_range.end, range.end);
1750 *prev_text = format!("{prev_text}{new_text}").into();
1751 } else {
1752 edits.push((range, new_text));
1753 }
1754 } else {
1755 edits.push((range, new_text));
1756 }
1757 }
1758 }
1759 if edits.is_empty() {
1760 return None;
1761 }
1762
1763 self.start_transaction();
1764 self.pending_autoindent.take();
1765 let autoindent_request = autoindent_mode
1766 .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
1767
1768 let edit_operation = self.text.edit(edits.iter().cloned());
1769 let edit_id = edit_operation.timestamp();
1770
1771 if let Some((before_edit, mode)) = autoindent_request {
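            // As the edits are applied in order, `delta` accumulates the difference between
            // old and new offsets so that each edit's position can be mapped into the
            // post-edit coordinate space.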
1772 let mut delta = 0isize;
1773 let entries = edits
1774 .into_iter()
1775 .enumerate()
1776 .zip(&edit_operation.as_edit().unwrap().new_text)
1777 .map(|((ix, (range, _)), new_text)| {
1778 let new_text_length = new_text.len();
1779 let old_start = range.start.to_point(&before_edit);
1780 let new_start = (delta + range.start as isize) as usize;
1781 delta += new_text_length as isize - (range.end as isize - range.start as isize);
1782
1783 let mut range_of_insertion_to_indent = 0..new_text_length;
1784 let mut first_line_is_new = false;
1785 let mut original_indent_column = None;
1786
                    // When inserting an entire line at the beginning of an existing line,
                    // treat the insertion's first line as new.
1789 if new_text.contains('\n')
1790 && old_start.column <= before_edit.indent_size_for_line(old_start.row).len
1791 {
1792 first_line_is_new = true;
1793 }
1794
1795 // When inserting text starting with a newline, avoid auto-indenting the
1796 // previous line.
1797 if new_text.starts_with('\n') {
1798 range_of_insertion_to_indent.start += 1;
1799 first_line_is_new = true;
1800 }
1801
1802 // Avoid auto-indenting after the insertion.
1803 if let AutoindentMode::Block {
1804 original_indent_columns,
1805 } = &mode
1806 {
1807 original_indent_column =
1808 Some(original_indent_columns.get(ix).copied().unwrap_or_else(|| {
1809 indent_size_for_text(
1810 new_text[range_of_insertion_to_indent.clone()].chars(),
1811 )
1812 .len
1813 }));
1814 if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
1815 range_of_insertion_to_indent.end -= 1;
1816 }
1817 }
1818
1819 AutoindentRequestEntry {
1820 first_line_is_new,
1821 original_indent_column,
1822 indent_size: before_edit.language_indent_size_at(range.start, cx),
1823 range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
1824 ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
1825 }
1826 })
1827 .collect();
1828
1829 self.autoindent_requests.push(Arc::new(AutoindentRequest {
1830 before_edit,
1831 entries,
1832 is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
1833 }));
1834 }
1835
1836 self.end_transaction(cx);
1837 self.send_operation(Operation::Buffer(edit_operation), cx);
1838 Some(edit_id)
1839 }
1840
1841 fn did_edit(
1842 &mut self,
1843 old_version: &clock::Global,
1844 was_dirty: bool,
1845 cx: &mut ModelContext<Self>,
1846 ) {
1847 if self.edits_since::<usize>(old_version).next().is_none() {
1848 return;
1849 }
1850
1851 self.reparse(cx);
1852
1853 cx.emit(Event::Edited);
1854 if was_dirty != self.is_dirty() {
1855 cx.emit(Event::DirtyChanged);
1856 }
1857 cx.notify();
1858 }
1859
1860 /// Applies the given remote operations to the buffer.
1861 pub fn apply_ops<I: IntoIterator<Item = Operation>>(
1862 &mut self,
1863 ops: I,
1864 cx: &mut ModelContext<Self>,
1865 ) -> Result<()> {
1866 self.pending_autoindent.take();
1867 let was_dirty = self.is_dirty();
1868 let old_version = self.version.clone();
1869 let mut deferred_ops = Vec::new();
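        // Text operations are collected and applied in bulk by the underlying text buffer;
        // all other operations are applied immediately if possible, or deferred until the
        // operations they depend on have arrived.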
1870 let buffer_ops = ops
1871 .into_iter()
1872 .filter_map(|op| match op {
1873 Operation::Buffer(op) => Some(op),
1874 _ => {
1875 if self.can_apply_op(&op) {
1876 self.apply_op(op, cx);
1877 } else {
1878 deferred_ops.push(op);
1879 }
1880 None
1881 }
1882 })
1883 .collect::<Vec<_>>();
1884 self.text.apply_ops(buffer_ops)?;
1885 self.deferred_ops.insert(deferred_ops);
1886 self.flush_deferred_ops(cx);
1887 self.did_edit(&old_version, was_dirty, cx);
1888 // Notify independently of whether the buffer was edited as the operations could include a
1889 // selection update.
1890 cx.notify();
1891 Ok(())
1892 }
1893
1894 fn flush_deferred_ops(&mut self, cx: &mut ModelContext<Self>) {
1895 let mut deferred_ops = Vec::new();
1896 for op in self.deferred_ops.drain().iter().cloned() {
1897 if self.can_apply_op(&op) {
1898 self.apply_op(op, cx);
1899 } else {
1900 deferred_ops.push(op);
1901 }
1902 }
1903 self.deferred_ops.insert(deferred_ops);
1904 }
1905
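    /// Whether this buffer or its underlying text buffer has deferred any operations
    /// because the operations they depend on have not yet been received.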
1906 pub fn has_deferred_ops(&self) -> bool {
1907 !self.deferred_ops.is_empty() || self.text.has_deferred_ops()
1908 }
1909
1910 fn can_apply_op(&self, operation: &Operation) -> bool {
1911 match operation {
1912 Operation::Buffer(_) => {
1913 unreachable!("buffer operations should never be applied at this layer")
1914 }
1915 Operation::UpdateDiagnostics {
1916 diagnostics: diagnostic_set,
1917 ..
1918 } => diagnostic_set.iter().all(|diagnostic| {
1919 self.text.can_resolve(&diagnostic.range.start)
1920 && self.text.can_resolve(&diagnostic.range.end)
1921 }),
1922 Operation::UpdateSelections { selections, .. } => selections
1923 .iter()
1924 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
1925 Operation::UpdateCompletionTriggers { .. } => true,
1926 }
1927 }
1928
1929 fn apply_op(&mut self, operation: Operation, cx: &mut ModelContext<Self>) {
1930 match operation {
1931 Operation::Buffer(_) => {
1932 unreachable!("buffer operations should never be applied at this layer")
1933 }
1934 Operation::UpdateDiagnostics {
1935 server_id,
1936 diagnostics: diagnostic_set,
1937 lamport_timestamp,
1938 } => {
1939 let snapshot = self.snapshot();
1940 self.apply_diagnostic_update(
1941 server_id,
1942 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
1943 lamport_timestamp,
1944 cx,
1945 );
1946 }
1947 Operation::UpdateSelections {
1948 selections,
1949 lamport_timestamp,
1950 line_mode,
1951 cursor_shape,
1952 } => {
1953 if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id) {
1954 if set.lamport_timestamp > lamport_timestamp {
1955 return;
1956 }
1957 }
1958
1959 self.remote_selections.insert(
1960 lamport_timestamp.replica_id,
1961 SelectionSet {
1962 selections,
1963 lamport_timestamp,
1964 line_mode,
1965 cursor_shape,
1966 },
1967 );
1968 self.text.lamport_clock.observe(lamport_timestamp);
1969 self.non_text_state_update_count += 1;
1970 }
1971 Operation::UpdateCompletionTriggers {
1972 triggers,
1973 lamport_timestamp,
1974 } => {
1975 self.completion_triggers = triggers;
1976 self.text.lamport_clock.observe(lamport_timestamp);
1977 }
1978 }
1979 }
1980
1981 fn apply_diagnostic_update(
1982 &mut self,
1983 server_id: LanguageServerId,
1984 diagnostics: DiagnosticSet,
1985 lamport_timestamp: clock::Lamport,
1986 cx: &mut ModelContext<Self>,
1987 ) {
1988 if lamport_timestamp > self.diagnostics_timestamp {
1989 let ix = self.diagnostics.binary_search_by_key(&server_id, |e| e.0);
1990 if diagnostics.len() == 0 {
1991 if let Ok(ix) = ix {
1992 self.diagnostics.remove(ix);
1993 }
1994 } else {
1995 match ix {
1996 Err(ix) => self.diagnostics.insert(ix, (server_id, diagnostics)),
1997 Ok(ix) => self.diagnostics[ix].1 = diagnostics,
1998 };
1999 }
2000 self.diagnostics_timestamp = lamport_timestamp;
2001 self.non_text_state_update_count += 1;
2002 self.text.lamport_clock.observe(lamport_timestamp);
2003 cx.notify();
2004 cx.emit(Event::DiagnosticsUpdated);
2005 }
2006 }
2007
2008 fn send_operation(&mut self, operation: Operation, cx: &mut ModelContext<Self>) {
2009 cx.emit(Event::Operation(operation));
2010 }
2011
2012 /// Removes the selections for a given peer.
2013 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut ModelContext<Self>) {
2014 self.remote_selections.remove(&replica_id);
2015 cx.notify();
2016 }
2017
2018 /// Undoes the most recent transaction.
2019 pub fn undo(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
2020 let was_dirty = self.is_dirty();
2021 let old_version = self.version.clone();
2022
2023 if let Some((transaction_id, operation)) = self.text.undo() {
2024 self.send_operation(Operation::Buffer(operation), cx);
2025 self.did_edit(&old_version, was_dirty, cx);
2026 Some(transaction_id)
2027 } else {
2028 None
2029 }
2030 }
2031
2032 /// Manually undoes a specific transaction in the buffer's undo history.
2033 pub fn undo_transaction(
2034 &mut self,
2035 transaction_id: TransactionId,
2036 cx: &mut ModelContext<Self>,
2037 ) -> bool {
2038 let was_dirty = self.is_dirty();
2039 let old_version = self.version.clone();
2040 if let Some(operation) = self.text.undo_transaction(transaction_id) {
2041 self.send_operation(Operation::Buffer(operation), cx);
2042 self.did_edit(&old_version, was_dirty, cx);
2043 true
2044 } else {
2045 false
2046 }
2047 }
2048
2049 /// Manually undoes all changes after a given transaction in the buffer's undo history.
2050 pub fn undo_to_transaction(
2051 &mut self,
2052 transaction_id: TransactionId,
2053 cx: &mut ModelContext<Self>,
2054 ) -> bool {
2055 let was_dirty = self.is_dirty();
2056 let old_version = self.version.clone();
2057
2058 let operations = self.text.undo_to_transaction(transaction_id);
2059 let undone = !operations.is_empty();
2060 for operation in operations {
2061 self.send_operation(Operation::Buffer(operation), cx);
2062 }
2063 if undone {
2064 self.did_edit(&old_version, was_dirty, cx)
2065 }
2066 undone
2067 }
2068
    /// Redoes the most recent transaction.
2070 pub fn redo(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
2071 let was_dirty = self.is_dirty();
2072 let old_version = self.version.clone();
2073
2074 if let Some((transaction_id, operation)) = self.text.redo() {
2075 self.send_operation(Operation::Buffer(operation), cx);
2076 self.did_edit(&old_version, was_dirty, cx);
2077 Some(transaction_id)
2078 } else {
2079 None
2080 }
2081 }
2082
    /// Manually redoes all changes up to a given transaction in the buffer's redo history.
2084 pub fn redo_to_transaction(
2085 &mut self,
2086 transaction_id: TransactionId,
2087 cx: &mut ModelContext<Self>,
2088 ) -> bool {
2089 let was_dirty = self.is_dirty();
2090 let old_version = self.version.clone();
2091
2092 let operations = self.text.redo_to_transaction(transaction_id);
2093 let redone = !operations.is_empty();
2094 for operation in operations {
2095 self.send_operation(Operation::Buffer(operation), cx);
2096 }
2097 if redone {
2098 self.did_edit(&old_version, was_dirty, cx)
2099 }
2100 redone
2101 }
2102
2103 /// Override current completion triggers with the user-provided completion triggers.
2104 pub fn set_completion_triggers(&mut self, triggers: Vec<String>, cx: &mut ModelContext<Self>) {
2105 self.completion_triggers.clone_from(&triggers);
2106 self.completion_triggers_timestamp = self.text.lamport_clock.tick();
2107 self.send_operation(
2108 Operation::UpdateCompletionTriggers {
2109 triggers,
2110 lamport_timestamp: self.completion_triggers_timestamp,
2111 },
2112 cx,
2113 );
2114 cx.notify();
2115 }
2116
2117 /// Returns a list of strings which trigger a completion menu for this language.
    /// Usually this is driven by the LSP server, which returns a list of trigger characters for completions.
2119 pub fn completion_triggers(&self) -> &[String] {
2120 &self.completion_triggers
2121 }
2122}
2123
2124#[doc(hidden)]
2125#[cfg(any(test, feature = "test-support"))]
2126impl Buffer {
2127 pub fn edit_via_marked_text(
2128 &mut self,
2129 marked_string: &str,
2130 autoindent_mode: Option<AutoindentMode>,
2131 cx: &mut ModelContext<Self>,
2132 ) {
2133 let edits = self.edits_for_marked_text(marked_string);
2134 self.edit(edits, autoindent_mode, cx);
2135 }
2136
2137 pub fn set_group_interval(&mut self, group_interval: Duration) {
2138 self.text.set_group_interval(group_interval);
2139 }
2140
2141 pub fn randomly_edit<T>(
2142 &mut self,
2143 rng: &mut T,
2144 old_range_count: usize,
2145 cx: &mut ModelContext<Self>,
2146 ) where
2147 T: rand::Rng,
2148 {
2149 let mut edits: Vec<(Range<usize>, String)> = Vec::new();
2150 let mut last_end = None;
2151 for _ in 0..old_range_count {
2152 if last_end.map_or(false, |last_end| last_end >= self.len()) {
2153 break;
2154 }
2155
2156 let new_start = last_end.map_or(0, |last_end| last_end + 1);
2157 let mut range = self.random_byte_range(new_start, rng);
2158 if rng.gen_bool(0.2) {
2159 mem::swap(&mut range.start, &mut range.end);
2160 }
2161 last_end = Some(range.end);
2162
2163 let new_text_len = rng.gen_range(0..10);
2164 let new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
2165
2166 edits.push((range, new_text));
2167 }
2168 log::info!("mutating buffer {} with {:?}", self.replica_id(), edits);
2169 self.edit(edits, None, cx);
2170 }
2171
2172 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut ModelContext<Self>) {
2173 let was_dirty = self.is_dirty();
2174 let old_version = self.version.clone();
2175
2176 let ops = self.text.randomly_undo_redo(rng);
2177 if !ops.is_empty() {
2178 for op in ops {
2179 self.send_operation(Operation::Buffer(op), cx);
2180 self.did_edit(&old_version, was_dirty, cx);
2181 }
2182 }
2183 }
2184}
2185
2186impl EventEmitter<Event> for Buffer {}
2187
2188impl Deref for Buffer {
2189 type Target = TextBuffer;
2190
2191 fn deref(&self) -> &Self::Target {
2192 &self.text
2193 }
2194}
2195
2196impl BufferSnapshot {
    /// Returns the [`IndentSize`] for the given line, based on its leading whitespace.
    pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
        indent_size_for_line(self, row)
    }

2201 /// Returns [`IndentSize`] for a given position that respects user settings
2202 /// and language preferences.
2203 pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &AppContext) -> IndentSize {
2204 let settings = language_settings(self.language_at(position), self.file(), cx);
2205 if settings.hard_tabs {
2206 IndentSize::tab()
2207 } else {
2208 IndentSize::spaces(settings.tab_size.get())
2209 }
2210 }
2211
2212 /// Retrieve the suggested indent size for all of the given rows. The unit of indentation
2213 /// is passed in as `single_indent_size`.
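    ///
    /// A simplified, compile-ignored sketch of typical usage:
    ///
    /// ```ignore
    /// // Ask for suggestions for rows 5, 6, and 7, using four spaces as one indent level.
    /// let suggestions = snapshot.suggested_indents(5..8, IndentSize::spaces(4));
    /// if let Some(size) = suggestions.get(&6) {
    ///     println!("row 6 should be indented by {} columns", size.len);
    /// }
    /// ```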
2214 pub fn suggested_indents(
2215 &self,
2216 rows: impl Iterator<Item = u32>,
2217 single_indent_size: IndentSize,
2218 ) -> BTreeMap<u32, IndentSize> {
2219 let mut result = BTreeMap::new();
2220
2221 for row_range in contiguous_ranges(rows, 10) {
2222 let suggestions = match self.suggest_autoindents(row_range.clone()) {
2223 Some(suggestions) => suggestions,
2224 _ => break,
2225 };
2226
2227 for (row, suggestion) in row_range.zip(suggestions) {
2228 let indent_size = if let Some(suggestion) = suggestion {
2229 result
2230 .get(&suggestion.basis_row)
2231 .copied()
2232 .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
2233 .with_delta(suggestion.delta, single_indent_size)
2234 } else {
2235 self.indent_size_for_line(row)
2236 };
2237
2238 result.insert(row, indent_size);
2239 }
2240 }
2241
2242 result
2243 }
2244
2245 fn suggest_autoindents(
2246 &self,
2247 row_range: Range<u32>,
2248 ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
2249 let config = &self.language.as_ref()?.config;
2250 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
2251
2252 // Find the suggested indentation ranges based on the syntax tree.
2253 let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
2254 let end = Point::new(row_range.end, 0);
2255 let range = (start..end).to_offset(&self.text);
2256 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2257 Some(&grammar.indents_config.as_ref()?.query)
2258 });
2259 let indent_configs = matches
2260 .grammars()
2261 .iter()
2262 .map(|grammar| grammar.indents_config.as_ref().unwrap())
2263 .collect::<Vec<_>>();
2264
2265 let mut indent_ranges = Vec::<Range<Point>>::new();
2266 let mut outdent_positions = Vec::<Point>::new();
2267 while let Some(mat) = matches.peek() {
2268 let mut start: Option<Point> = None;
2269 let mut end: Option<Point> = None;
2270
2271 let config = &indent_configs[mat.grammar_index];
2272 for capture in mat.captures {
2273 if capture.index == config.indent_capture_ix {
2274 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
2275 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
2276 } else if Some(capture.index) == config.start_capture_ix {
2277 start = Some(Point::from_ts_point(capture.node.end_position()));
2278 } else if Some(capture.index) == config.end_capture_ix {
2279 end = Some(Point::from_ts_point(capture.node.start_position()));
2280 } else if Some(capture.index) == config.outdent_capture_ix {
2281 outdent_positions.push(Point::from_ts_point(capture.node.start_position()));
2282 }
2283 }
2284
2285 matches.advance();
2286 if let Some((start, end)) = start.zip(end) {
2287 if start.row == end.row {
2288 continue;
2289 }
2290
2291 let range = start..end;
2292 match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
2293 Err(ix) => indent_ranges.insert(ix, range),
2294 Ok(ix) => {
2295 let prev_range = &mut indent_ranges[ix];
2296 prev_range.end = prev_range.end.max(range.end);
2297 }
2298 }
2299 }
2300 }
2301
2302 let mut error_ranges = Vec::<Range<Point>>::new();
2303 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2304 Some(&grammar.error_query)
2305 });
2306 while let Some(mat) = matches.peek() {
2307 let node = mat.captures[0].node;
2308 let start = Point::from_ts_point(node.start_position());
2309 let end = Point::from_ts_point(node.end_position());
2310 let range = start..end;
2311 let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
2312 Ok(ix) | Err(ix) => ix,
2313 };
2314 let mut end_ix = ix;
2315 while let Some(existing_range) = error_ranges.get(end_ix) {
2316 if existing_range.end < end {
2317 end_ix += 1;
2318 } else {
2319 break;
2320 }
2321 }
2322 error_ranges.splice(ix..end_ix, [range]);
2323 matches.advance();
2324 }
2325
2326 outdent_positions.sort();
2327 for outdent_position in outdent_positions {
2328 // find the innermost indent range containing this outdent_position
2329 // set its end to the outdent position
2330 if let Some(range_to_truncate) = indent_ranges
2331 .iter_mut()
2332 .filter(|indent_range| indent_range.contains(&outdent_position))
2333 .last()
2334 {
2335 range_to_truncate.end = outdent_position;
2336 }
2337 }
2338
        // Find the suggested indentation increases and decreases based on regexes.
2340 let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
2341 self.for_each_line(
2342 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
2343 ..Point::new(row_range.end, 0),
2344 |row, line| {
2345 if config
2346 .decrease_indent_pattern
2347 .as_ref()
2348 .map_or(false, |regex| regex.is_match(line))
2349 {
2350 indent_change_rows.push((row, Ordering::Less));
2351 }
2352 if config
2353 .increase_indent_pattern
2354 .as_ref()
2355 .map_or(false, |regex| regex.is_match(line))
2356 {
2357 indent_change_rows.push((row + 1, Ordering::Greater));
2358 }
2359 },
2360 );
2361
2362 let mut indent_changes = indent_change_rows.into_iter().peekable();
2363 let mut prev_row = if config.auto_indent_using_last_non_empty_line {
2364 prev_non_blank_row.unwrap_or(0)
2365 } else {
2366 row_range.start.saturating_sub(1)
2367 };
2368 let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
2369 Some(row_range.map(move |row| {
2370 let row_start = Point::new(row, self.indent_size_for_line(row).len);
2371
2372 let mut indent_from_prev_row = false;
2373 let mut outdent_from_prev_row = false;
2374 let mut outdent_to_row = u32::MAX;
2375
2376 while let Some((indent_row, delta)) = indent_changes.peek() {
2377 match indent_row.cmp(&row) {
2378 Ordering::Equal => match delta {
2379 Ordering::Less => outdent_from_prev_row = true,
2380 Ordering::Greater => indent_from_prev_row = true,
2381 _ => {}
2382 },
2383
2384 Ordering::Greater => break,
2385 Ordering::Less => {}
2386 }
2387
2388 indent_changes.next();
2389 }
2390
2391 for range in &indent_ranges {
2392 if range.start.row >= row {
2393 break;
2394 }
2395 if range.start.row == prev_row && range.end > row_start {
2396 indent_from_prev_row = true;
2397 }
2398 if range.end > prev_row_start && range.end <= row_start {
2399 outdent_to_row = outdent_to_row.min(range.start.row);
2400 }
2401 }
2402
2403 let within_error = error_ranges
2404 .iter()
2405 .any(|e| e.start.row < row && e.end > row_start);
2406
2407 let suggestion = if outdent_to_row == prev_row
2408 || (outdent_from_prev_row && indent_from_prev_row)
2409 {
2410 Some(IndentSuggestion {
2411 basis_row: prev_row,
2412 delta: Ordering::Equal,
2413 within_error,
2414 })
2415 } else if indent_from_prev_row {
2416 Some(IndentSuggestion {
2417 basis_row: prev_row,
2418 delta: Ordering::Greater,
2419 within_error,
2420 })
2421 } else if outdent_to_row < prev_row {
2422 Some(IndentSuggestion {
2423 basis_row: outdent_to_row,
2424 delta: Ordering::Equal,
2425 within_error,
2426 })
2427 } else if outdent_from_prev_row {
2428 Some(IndentSuggestion {
2429 basis_row: prev_row,
2430 delta: Ordering::Less,
2431 within_error,
2432 })
2433 } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
2434 {
2435 Some(IndentSuggestion {
2436 basis_row: prev_row,
2437 delta: Ordering::Equal,
2438 within_error,
2439 })
2440 } else {
2441 None
2442 };
2443
2444 prev_row = row;
2445 prev_row_start = row_start;
2446 suggestion
2447 }))
2448 }
2449
2450 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
2451 while row > 0 {
2452 row -= 1;
2453 if !self.is_line_blank(row) {
2454 return Some(row);
2455 }
2456 }
2457 None
2458 }
2459
2460 /// Iterates over chunks of text in the given range of the buffer. Text is chunked
2461 /// in an arbitrary way due to being stored in a [`Rope`](text::Rope). The text is also
2462 /// returned in chunks where each chunk has a single syntax highlighting style and
2463 /// diagnostic status.
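    ///
    /// A compile-ignored sketch that collects the plain text of the whole buffer,
    /// ignoring highlight and diagnostic information:
    ///
    /// ```ignore
    /// let text: String = snapshot
    ///     .chunks(0..snapshot.len(), false)
    ///     .map(|chunk| chunk.text)
    ///     .collect();
    /// ```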
2464 pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks {
2465 let range = range.start.to_offset(self)..range.end.to_offset(self);
2466
2467 let mut syntax = None;
2468 let mut diagnostic_endpoints = Vec::new();
2469 if language_aware {
2470 let captures = self.syntax.captures(range.clone(), &self.text, |grammar| {
2471 grammar.highlights_query.as_ref()
2472 });
2473 let highlight_maps = captures
2474 .grammars()
2475 .into_iter()
2476 .map(|grammar| grammar.highlight_map())
2477 .collect();
2478 syntax = Some((captures, highlight_maps));
2479 for entry in self.diagnostics_in_range::<_, usize>(range.clone(), false) {
2480 diagnostic_endpoints.push(DiagnosticEndpoint {
2481 offset: entry.range.start,
2482 is_start: true,
2483 severity: entry.diagnostic.severity,
2484 is_unnecessary: entry.diagnostic.is_unnecessary,
2485 });
2486 diagnostic_endpoints.push(DiagnosticEndpoint {
2487 offset: entry.range.end,
2488 is_start: false,
2489 severity: entry.diagnostic.severity,
2490 is_unnecessary: entry.diagnostic.is_unnecessary,
2491 });
2492 }
2493 diagnostic_endpoints
2494 .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
2495 }
2496
2497 BufferChunks::new(self.text.as_rope(), range, syntax, diagnostic_endpoints)
2498 }
2499
2500 /// Invokes the given callback for each line of text in the given range of the buffer.
2501 /// Uses callback to avoid allocating a string for each line.
2502 fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
2503 let mut line = String::new();
2504 let mut row = range.start.row;
2505 for chunk in self
2506 .as_rope()
2507 .chunks_in_range(range.to_offset(self))
2508 .chain(["\n"])
2509 {
2510 for (newline_ix, text) in chunk.split('\n').enumerate() {
2511 if newline_ix > 0 {
2512 callback(row, &line);
2513 row += 1;
2514 line.clear();
2515 }
2516 line.push_str(text);
2517 }
2518 }
2519 }
2520
2521 /// Iterates over every [`SyntaxLayer`] in the buffer.
2522 pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayer> + '_ {
2523 self.syntax.layers_for_range(0..self.len(), &self.text)
2524 }
2525
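    /// Returns the deepest [`SyntaxLayer`] at the given position.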
2526 pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayer> {
2527 let offset = position.to_offset(self);
2528 self.syntax
2529 .layers_for_range(offset..offset, &self.text)
2530 .filter(|l| l.node().end_byte() > offset)
2531 .last()
2532 }
2533
2534 /// Returns the main [Language]
2535 pub fn language(&self) -> Option<&Arc<Language>> {
2536 self.language.as_ref()
2537 }
2538
2539 /// Returns the [Language] at the given location.
2540 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
2541 self.syntax_layer_at(position)
2542 .map(|info| info.language)
2543 .or(self.language.as_ref())
2544 }
2545
2546 /// Returns the settings for the language at the given location.
2547 pub fn settings_at<'a, D: ToOffset>(
2548 &self,
2549 position: D,
2550 cx: &'a AppContext,
2551 ) -> &'a LanguageSettings {
2552 language_settings(self.language_at(position), self.file.as_ref(), cx)
2553 }
2554
2555 /// Returns the [LanguageScope] at the given location.
2556 pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
2557 let offset = position.to_offset(self);
2558 let mut scope = None;
2559 let mut smallest_range: Option<Range<usize>> = None;
2560
2561 // Use the layer that has the smallest node intersecting the given point.
2562 for layer in self.syntax.layers_for_range(offset..offset, &self.text) {
2563 let mut cursor = layer.node().walk();
2564
2565 let mut range = None;
2566 loop {
2567 let child_range = cursor.node().byte_range();
2568 if !child_range.to_inclusive().contains(&offset) {
2569 break;
2570 }
2571
2572 range = Some(child_range);
2573 if cursor.goto_first_child_for_byte(offset).is_none() {
2574 break;
2575 }
2576 }
2577
2578 if let Some(range) = range {
2579 if smallest_range
2580 .as_ref()
2581 .map_or(true, |smallest_range| range.len() < smallest_range.len())
2582 {
2583 smallest_range = Some(range);
2584 scope = Some(LanguageScope {
2585 language: layer.language.clone(),
2586 override_id: layer.override_id(offset, &self.text),
2587 });
2588 }
2589 }
2590 }
2591
2592 scope.or_else(|| {
2593 self.language.clone().map(|language| LanguageScope {
2594 language,
2595 override_id: None,
2596 })
2597 })
2598 }
2599
2600 /// Returns a tuple of the range and character kind of the word
2601 /// surrounding the given position.
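    ///
    /// For example (a compile-ignored sketch), in a buffer containing "one two", an
    /// offset inside the first word yields the range of that word:
    ///
    /// ```ignore
    /// let (range, kind) = snapshot.surrounding_word(1);
    /// assert_eq!(range, 0..3); // the word "one"
    /// ```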
2602 pub fn surrounding_word<T: ToOffset>(&self, start: T) -> (Range<usize>, Option<CharKind>) {
2603 let mut start = start.to_offset(self);
2604 let mut end = start;
2605 let mut next_chars = self.chars_at(start).peekable();
2606 let mut prev_chars = self.reversed_chars_at(start).peekable();
2607
2608 let scope = self.language_scope_at(start);
2609 let kind = |c| char_kind(&scope, c);
2610 let word_kind = cmp::max(
2611 prev_chars.peek().copied().map(kind),
2612 next_chars.peek().copied().map(kind),
2613 );
2614
2615 for ch in prev_chars {
2616 if Some(kind(ch)) == word_kind && ch != '\n' {
2617 start -= ch.len_utf8();
2618 } else {
2619 break;
2620 }
2621 }
2622
2623 for ch in next_chars {
2624 if Some(kind(ch)) == word_kind && ch != '\n' {
2625 end += ch.len_utf8();
2626 } else {
2627 break;
2628 }
2629 }
2630
2631 (start..end, word_kind)
2632 }
2633
    /// Returns the range for the closest syntax node enclosing the given range.
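    ///
    /// For example, in a Rust buffer containing `let x = (a + b);`, a range covering just
    /// `a` would typically expand to the range of `a + b`, and passing that result back in
    /// would expand to `(a + b)`, and so on.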
2635 pub fn range_for_syntax_ancestor<T: ToOffset>(&self, range: Range<T>) -> Option<Range<usize>> {
2636 let range = range.start.to_offset(self)..range.end.to_offset(self);
2637 let mut result: Option<Range<usize>> = None;
2638 'outer: for layer in self.syntax.layers_for_range(range.clone(), &self.text) {
2639 let mut cursor = layer.node().walk();
2640
2641 // Descend to the first leaf that touches the start of the range,
2642 // and if the range is non-empty, extends beyond the start.
2643 while cursor.goto_first_child_for_byte(range.start).is_some() {
2644 if !range.is_empty() && cursor.node().end_byte() == range.start {
2645 cursor.goto_next_sibling();
2646 }
2647 }
2648
2649 // Ascend to the smallest ancestor that strictly contains the range.
2650 loop {
2651 let node_range = cursor.node().byte_range();
2652 if node_range.start <= range.start
2653 && node_range.end >= range.end
2654 && node_range.len() > range.len()
2655 {
2656 break;
2657 }
2658 if !cursor.goto_parent() {
2659 continue 'outer;
2660 }
2661 }
2662
2663 let left_node = cursor.node();
2664 let mut layer_result = left_node.byte_range();
2665
2666 // For an empty range, try to find another node immediately to the right of the range.
2667 if left_node.end_byte() == range.start {
2668 let mut right_node = None;
2669 while !cursor.goto_next_sibling() {
2670 if !cursor.goto_parent() {
2671 break;
2672 }
2673 }
2674
2675 while cursor.node().start_byte() == range.start {
2676 right_node = Some(cursor.node());
2677 if !cursor.goto_first_child() {
2678 break;
2679 }
2680 }
2681
2682 // If there is a candidate node on both sides of the (empty) range, then
2683 // decide between the two by favoring a named node over an anonymous token.
2684 // If both nodes are the same in that regard, favor the right one.
2685 if let Some(right_node) = right_node {
2686 if right_node.is_named() || !left_node.is_named() {
2687 layer_result = right_node.byte_range();
2688 }
2689 }
2690 }
2691
2692 if let Some(previous_result) = &result {
2693 if previous_result.len() < layer_result.len() {
2694 continue;
2695 }
2696 }
2697 result = Some(layer_result);
2698 }
2699
2700 result
2701 }
2702
2703 /// Returns the outline for the buffer.
2704 ///
2705 /// This method allows passing an optional [SyntaxTheme] to
2706 /// syntax-highlight the returned symbols.
2707 pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Option<Outline<Anchor>> {
2708 self.outline_items_containing(0..self.len(), true, theme)
2709 .map(Outline::new)
2710 }
2711
2712 /// Returns all the symbols that contain the given position.
2713 ///
2714 /// This method allows passing an optional [SyntaxTheme] to
2715 /// syntax-highlight the returned symbols.
2716 pub fn symbols_containing<T: ToOffset>(
2717 &self,
2718 position: T,
2719 theme: Option<&SyntaxTheme>,
2720 ) -> Option<Vec<OutlineItem<Anchor>>> {
2721 let position = position.to_offset(self);
2722 let mut items = self.outline_items_containing(
2723 position.saturating_sub(1)..self.len().min(position + 1),
2724 false,
2725 theme,
2726 )?;
2727 let mut prev_depth = None;
2728 items.retain(|item| {
2729 let result = prev_depth.map_or(true, |prev_depth| item.depth > prev_depth);
2730 prev_depth = Some(item.depth);
2731 result
2732 });
2733 Some(items)
2734 }
2735
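    /// Returns the outline items whose ranges intersect the given range, optionally
    /// syntax-highlighted using the given [SyntaxTheme].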
2736 pub fn outline_items_containing<T: ToOffset>(
2737 &self,
2738 range: Range<T>,
2739 include_extra_context: bool,
2740 theme: Option<&SyntaxTheme>,
2741 ) -> Option<Vec<OutlineItem<Anchor>>> {
2742 let range = range.to_offset(self);
2743 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2744 grammar.outline_config.as_ref().map(|c| &c.query)
2745 });
2746 let configs = matches
2747 .grammars()
2748 .iter()
2749 .map(|g| g.outline_config.as_ref().unwrap())
2750 .collect::<Vec<_>>();
2751
2752 let mut stack = Vec::<Range<usize>>::new();
2753 let mut items = Vec::new();
2754 while let Some(mat) = matches.peek() {
2755 let config = &configs[mat.grammar_index];
2756 let item_node = mat.captures.iter().find_map(|cap| {
2757 if cap.index == config.item_capture_ix {
2758 Some(cap.node)
2759 } else {
2760 None
2761 }
2762 })?;
2763
2764 let item_range = item_node.byte_range();
2765 if item_range.end < range.start || item_range.start > range.end {
2766 matches.advance();
2767 continue;
2768 }
2769
2770 let mut buffer_ranges = Vec::new();
2771 for capture in mat.captures {
2772 let node_is_name;
2773 if capture.index == config.name_capture_ix {
2774 node_is_name = true;
2775 } else if Some(capture.index) == config.context_capture_ix
2776 || (Some(capture.index) == config.extra_context_capture_ix
2777 && include_extra_context)
2778 {
2779 node_is_name = false;
2780 } else {
2781 continue;
2782 }
2783
2784 let mut range = capture.node.start_byte()..capture.node.end_byte();
2785 let start = capture.node.start_position();
2786 if capture.node.end_position().row > start.row {
2787 range.end =
2788 range.start + self.line_len(start.row as u32) as usize - start.column;
2789 }
2790
2791 if !range.is_empty() {
2792 buffer_ranges.push((range, node_is_name));
2793 }
2794 }
2795
2796 if buffer_ranges.is_empty() {
2797 matches.advance();
2798 continue;
2799 }
2800
2801 let mut text = String::new();
2802 let mut highlight_ranges = Vec::new();
2803 let mut name_ranges = Vec::new();
2804 let mut chunks = self.chunks(
2805 buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end,
2806 true,
2807 );
2808 let mut last_buffer_range_end = 0;
2809 for (buffer_range, is_name) in buffer_ranges {
2810 if !text.is_empty() && buffer_range.start > last_buffer_range_end {
2811 text.push(' ');
2812 }
2813 last_buffer_range_end = buffer_range.end;
2814 if is_name {
2815 let mut start = text.len();
2816 let end = start + buffer_range.len();
2817
                    // When multiple names are captured, the matchable text
                    // includes the whitespace in between the names.
2820 if !name_ranges.is_empty() {
2821 start -= 1;
2822 }
2823
2824 name_ranges.push(start..end);
2825 }
2826
2827 let mut offset = buffer_range.start;
2828 chunks.seek(offset);
2829 for mut chunk in chunks.by_ref() {
2830 if chunk.text.len() > buffer_range.end - offset {
2831 chunk.text = &chunk.text[0..(buffer_range.end - offset)];
2832 offset = buffer_range.end;
2833 } else {
2834 offset += chunk.text.len();
2835 }
2836 let style = chunk
2837 .syntax_highlight_id
2838 .zip(theme)
2839 .and_then(|(highlight, theme)| highlight.style(theme));
2840 if let Some(style) = style {
2841 let start = text.len();
2842 let end = start + chunk.text.len();
2843 highlight_ranges.push((start..end, style));
2844 }
2845 text.push_str(chunk.text);
2846 if offset >= buffer_range.end {
2847 break;
2848 }
2849 }
2850 }
2851
2852 matches.advance();
2853 while stack.last().map_or(false, |prev_range| {
2854 prev_range.start > item_range.start || prev_range.end < item_range.end
2855 }) {
2856 stack.pop();
2857 }
2858 stack.push(item_range.clone());
2859
2860 items.push(OutlineItem {
2861 depth: stack.len() - 1,
2862 range: self.anchor_after(item_range.start)..self.anchor_before(item_range.end),
2863 text,
2864 highlight_ranges,
2865 name_ranges,
2866 })
2867 }
2868 Some(items)
2869 }
2870
2871 /// For each grammar in the language, runs the provided
2872 /// [tree_sitter::Query] against the given range.
2873 pub fn matches(
2874 &self,
2875 range: Range<usize>,
2876 query: fn(&Grammar) -> Option<&tree_sitter::Query>,
2877 ) -> SyntaxMapMatches {
2878 self.syntax.matches(range, self, query)
2879 }
2880
    /// Returns bracket range pairs overlapping or adjacent to `range`.
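    ///
    /// A compile-ignored sketch, assuming a language whose bracket query matches
    /// parentheses and square brackets, with the buffer containing "(foo [bar])":
    ///
    /// ```ignore
    /// for (open, close) in snapshot.bracket_ranges(7..8) {
    ///     // `open` and `close` are the byte ranges of a matching pair of bracket
    ///     // tokens, e.g. those of "[" and "]", and those of "(" and ")".
    /// }
    /// ```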
2882 pub fn bracket_ranges<T: ToOffset>(
2883 &self,
2884 range: Range<T>,
2885 ) -> impl Iterator<Item = (Range<usize>, Range<usize>)> + '_ {
2886 // Find bracket pairs that *inclusively* contain the given range.
2887 let range = range.start.to_offset(self).saturating_sub(1)
2888 ..self.len().min(range.end.to_offset(self) + 1);
2889
2890 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2891 grammar.brackets_config.as_ref().map(|c| &c.query)
2892 });
2893 let configs = matches
2894 .grammars()
2895 .iter()
2896 .map(|grammar| grammar.brackets_config.as_ref().unwrap())
2897 .collect::<Vec<_>>();
2898
2899 iter::from_fn(move || {
2900 while let Some(mat) = matches.peek() {
2901 let mut open = None;
2902 let mut close = None;
2903 let config = &configs[mat.grammar_index];
2904 for capture in mat.captures {
2905 if capture.index == config.open_capture_ix {
2906 open = Some(capture.node.byte_range());
2907 } else if capture.index == config.close_capture_ix {
2908 close = Some(capture.node.byte_range());
2909 }
2910 }
2911
2912 matches.advance();
2913
2914 let Some((open, close)) = open.zip(close) else {
2915 continue;
2916 };
2917
2918 let bracket_range = open.start..=close.end;
2919 if !bracket_range.overlaps(&range) {
2920 continue;
2921 }
2922
2923 return Some((open, close));
2924 }
2925 None
2926 })
2927 }
2928
2929 /// Returns enclosing bracket ranges containing the given range
2930 pub fn enclosing_bracket_ranges<T: ToOffset>(
2931 &self,
2932 range: Range<T>,
2933 ) -> impl Iterator<Item = (Range<usize>, Range<usize>)> + '_ {
2934 let range = range.start.to_offset(self)..range.end.to_offset(self);
2935
2936 self.bracket_ranges(range.clone())
2937 .filter(move |(open, close)| open.start <= range.start && close.end >= range.end)
2938 }
2939
    /// Returns the smallest enclosing bracket ranges containing the given range,
    /// or `None` if no brackets contain the range.
2941 ///
2942 /// Can optionally pass a range_filter to filter the ranges of brackets to consider
2943 pub fn innermost_enclosing_bracket_ranges<T: ToOffset>(
2944 &self,
2945 range: Range<T>,
2946 range_filter: Option<&dyn Fn(Range<usize>, Range<usize>) -> bool>,
2947 ) -> Option<(Range<usize>, Range<usize>)> {
2948 let range = range.start.to_offset(self)..range.end.to_offset(self);
2949
2950 // Get the ranges of the innermost pair of brackets.
2951 let mut result: Option<(Range<usize>, Range<usize>)> = None;
2952
2953 for (open, close) in self.enclosing_bracket_ranges(range.clone()) {
2954 if let Some(range_filter) = range_filter {
2955 if !range_filter(open.clone(), close.clone()) {
2956 continue;
2957 }
2958 }
2959
2960 let len = close.end - open.start;
2961
2962 if let Some((existing_open, existing_close)) = &result {
2963 let existing_len = existing_close.end - existing_open.start;
2964 if len > existing_len {
2965 continue;
2966 }
2967 }
2968
2969 result = Some((open, close));
2970 }
2971
2972 result
2973 }
2974
2975 /// Returns anchor ranges for any matches of the redaction query.
2976 /// The buffer can be associated with multiple languages, and the redaction query associated with each
2977 /// will be run on the relevant section of the buffer.
2978 pub fn redacted_ranges<T: ToOffset>(
2979 &self,
2980 range: Range<T>,
2981 ) -> impl Iterator<Item = Range<usize>> + '_ {
2982 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
2983 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
2984 grammar
2985 .redactions_config
2986 .as_ref()
2987 .map(|config| &config.query)
2988 });
2989
2990 let configs = syntax_matches
2991 .grammars()
2992 .iter()
2993 .map(|grammar| grammar.redactions_config.as_ref())
2994 .collect::<Vec<_>>();
2995
2996 iter::from_fn(move || {
2997 let redacted_range = syntax_matches
2998 .peek()
2999 .and_then(|mat| {
3000 configs[mat.grammar_index].and_then(|config| {
3001 mat.captures
3002 .iter()
3003 .find(|capture| capture.index == config.redaction_capture_ix)
3004 })
3005 })
3006 .map(|mat| mat.node.byte_range());
3007 syntax_matches.advance();
3008 redacted_range
3009 })
3010 }
3011
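    /// Returns the runnable ranges (e.g. tests) intersecting the given range, based on
    /// each language's runnable query.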
3012 pub fn runnable_ranges(
3013 &self,
3014 range: Range<Anchor>,
3015 ) -> impl Iterator<Item = RunnableRange> + '_ {
3016 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
3017
3018 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
3019 grammar.runnable_config.as_ref().map(|config| &config.query)
3020 });
3021
3022 let test_configs = syntax_matches
3023 .grammars()
3024 .iter()
3025 .map(|grammar| grammar.runnable_config.as_ref())
3026 .collect::<Vec<_>>();
3027
3028 iter::from_fn(move || loop {
3029 let mat = syntax_matches.peek()?;
3030
3031 let test_range = test_configs[mat.grammar_index].and_then(|test_configs| {
3032 let mut run_range = None;
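                // Compute the union of all capture byte ranges for this match; this
                // becomes the runnable's full range.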
3033 let full_range = mat.captures.iter().fold(
3034 Range {
3035 start: usize::MAX,
3036 end: 0,
3037 },
3038 |mut acc, next| {
3039 let byte_range = next.node.byte_range();
3040 if acc.start > byte_range.start {
3041 acc.start = byte_range.start;
3042 }
3043 if acc.end < byte_range.end {
3044 acc.end = byte_range.end;
3045 }
3046 acc
3047 },
3048 );
3049 if full_range.start > full_range.end {
3050 // We did not find a full spanning range of this match.
3051 return None;
3052 }
3053 let extra_captures: SmallVec<[_; 1]> =
3054 SmallVec::from_iter(mat.captures.iter().filter_map(|capture| {
3055 test_configs
3056 .extra_captures
3057 .get(capture.index as usize)
3058 .cloned()
3059 .and_then(|tag_name| match tag_name {
3060 RunnableCapture::Named(name) => {
3061 Some((capture.node.byte_range(), name))
3062 }
3063 RunnableCapture::Run => {
3064 let _ = run_range.insert(capture.node.byte_range());
3065 None
3066 }
3067 })
3068 }));
3069 let run_range = run_range?;
3070 let tags = test_configs
3071 .query
3072 .property_settings(mat.pattern_index)
3073 .iter()
3074 .filter_map(|property| {
3075 if *property.key == *"tag" {
3076 property
3077 .value
3078 .as_ref()
3079 .map(|value| RunnableTag(value.to_string().into()))
3080 } else {
3081 None
3082 }
3083 })
3084 .collect();
3085 let extra_captures = extra_captures
3086 .into_iter()
3087 .map(|(range, name)| {
3088 (
3089 name.to_string(),
3090 self.text_for_range(range.clone()).collect::<String>(),
3091 )
3092 })
3093 .collect();
3094 // All tags should have the same range.
3095 Some(RunnableRange {
3096 run_range,
3097 full_range,
3098 runnable: Runnable {
3099 tags,
3100 language: mat.language,
3101 buffer: self.remote_id(),
3102 },
3103 extra_captures,
3104 buffer_id: self.remote_id(),
3105 })
3106 });
3107
3108 syntax_matches.advance();
3109 if test_range.is_some() {
                // It's fine to short-circuit when .peek() returns None. But if a match did not
                // contain a run marker, we don't want to end this iterator early; instead, we
                // loop around and consider the next match.
3112 return test_range;
3113 }
3114 })
3115 }
3116
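    /// Computes the indent guides to display for the given range, based on each line's
    /// indentation and the language's indent guide settings.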
3117 pub fn indent_guides_in_range(
3118 &self,
3119 range: Range<Anchor>,
3120 ignore_disabled_for_language: bool,
3121 cx: &AppContext,
3122 ) -> Vec<IndentGuide> {
3123 let language_settings = language_settings(self.language(), self.file.as_ref(), cx);
3124 let settings = language_settings.indent_guides;
3125 if !ignore_disabled_for_language && !settings.enabled {
3126 return Vec::new();
3127 }
3128 let tab_size = language_settings.tab_size.get() as u32;
3129
3130 let start_row = range.start.to_point(self).row;
3131 let end_row = range.end.to_point(self).row;
3132 let row_range = start_row..end_row + 1;
3133
3134 let mut row_indents = self.line_indents_in_row_range(row_range.clone());
3135
3136 let mut result_vec = Vec::new();
3137 let mut indent_stack = SmallVec::<[IndentGuide; 8]>::new();
3138
3139 while let Some((first_row, mut line_indent)) = row_indents.next() {
3140 let current_depth = indent_stack.len() as u32;
3141
            // When encountering an empty line, continue until a line with a useful indent
            // is found, then add it to the indent stack with the discovered depth.
3144 let mut found_indent = false;
3145 let mut last_row = first_row;
3146 if line_indent.is_line_empty() {
3147 let mut trailing_row = end_row;
3148 while !found_indent {
3149 let (target_row, new_line_indent) =
3150 if let Some(display_row) = row_indents.next() {
3151 display_row
3152 } else {
3153 // This means we reached the end of the given range and found empty lines at the end.
3154 // We need to traverse further until we find a non-empty line to know if we need to add
3155 // an indent guide for the last visible indent.
3156 trailing_row += 1;
3157
3158 const TRAILING_ROW_SEARCH_LIMIT: u32 = 25;
3159 if trailing_row > self.max_point().row
3160 || trailing_row > end_row + TRAILING_ROW_SEARCH_LIMIT
3161 {
3162 break;
3163 }
3164 let new_line_indent = self.line_indent_for_row(trailing_row);
3165 (trailing_row, new_line_indent)
3166 };
3167
3168 if new_line_indent.is_line_empty() {
3169 continue;
3170 }
3171 last_row = target_row.min(end_row);
3172 line_indent = new_line_indent;
3173 found_indent = true;
3174 break;
3175 }
3176 } else {
3177 found_indent = true
3178 }
3179
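            // Convert the line's indentation into a depth, rounding partial indents up so
            // that a partially-indented line still produces a guide at the next level.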
3180 let depth = if found_indent {
3181 line_indent.len(tab_size) / tab_size
3182 + ((line_indent.len(tab_size) % tab_size) > 0) as u32
3183 } else {
3184 current_depth
3185 };
3186
3187 if depth < current_depth {
3188 for _ in 0..(current_depth - depth) {
3189 let mut indent = indent_stack.pop().unwrap();
3190 if last_row != first_row {
                        // In this case, we landed on an empty row, had to seek forward, and
                        // discovered that the indent we were on is ending.
                        // This means that the last display row must be on the line that ends
                        // this indent range, so we should display the range up to the first
                        // non-empty line.
3196 indent.end_row = first_row.saturating_sub(1);
3197 }
3198
3199 result_vec.push(indent)
3200 }
3201 } else if depth > current_depth {
3202 for next_depth in current_depth..depth {
3203 indent_stack.push(IndentGuide {
3204 buffer_id: self.remote_id(),
3205 start_row: first_row,
3206 end_row: last_row,
3207 depth: next_depth,
3208 tab_size,
3209 settings,
3210 });
3211 }
3212 }
3213
3214 for indent in indent_stack.iter_mut() {
3215 indent.end_row = last_row;
3216 }
3217 }
3218
3219 result_vec.extend(indent_stack);
3220
3221 result_vec
3222 }
3223
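    /// Returns the row range and indentation of the indented block enclosing the given
    /// row, if any.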
3224 pub async fn enclosing_indent(
3225 &self,
3226 mut buffer_row: BufferRow,
3227 ) -> Option<(Range<BufferRow>, LineIndent)> {
3228 let max_row = self.max_point().row;
3229 if buffer_row >= max_row {
3230 return None;
3231 }
3232
3233 let mut target_indent = self.line_indent_for_row(buffer_row);
3234
3235 // If the current row is at the start of an indented block, we want to return this
3236 // block as the enclosing indent.
3237 if !target_indent.is_line_empty() && buffer_row < max_row {
3238 let next_line_indent = self.line_indent_for_row(buffer_row + 1);
3239 if !next_line_indent.is_line_empty()
3240 && target_indent.raw_len() < next_line_indent.raw_len()
3241 {
3242 target_indent = next_line_indent;
3243 buffer_row += 1;
3244 }
3245 }
3246
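        // The scan can cover thousands of rows, so cap the search and periodically yield
        // to the async executor to avoid starving other tasks.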
3247 const SEARCH_ROW_LIMIT: u32 = 25000;
3248 const SEARCH_WHITESPACE_ROW_LIMIT: u32 = 2500;
3249 const YIELD_INTERVAL: u32 = 100;
3250
3251 let mut accessed_row_counter = 0;
3252
        // If the current row is blank, search for the nearest non-empty lines above and below.
3254 if target_indent.is_line_empty() {
3255 let start = buffer_row.saturating_sub(SEARCH_WHITESPACE_ROW_LIMIT);
3256 let end = (max_row + 1).min(buffer_row + SEARCH_WHITESPACE_ROW_LIMIT);
3257
3258 let mut non_empty_line_above = None;
3259 for (row, indent) in self
3260 .text
3261 .reversed_line_indents_in_row_range(start..buffer_row)
3262 {
3263 accessed_row_counter += 1;
3264 if accessed_row_counter == YIELD_INTERVAL {
3265 accessed_row_counter = 0;
3266 yield_now().await;
3267 }
3268 if !indent.is_line_empty() {
3269 non_empty_line_above = Some((row, indent));
3270 break;
3271 }
3272 }
3273
3274 let mut non_empty_line_below = None;
3275 for (row, indent) in self.text.line_indents_in_row_range((buffer_row + 1)..end) {
3276 accessed_row_counter += 1;
3277 if accessed_row_counter == YIELD_INTERVAL {
3278 accessed_row_counter = 0;
3279 yield_now().await;
3280 }
3281 if !indent.is_line_empty() {
3282 non_empty_line_below = Some((row, indent));
3283 break;
3284 }
3285 }
3286
3287 let (row, indent) = match (non_empty_line_above, non_empty_line_below) {
3288 (Some((above_row, above_indent)), Some((below_row, below_indent))) => {
3289 if above_indent.raw_len() >= below_indent.raw_len() {
3290 (above_row, above_indent)
3291 } else {
3292 (below_row, below_indent)
3293 }
3294 }
3295 (Some(above), None) => above,
3296 (None, Some(below)) => below,
3297 _ => return None,
3298 };
3299
3300 target_indent = indent;
3301 buffer_row = row;
3302 }
3303
3304 let start = buffer_row.saturating_sub(SEARCH_ROW_LIMIT);
3305 let end = (max_row + 1).min(buffer_row + SEARCH_ROW_LIMIT);
3306
3307 let mut start_indent = None;
3308 for (row, indent) in self
3309 .text
3310 .reversed_line_indents_in_row_range(start..buffer_row)
3311 {
3312 accessed_row_counter += 1;
3313 if accessed_row_counter == YIELD_INTERVAL {
3314 accessed_row_counter = 0;
3315 yield_now().await;
3316 }
3317 if !indent.is_line_empty() && indent.raw_len() < target_indent.raw_len() {
3318 start_indent = Some((row, indent));
3319 break;
3320 }
3321 }
3322 let (start_row, start_indent_size) = start_indent?;
3323
3324 let mut end_indent = (end, None);
3325 for (row, indent) in self.text.line_indents_in_row_range((buffer_row + 1)..end) {
3326 accessed_row_counter += 1;
3327 if accessed_row_counter == YIELD_INTERVAL {
3328 accessed_row_counter = 0;
3329 yield_now().await;
3330 }
3331 if !indent.is_line_empty() && indent.raw_len() < target_indent.raw_len() {
3332 end_indent = (row.saturating_sub(1), Some(indent));
3333 break;
3334 }
3335 }
3336 let (end_row, end_indent_size) = end_indent;
3337
3338 let indent = if let Some(end_indent_size) = end_indent_size {
3339 if start_indent_size.raw_len() > end_indent_size.raw_len() {
3340 start_indent_size
3341 } else {
3342 end_indent_size
3343 }
3344 } else {
3345 start_indent_size
3346 };
3347
3348 Some((start_row..end_row, indent))
3349 }
3350
3351 /// Returns selections for remote peers intersecting the given range.
3352 #[allow(clippy::type_complexity)]
3353 pub fn selections_in_range(
3354 &self,
3355 range: Range<Anchor>,
3356 include_local: bool,
3357 ) -> impl Iterator<
3358 Item = (
3359 ReplicaId,
3360 bool,
3361 CursorShape,
3362 impl Iterator<Item = &Selection<Anchor>> + '_,
3363 ),
3364 > + '_ {
3365 self.remote_selections
3366 .iter()
3367 .filter(move |(replica_id, set)| {
3368 (include_local || **replica_id != self.text.replica_id())
3369 && !set.selections.is_empty()
3370 })
3371 .map(move |(replica_id, set)| {
3372 let start_ix = match set.selections.binary_search_by(|probe| {
3373 probe.end.cmp(&range.start, self).then(Ordering::Greater)
3374 }) {
3375 Ok(ix) | Err(ix) => ix,
3376 };
3377 let end_ix = match set.selections.binary_search_by(|probe| {
3378 probe.start.cmp(&range.end, self).then(Ordering::Less)
3379 }) {
3380 Ok(ix) | Err(ix) => ix,
3381 };
3382
3383 (
3384 *replica_id,
3385 set.line_mode,
3386 set.cursor_shape,
3387 set.selections[start_ix..end_ix].iter(),
3388 )
3389 })
3390 }
3391
3392 /// Whether the buffer contains any git changes.
3393 pub fn has_git_diff(&self) -> bool {
3394 !self.git_diff.is_empty()
3395 }
3396
3397 /// Returns all the Git diff hunks intersecting the given
3398 /// row range.
3399 pub fn git_diff_hunks_in_row_range(
3400 &self,
3401 range: Range<BufferRow>,
3402 ) -> impl '_ + Iterator<Item = git::diff::DiffHunk<u32>> {
3403 self.git_diff.hunks_in_row_range(range, self)
3404 }
3405
3406 /// Returns all the Git diff hunks intersecting the given
3407 /// range.
3408 pub fn git_diff_hunks_intersecting_range(
3409 &self,
3410 range: Range<Anchor>,
3411 ) -> impl '_ + Iterator<Item = git::diff::DiffHunk<u32>> {
3412 self.git_diff.hunks_intersecting_range(range, self)
3413 }
3414
3415 /// Returns all the Git diff hunks intersecting the given
3416 /// range, in reverse order.
3417 pub fn git_diff_hunks_intersecting_range_rev(
3418 &self,
3419 range: Range<Anchor>,
3420 ) -> impl '_ + Iterator<Item = git::diff::DiffHunk<u32>> {
3421 self.git_diff.hunks_intersecting_range_rev(range, self)
3422 }
3423
    /// Returns whether the buffer contains any diagnostics.
3425 pub fn has_diagnostics(&self) -> bool {
3426 !self.diagnostics.is_empty()
3427 }
3428
3429 /// Returns all the diagnostics intersecting the given range.
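    ///
    /// A compile-ignored sketch of typical usage:
    ///
    /// ```ignore
    /// let all = Point::new(0, 0)..snapshot.max_point();
    /// for entry in snapshot.diagnostics_in_range::<_, Point>(all, false) {
    ///     println!("{:?}: {}", entry.range, entry.diagnostic.message);
    /// }
    /// ```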
3430 pub fn diagnostics_in_range<'a, T, O>(
3431 &'a self,
3432 search_range: Range<T>,
3433 reversed: bool,
3434 ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
3435 where
3436 T: 'a + Clone + ToOffset,
3437 O: 'a + FromAnchor + Ord,
3438 {
3439 let mut iterators: Vec<_> = self
3440 .diagnostics
3441 .iter()
3442 .map(|(_, collection)| {
3443 collection
3444 .range::<T, O>(search_range.clone(), self, true, reversed)
3445 .peekable()
3446 })
3447 .collect();
3448
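        // Lazily merge the per-server iterators: on each step, yield the entry that sorts
        // first by range start, then by severity, then by group id.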
3449 std::iter::from_fn(move || {
3450 let (next_ix, _) = iterators
3451 .iter_mut()
3452 .enumerate()
3453 .flat_map(|(ix, iter)| Some((ix, iter.peek()?)))
3454 .min_by(|(_, a), (_, b)| {
3455 let cmp = a
3456 .range
3457 .start
3458 .cmp(&b.range.start)
3459 // when range is equal, sort by diagnostic severity
3460 .then(a.diagnostic.severity.cmp(&b.diagnostic.severity))
3461 // and stabilize order with group_id
3462 .then(a.diagnostic.group_id.cmp(&b.diagnostic.group_id));
3463 if reversed {
3464 cmp.reverse()
3465 } else {
3466 cmp
3467 }
3468 })?;
3469 iterators[next_ix].next()
3470 })
3471 }
3472
3473 /// Returns all the diagnostic groups associated with the given
3474 /// language server id. If no language server id is provided,
    /// all diagnostic groups are returned.
3476 pub fn diagnostic_groups(
3477 &self,
3478 language_server_id: Option<LanguageServerId>,
3479 ) -> Vec<(LanguageServerId, DiagnosticGroup<Anchor>)> {
3480 let mut groups = Vec::new();
3481
3482 if let Some(language_server_id) = language_server_id {
3483 if let Ok(ix) = self
3484 .diagnostics
3485 .binary_search_by_key(&language_server_id, |e| e.0)
3486 {
3487 self.diagnostics[ix]
3488 .1
3489 .groups(language_server_id, &mut groups, self);
3490 }
3491 } else {
3492 for (language_server_id, diagnostics) in self.diagnostics.iter() {
3493 diagnostics.groups(*language_server_id, &mut groups, self);
3494 }
3495 }
3496
3497 groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
3498 let a_start = &group_a.entries[group_a.primary_ix].range.start;
3499 let b_start = &group_b.entries[group_b.primary_ix].range.start;
3500 a_start.cmp(b_start, self).then_with(|| id_a.cmp(id_b))
3501 });
3502
3503 groups
3504 }
3505
3506 /// Returns an iterator over the diagnostics for the given group.
3507 pub fn diagnostic_group<'a, O>(
3508 &'a self,
3509 group_id: usize,
3510 ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
3511 where
3512 O: 'a + FromAnchor,
3513 {
3514 self.diagnostics
3515 .iter()
3516 .flat_map(move |(_, set)| set.group(group_id, self))
3517 }
3518
3519 /// An integer version number that accounts for all updates besides
3520 /// the buffer's text itself (which is versioned via a version vector).
3521 pub fn non_text_state_update_count(&self) -> usize {
3522 self.non_text_state_update_count
3523 }
3524
    /// Returns a snapshot of the underlying file.
3526 pub fn file(&self) -> Option<&Arc<dyn File>> {
3527 self.file.as_ref()
3528 }
3529
3530 /// Resolves the file path (relative to the worktree root) associated with the underlying file.
3531 pub fn resolve_file_path(&self, cx: &AppContext, include_root: bool) -> Option<PathBuf> {
3532 if let Some(file) = self.file() {
3533 if file.path().file_name().is_none() || include_root {
3534 Some(file.full_path(cx))
3535 } else {
3536 Some(file.path().to_path_buf())
3537 }
3538 } else {
3539 None
3540 }
3541 }
3542}
3543
3544fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
3545 indent_size_for_text(text.chars_at(Point::new(row, 0)))
3546}
3547
3548fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
3549 let mut result = IndentSize::spaces(0);
3550 for c in text {
3551 let kind = match c {
3552 ' ' => IndentKind::Space,
3553 '\t' => IndentKind::Tab,
3554 _ => break,
3555 };
3556 if result.len == 0 {
3557 result.kind = kind;
3558 }
3559 result.len += 1;
3560 }
3561 result
3562}
3563
3564impl Clone for BufferSnapshot {
3565 fn clone(&self) -> Self {
3566 Self {
3567 text: self.text.clone(),
3568 git_diff: self.git_diff.clone(),
3569 syntax: self.syntax.clone(),
3570 file: self.file.clone(),
3571 remote_selections: self.remote_selections.clone(),
3572 diagnostics: self.diagnostics.clone(),
3573 language: self.language.clone(),
3574 non_text_state_update_count: self.non_text_state_update_count,
3575 }
3576 }
3577}
3578
3579impl Deref for BufferSnapshot {
3580 type Target = text::BufferSnapshot;
3581
3582 fn deref(&self) -> &Self::Target {
3583 &self.text
3584 }
3585}
3586
3587unsafe impl<'a> Send for BufferChunks<'a> {}
3588
3589impl<'a> BufferChunks<'a> {
3590 pub(crate) fn new(
3591 text: &'a Rope,
3592 range: Range<usize>,
3593 syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
3594 diagnostic_endpoints: Vec<DiagnosticEndpoint>,
3595 ) -> Self {
3596 let mut highlights = None;
3597 if let Some((captures, highlight_maps)) = syntax {
3598 highlights = Some(BufferChunkHighlights {
3599 captures,
3600 next_capture: None,
3601 stack: Default::default(),
3602 highlight_maps,
3603 })
3604 }
3605
3606 let diagnostic_endpoints = diagnostic_endpoints.into_iter().peekable();
3607 let chunks = text.chunks_in_range(range.clone());
3608
3609 BufferChunks {
3610 range,
3611 chunks,
3612 diagnostic_endpoints,
3613 error_depth: 0,
3614 warning_depth: 0,
3615 information_depth: 0,
3616 hint_depth: 0,
3617 unnecessary_depth: 0,
3618 highlights,
3619 }
3620 }
3621
3622 /// Seeks to the given byte offset in the buffer.
3623 pub fn seek(&mut self, offset: usize) {
3624 self.range.start = offset;
3625 self.chunks.seek(self.range.start);
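        // If syntax highlighting is active: drop highlights that end at or before
        // the new offset, re-open the pending capture when the offset now falls
        // inside it, and narrow the capture query to the remaining range.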
3626 if let Some(highlights) = self.highlights.as_mut() {
3627 highlights
3628 .stack
3629 .retain(|(end_offset, _)| *end_offset > offset);
3630 if let Some(capture) = &highlights.next_capture {
3631 if offset >= capture.node.start_byte() {
3632 let next_capture_end = capture.node.end_byte();
3633 if offset < next_capture_end {
3634 highlights.stack.push((
3635 next_capture_end,
3636 highlights.highlight_maps[capture.grammar_index].get(capture.index),
3637 ));
3638 }
3639 highlights.next_capture.take();
3640 }
3641 }
3642 highlights.captures.set_byte_range(self.range.clone());
3643 }
3644 }
3645
3646 /// The current byte offset in the buffer.
3647 pub fn offset(&self) -> usize {
3648 self.range.start
3649 }
3650
3651 fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
3652 let depth = match endpoint.severity {
3653 DiagnosticSeverity::ERROR => &mut self.error_depth,
3654 DiagnosticSeverity::WARNING => &mut self.warning_depth,
3655 DiagnosticSeverity::INFORMATION => &mut self.information_depth,
3656 DiagnosticSeverity::HINT => &mut self.hint_depth,
3657 _ => return,
3658 };
3659 if endpoint.is_start {
3660 *depth += 1;
3661 } else {
3662 *depth -= 1;
3663 }
3664
3665 if endpoint.is_unnecessary {
3666 if endpoint.is_start {
3667 self.unnecessary_depth += 1;
3668 } else {
3669 self.unnecessary_depth -= 1;
3670 }
3671 }
3672 }
3673
3674 fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
3675 if self.error_depth > 0 {
3676 Some(DiagnosticSeverity::ERROR)
3677 } else if self.warning_depth > 0 {
3678 Some(DiagnosticSeverity::WARNING)
3679 } else if self.information_depth > 0 {
3680 Some(DiagnosticSeverity::INFORMATION)
3681 } else if self.hint_depth > 0 {
3682 Some(DiagnosticSeverity::HINT)
3683 } else {
3684 None
3685 }
3686 }
3687
3688 fn current_code_is_unnecessary(&self) -> bool {
3689 self.unnecessary_depth > 0
3690 }
3691}
3692
3693impl<'a> Iterator for BufferChunks<'a> {
3694 type Item = Chunk<'a>;
3695
3696 fn next(&mut self) -> Option<Self::Item> {
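        // Each chunk this iterator yields ends at whichever boundary comes first:
        // the end of the current rope chunk, the start of the next syntax capture,
        // the end of the innermost active capture, or the next diagnostic endpoint.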
3697 let mut next_capture_start = usize::MAX;
3698 let mut next_diagnostic_endpoint = usize::MAX;
3699
3700 if let Some(highlights) = self.highlights.as_mut() {
3701 while let Some((parent_capture_end, _)) = highlights.stack.last() {
3702 if *parent_capture_end <= self.range.start {
3703 highlights.stack.pop();
3704 } else {
3705 break;
3706 }
3707 }
3708
3709 if highlights.next_capture.is_none() {
3710 highlights.next_capture = highlights.captures.next();
3711 }
3712
3713 while let Some(capture) = highlights.next_capture.as_ref() {
3714 if self.range.start < capture.node.start_byte() {
3715 next_capture_start = capture.node.start_byte();
3716 break;
3717 } else {
3718 let highlight_id =
3719 highlights.highlight_maps[capture.grammar_index].get(capture.index);
3720 highlights
3721 .stack
3722 .push((capture.node.end_byte(), highlight_id));
3723 highlights.next_capture = highlights.captures.next();
3724 }
3725 }
3726 }
3727
3728 while let Some(endpoint) = self.diagnostic_endpoints.peek().copied() {
3729 if endpoint.offset <= self.range.start {
3730 self.update_diagnostic_depths(endpoint);
3731 self.diagnostic_endpoints.next();
3732 } else {
3733 next_diagnostic_endpoint = endpoint.offset;
3734 break;
3735 }
3736 }
3737
3738 if let Some(chunk) = self.chunks.peek() {
3739 let chunk_start = self.range.start;
3740 let mut chunk_end = (self.chunks.offset() + chunk.len())
3741 .min(next_capture_start)
3742 .min(next_diagnostic_endpoint);
3743 let mut highlight_id = None;
3744 if let Some(highlights) = self.highlights.as_ref() {
3745 if let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last() {
3746 chunk_end = chunk_end.min(*parent_capture_end);
3747 highlight_id = Some(*parent_highlight_id);
3748 }
3749 }
3750
3751 let slice =
3752 &chunk[chunk_start - self.chunks.offset()..chunk_end - self.chunks.offset()];
3753 self.range.start = chunk_end;
3754 if self.range.start == self.chunks.offset() + chunk.len() {
3755 self.chunks.next().unwrap();
3756 }
3757
3758 Some(Chunk {
3759 text: slice,
3760 syntax_highlight_id: highlight_id,
3761 diagnostic_severity: self.current_diagnostic_severity(),
3762 is_unnecessary: self.current_code_is_unnecessary(),
3763 ..Default::default()
3764 })
3765 } else {
3766 None
3767 }
3768 }
3769}
3770
3771impl operation_queue::Operation for Operation {
3772 fn lamport_timestamp(&self) -> clock::Lamport {
3773 match self {
3774 Operation::Buffer(_) => {
3775 unreachable!("buffer operations should never be deferred at this layer")
3776 }
3777 Operation::UpdateDiagnostics {
3778 lamport_timestamp, ..
3779 }
3780 | Operation::UpdateSelections {
3781 lamport_timestamp, ..
3782 }
3783 | Operation::UpdateCompletionTriggers {
3784 lamport_timestamp, ..
3785 } => *lamport_timestamp,
3786 }
3787 }
3788}
3789
3790impl Default for Diagnostic {
3791 fn default() -> Self {
3792 Self {
3793 source: Default::default(),
3794 code: None,
3795 severity: DiagnosticSeverity::ERROR,
3796 message: Default::default(),
3797 group_id: 0,
3798 is_primary: false,
3799 is_disk_based: false,
3800 is_unnecessary: false,
3801 }
3802 }
3803}
3804
3805impl IndentSize {
    /// Returns an [IndentSize] representing the given number of spaces.
3807 pub fn spaces(len: u32) -> Self {
3808 Self {
3809 len,
3810 kind: IndentKind::Space,
3811 }
3812 }
3813
3814 /// Returns an [IndentSize] representing a tab.
3815 pub fn tab() -> Self {
3816 Self {
3817 len: 1,
3818 kind: IndentKind::Tab,
3819 }
3820 }
3821
3822 /// An iterator over the characters represented by this [IndentSize].
3823 pub fn chars(&self) -> impl Iterator<Item = char> {
3824 iter::repeat(self.char()).take(self.len as usize)
3825 }
3826
3827 /// The character representation of this [IndentSize].
3828 pub fn char(&self) -> char {
3829 match self.kind {
3830 IndentKind::Space => ' ',
3831 IndentKind::Tab => '\t',
3832 }
3833 }
3834
    /// Consumes this [IndentSize] and returns a new one that has been shrunk
    /// (`Ordering::Less`) or enlarged (`Ordering::Greater`) by the given size.
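    /// For example, `IndentSize::spaces(2).with_delta(Ordering::Less, IndentSize::spaces(1))`
    /// yields an indent of one space, while enlarging a zero-length indent simply
    /// adopts the given size and kind.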
3837 pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
3838 match direction {
3839 Ordering::Less => {
3840 if self.kind == size.kind && self.len >= size.len {
3841 self.len -= size.len;
3842 }
3843 }
3844 Ordering::Equal => {}
3845 Ordering::Greater => {
3846 if self.len == 0 {
3847 self = size;
3848 } else if self.kind == size.kind {
3849 self.len += size.len;
3850 }
3851 }
3852 }
3853 self
3854 }
3855}
3856
3857#[cfg(any(test, feature = "test-support"))]
3858pub struct TestFile {
3859 pub path: Arc<Path>,
3860 pub root_name: String,
3861}
3862
3863#[cfg(any(test, feature = "test-support"))]
3864impl File for TestFile {
3865 fn path(&self) -> &Arc<Path> {
3866 &self.path
3867 }
3868
3869 fn full_path(&self, _: &gpui::AppContext) -> PathBuf {
3870 PathBuf::from(&self.root_name).join(self.path.as_ref())
3871 }
3872
3873 fn as_local(&self) -> Option<&dyn LocalFile> {
3874 None
3875 }
3876
3877 fn mtime(&self) -> Option<SystemTime> {
3878 unimplemented!()
3879 }
3880
3881 fn file_name<'a>(&'a self, _: &'a gpui::AppContext) -> &'a std::ffi::OsStr {
3882 self.path().file_name().unwrap_or(self.root_name.as_ref())
3883 }
3884
3885 fn worktree_id(&self) -> usize {
3886 0
3887 }
3888
3889 fn is_deleted(&self) -> bool {
3890 unimplemented!()
3891 }
3892
3893 fn as_any(&self) -> &dyn std::any::Any {
3894 unimplemented!()
3895 }
3896
3897 fn to_proto(&self) -> rpc::proto::File {
3898 unimplemented!()
3899 }
3900
3901 fn is_private(&self) -> bool {
3902 false
3903 }
3904}
3905
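/// Coalesces an ascending sequence of values into contiguous ranges, starting a
/// new range whenever a value is not adjacent to the previous one or the current
/// range has reached `max_len`.
///
/// For example, `contiguous_ranges([1, 2, 3, 5, 6].into_iter(), 100)` yields
/// `1..4` followed by `5..7`.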
3906pub(crate) fn contiguous_ranges(
3907 values: impl Iterator<Item = u32>,
3908 max_len: usize,
3909) -> impl Iterator<Item = Range<u32>> {
3910 let mut values = values;
3911 let mut current_range: Option<Range<u32>> = None;
3912 std::iter::from_fn(move || loop {
3913 if let Some(value) = values.next() {
3914 if let Some(range) = &mut current_range {
3915 if value == range.end && range.len() < max_len {
3916 range.end += 1;
3917 continue;
3918 }
3919 }
3920
3921 let prev_range = current_range.clone();
3922 current_range = Some(value..(value + 1));
3923 if prev_range.is_some() {
3924 return prev_range;
3925 }
3926 } else {
3927 return current_range.take();
3928 }
3929 })
3930}
3931
3932/// Returns the [CharKind] for the given character. When a scope is provided,
3933/// the function checks if the character is considered a word character
3934/// based on the language scope's word character settings.
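/// For example, with no scope, `char_kind(&None, '_')` is `CharKind::Word` and
/// `char_kind(&None, '-')` is `CharKind::Punctuation`.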
3935pub fn char_kind(scope: &Option<LanguageScope>, c: char) -> CharKind {
3936 if c.is_whitespace() {
3937 return CharKind::Whitespace;
3938 } else if c.is_alphanumeric() || c == '_' {
3939 return CharKind::Word;
3940 }
3941
3942 if let Some(scope) = scope {
3943 if let Some(characters) = scope.word_characters() {
3944 if characters.contains(&c) {
3945 return CharKind::Word;
3946 }
3947 }
3948 }
3949
3950 CharKind::Punctuation
3951}
3952
/// Finds all of the ranges of whitespace that occur at the ends of lines
3954/// in the given rope.
3955///
3956/// This could also be done with a regex search, but this implementation
3957/// avoids copying text.
3958pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
3959 let mut ranges = Vec::new();
3960
3961 let mut offset = 0;
3962 let mut prev_chunk_trailing_whitespace_range = 0..0;
3963 for chunk in rope.chunks() {
3964 let mut prev_line_trailing_whitespace_range = 0..0;
3965 for (i, line) in chunk.split('\n').enumerate() {
3966 let line_end_offset = offset + line.len();
3967 let trimmed_line_len = line.trim_end_matches(|c| matches!(c, ' ' | '\t')).len();
3968 let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;
3969
3970 if i == 0 && trimmed_line_len == 0 {
3971 trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
3972 }
3973 if !prev_line_trailing_whitespace_range.is_empty() {
3974 ranges.push(prev_line_trailing_whitespace_range);
3975 }
3976
3977 offset = line_end_offset + 1;
3978 prev_line_trailing_whitespace_range = trailing_whitespace_range;
3979 }
3980
3981 offset -= 1;
3982 prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
3983 }
3984
3985 if !prev_chunk_trailing_whitespace_range.is_empty() {
3986 ranges.push(prev_chunk_trailing_whitespace_range);
3987 }
3988
3989 ranges
3990}
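
// A minimal, illustrative sketch (not part of the original module) exercising
// `trailing_whitespace_ranges`; it assumes `Rope` implements `From<&str>`.
#[cfg(test)]
mod trailing_whitespace_sketch {
    use super::*;

    #[test]
    fn reports_one_range_per_line_with_trailing_whitespace() {
        // "let x = 1;" is followed by two spaces, "let y = 2;" by a tab.
        let rope = Rope::from("let x = 1;  \nlet y = 2;\t\n");
        assert_eq!(trailing_whitespace_ranges(&rope), vec![10..12, 23..24]);
    }
}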