1pub use crate::{
2 diagnostic_set::DiagnosticSet,
3 highlight_map::{HighlightId, HighlightMap},
4 markdown::ParsedMarkdown,
5 proto, Grammar, Language, LanguageRegistry,
6};
7use crate::{
8 diagnostic_set::{DiagnosticEntry, DiagnosticGroup},
9 language_settings::{language_settings, LanguageSettings},
10 markdown::parse_markdown,
11 outline::OutlineItem,
12 syntax_map::{
13 SyntaxLayer, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures, SyntaxMapMatches,
14 SyntaxSnapshot, ToTreeSitterPoint,
15 },
16 task_context::RunnableRange,
17 LanguageScope, Outline, RunnableTag,
18};
19use anyhow::{anyhow, Context, Result};
20pub use clock::ReplicaId;
21use futures::channel::oneshot;
22use gpui::{AppContext, EventEmitter, HighlightStyle, ModelContext, Task, TaskLabel};
23use lazy_static::lazy_static;
24use lsp::LanguageServerId;
25use parking_lot::Mutex;
26use similar::{ChangeTag, TextDiff};
27use smallvec::SmallVec;
28use smol::future::yield_now;
29use std::{
30 any::Any,
31 cmp::{self, Ordering},
32 collections::BTreeMap,
33 ffi::OsStr,
34 future::Future,
35 iter::{self, Iterator, Peekable},
36 mem,
37 ops::{Deref, Range},
38 path::{Path, PathBuf},
39 str,
40 sync::Arc,
41 time::{Duration, Instant, SystemTime},
42 vec,
43};
44use sum_tree::TreeMap;
45use text::operation_queue::OperationQueue;
46use text::*;
47pub use text::{
48 Anchor, Bias, Buffer as TextBuffer, BufferId, BufferSnapshot as TextBufferSnapshot, Edit,
49 OffsetRangeExt, OffsetUtf16, Patch, Point, PointUtf16, Rope, Selection, SelectionGoal,
50 Subscription, TextDimension, TextSummary, ToOffset, ToOffsetUtf16, ToPoint, ToPointUtf16,
51 Transaction, TransactionId, Unclipped,
52};
53use theme::SyntaxTheme;
54#[cfg(any(test, feature = "test-support"))]
55use util::RandomCharIter;
56use util::RangeExt;
57
58#[cfg(any(test, feature = "test-support"))]
59pub use {tree_sitter_rust, tree_sitter_typescript};
60
61pub use lsp::DiagnosticSeverity;
62
63lazy_static! {
64 /// A label for the background task spawned by the buffer to compute
65 /// a diff against the contents of its file.
66 pub static ref BUFFER_DIFF_TASK: TaskLabel = TaskLabel::new();
67}
68
/// Indicates whether a [Buffer] has permission to edit.
70#[derive(PartialEq, Clone, Copy, Debug)]
71pub enum Capability {
72 /// The buffer is a mutable replica.
73 ReadWrite,
74 /// The buffer is a read-only replica.
75 ReadOnly,
76}
77
78pub type BufferRow = u32;
79
80/// An in-memory representation of a source code file, including its text,
81/// syntax trees, git status, and diagnostics.
82pub struct Buffer {
83 text: TextBuffer,
84 diff_base: Option<Rope>,
85 git_diff: git::diff::BufferDiff,
86 file: Option<Arc<dyn File>>,
87 /// The mtime of the file when this buffer was last loaded from
88 /// or saved to disk.
89 saved_mtime: Option<SystemTime>,
90 /// The version vector when this buffer was last loaded from
91 /// or saved to disk.
92 saved_version: clock::Global,
93 transaction_depth: usize,
94 was_dirty_before_starting_transaction: Option<bool>,
95 reload_task: Option<Task<Result<()>>>,
96 language: Option<Arc<Language>>,
97 autoindent_requests: Vec<Arc<AutoindentRequest>>,
98 pending_autoindent: Option<Task<()>>,
99 sync_parse_timeout: Duration,
100 syntax_map: Mutex<SyntaxMap>,
101 parsing_in_background: bool,
102 parse_count: usize,
103 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
104 remote_selections: TreeMap<ReplicaId, SelectionSet>,
105 selections_update_count: usize,
106 diagnostics_update_count: usize,
107 diagnostics_timestamp: clock::Lamport,
108 file_update_count: usize,
109 git_diff_update_count: usize,
110 completion_triggers: Vec<String>,
111 completion_triggers_timestamp: clock::Lamport,
112 deferred_ops: OperationQueue<Operation>,
113 capability: Capability,
114 has_conflict: bool,
115 diff_base_version: usize,
116}
117
118/// An immutable, cheaply cloneable representation of a fixed
119/// state of a buffer.
120pub struct BufferSnapshot {
121 text: text::BufferSnapshot,
122 git_diff: git::diff::BufferDiff,
123 pub(crate) syntax: SyntaxSnapshot,
124 file: Option<Arc<dyn File>>,
125 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
126 diagnostics_update_count: usize,
127 file_update_count: usize,
128 git_diff_update_count: usize,
129 remote_selections: TreeMap<ReplicaId, SelectionSet>,
130 selections_update_count: usize,
131 language: Option<Arc<Language>>,
132 parse_count: usize,
133}
134
135/// The kind and amount of indentation in a particular line. For now,
136/// assumes that indentation is all the same character.
137#[derive(Clone, Copy, Debug, PartialEq, Eq, Default)]
138pub struct IndentSize {
139 /// The number of bytes that comprise the indentation.
140 pub len: u32,
141 /// The kind of whitespace used for indentation.
142 pub kind: IndentKind,
143}
144
145/// A whitespace character that's used for indentation.
146#[derive(Clone, Copy, Debug, PartialEq, Eq, Default)]
147pub enum IndentKind {
148 /// An ASCII space character.
149 #[default]
150 Space,
151 /// An ASCII tab character.
152 Tab,
153}
154
155/// The shape of a selection cursor.
156#[derive(Copy, Clone, PartialEq, Eq, Debug, Default)]
157pub enum CursorShape {
158 /// A vertical bar
159 #[default]
160 Bar,
161 /// A block that surrounds the following character
162 Block,
163 /// An underline that runs along the following character
164 Underscore,
165 /// A box drawn around the following character
166 Hollow,
167}
168
169#[derive(Clone, Debug)]
170struct SelectionSet {
171 line_mode: bool,
172 cursor_shape: CursorShape,
173 selections: Arc<[Selection<Anchor>]>,
174 lamport_timestamp: clock::Lamport,
175}
176
177/// A diagnostic associated with a certain range of a buffer.
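///
/// A minimal construction sketch (illustrative values only; the severity
/// constant comes from the `lsp` re-export used by this crate):
///
/// ```ignore
/// let diagnostic = Diagnostic {
///     source: Some("rustc".to_string()),
///     code: Some("E0308".to_string()),
///     severity: DiagnosticSeverity::ERROR,
///     message: "mismatched types".to_string(),
///     group_id: 0,
///     is_primary: true,
///     is_disk_based: true,
///     is_unnecessary: false,
/// };
/// ```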
178#[derive(Clone, Debug, PartialEq, Eq)]
179pub struct Diagnostic {
180 /// The name of the service that produced this diagnostic.
181 pub source: Option<String>,
182 /// A machine-readable code that identifies this diagnostic.
183 pub code: Option<String>,
184 /// Whether this diagnostic is a hint, warning, or error.
185 pub severity: DiagnosticSeverity,
186 /// The human-readable message associated with this diagnostic.
187 pub message: String,
188 /// An id that identifies the group to which this diagnostic belongs.
189 ///
190 /// When a language server produces a diagnostic with
191 /// one or more associated diagnostics, those diagnostics are all
192 /// assigned a single group id.
193 pub group_id: usize,
194 /// Whether this diagnostic is the primary diagnostic for its group.
195 ///
196 /// In a given group, the primary diagnostic is the top-level diagnostic
197 /// returned by the language server. The non-primary diagnostics are the
198 /// associated diagnostics.
199 pub is_primary: bool,
200 /// Whether this diagnostic is considered to originate from an analysis of
201 /// files on disk, as opposed to any unsaved buffer contents. This is a
202 /// property of a given diagnostic source, and is configured for a given
203 /// language server via the [`LspAdapter::disk_based_diagnostic_sources`](crate::LspAdapter::disk_based_diagnostic_sources) method
204 /// for the language server.
205 pub is_disk_based: bool,
206 /// Whether this diagnostic marks unnecessary code.
207 pub is_unnecessary: bool,
208}
209
210/// TODO - move this into the `project` crate and make it private.
211pub async fn prepare_completion_documentation(
212 documentation: &lsp::Documentation,
213 language_registry: &Arc<LanguageRegistry>,
214 language: Option<Arc<Language>>,
215) -> Documentation {
216 match documentation {
217 lsp::Documentation::String(text) => {
218 if text.lines().count() <= 1 {
219 Documentation::SingleLine(text.clone())
220 } else {
221 Documentation::MultiLinePlainText(text.clone())
222 }
223 }
224
225 lsp::Documentation::MarkupContent(lsp::MarkupContent { kind, value }) => match kind {
226 lsp::MarkupKind::PlainText => {
227 if value.lines().count() <= 1 {
228 Documentation::SingleLine(value.clone())
229 } else {
230 Documentation::MultiLinePlainText(value.clone())
231 }
232 }
233
234 lsp::MarkupKind::Markdown => {
235 let parsed = parse_markdown(value, language_registry, language).await;
236 Documentation::MultiLineMarkdown(parsed)
237 }
238 },
239 }
240}
241
242/// Documentation associated with a [`Completion`].
243#[derive(Clone, Debug)]
244pub enum Documentation {
245 /// There is no documentation for this completion.
246 Undocumented,
247 /// A single line of documentation.
248 SingleLine(String),
249 /// Multiple lines of plain text documentation.
250 MultiLinePlainText(String),
251 /// Markdown documentation.
252 MultiLineMarkdown(ParsedMarkdown),
253}
254
255/// An operation used to synchronize this buffer with its other replicas.
256#[derive(Clone, Debug, PartialEq)]
257pub enum Operation {
258 /// A text operation.
259 Buffer(text::Operation),
260
261 /// An update to the buffer's diagnostics.
262 UpdateDiagnostics {
263 /// The id of the language server that produced the new diagnostics.
264 server_id: LanguageServerId,
265 /// The diagnostics.
266 diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
267 /// The buffer's lamport timestamp.
268 lamport_timestamp: clock::Lamport,
269 },
270
271 /// An update to the most recent selections in this buffer.
272 UpdateSelections {
273 /// The selections.
274 selections: Arc<[Selection<Anchor>]>,
275 /// The buffer's lamport timestamp.
276 lamport_timestamp: clock::Lamport,
277 /// Whether the selections are in 'line mode'.
278 line_mode: bool,
279 /// The [`CursorShape`] associated with these selections.
280 cursor_shape: CursorShape,
281 },
282
283 /// An update to the characters that should trigger autocompletion
284 /// for this buffer.
285 UpdateCompletionTriggers {
286 /// The characters that trigger autocompletion.
287 triggers: Vec<String>,
288 /// The buffer's lamport timestamp.
289 lamport_timestamp: clock::Lamport,
290 },
291}
292
293/// An event that occurs in a buffer.
294#[derive(Clone, Debug, PartialEq)]
295pub enum Event {
296 /// The buffer was changed in a way that must be
297 /// propagated to its other replicas.
298 Operation(Operation),
299 /// The buffer was edited.
300 Edited,
301 /// The buffer's `dirty` bit changed.
302 DirtyChanged,
303 /// The buffer was saved.
304 Saved,
305 /// The buffer's file was changed on disk.
306 FileHandleChanged,
307 /// The buffer was reloaded.
308 Reloaded,
309 /// The buffer's diff_base changed.
310 DiffBaseChanged,
311 /// Buffer's excerpts for a certain diff base were recalculated.
312 DiffUpdated,
313 /// The buffer's language was changed.
314 LanguageChanged,
315 /// The buffer's syntax trees were updated.
316 Reparsed,
317 /// The buffer's diagnostics were updated.
318 DiagnosticsUpdated,
319 /// The buffer gained or lost editing capabilities.
320 CapabilityChanged,
321 /// The buffer was explicitly requested to close.
322 Closed,
323}
324
325/// The file associated with a buffer.
326pub trait File: Send + Sync {
327 /// Returns the [`LocalFile`] associated with this file, if the
328 /// file is local.
329 fn as_local(&self) -> Option<&dyn LocalFile>;
330
331 /// Returns whether this file is local.
332 fn is_local(&self) -> bool {
333 self.as_local().is_some()
334 }
335
336 /// Returns the file's mtime.
337 fn mtime(&self) -> Option<SystemTime>;
338
339 /// Returns the path of this file relative to the worktree's root directory.
340 fn path(&self) -> &Arc<Path>;
341
342 /// Returns the path of this file relative to the worktree's parent directory (this means it
343 /// includes the name of the worktree's root folder).
344 fn full_path(&self, cx: &AppContext) -> PathBuf;
345
346 /// Returns the last component of this handle's absolute path. If this handle refers to the root
347 /// of its worktree, then this method will return the name of the worktree itself.
348 fn file_name<'a>(&'a self, cx: &'a AppContext) -> &'a OsStr;
349
350 /// Returns the id of the worktree to which this file belongs.
351 ///
352 /// This is needed for looking up project-specific settings.
353 fn worktree_id(&self) -> usize;
354
355 /// Returns whether the file has been deleted.
356 fn is_deleted(&self) -> bool;
357
    /// Returns whether the file existed on disk at one point.
359 fn is_created(&self) -> bool {
360 self.mtime().is_some()
361 }
362
363 /// Converts this file into an [`Any`] trait object.
364 fn as_any(&self) -> &dyn Any;
365
366 /// Converts this file into a protobuf message.
367 fn to_proto(&self) -> rpc::proto::File;
368
369 /// Return whether Zed considers this to be a private file.
370 fn is_private(&self) -> bool;
371}
372
373/// The file associated with a buffer, in the case where the file is on the local disk.
374pub trait LocalFile: File {
375 /// Returns the absolute path of this file.
376 fn abs_path(&self, cx: &AppContext) -> PathBuf;
377
378 /// Loads the file's contents from disk.
379 fn load(&self, cx: &AppContext) -> Task<Result<String>>;
380
381 /// Called when the buffer is reloaded from disk.
382 fn buffer_reloaded(
383 &self,
384 buffer_id: BufferId,
385 version: &clock::Global,
386 line_ending: LineEnding,
387 mtime: Option<SystemTime>,
388 cx: &mut AppContext,
389 );
390
391 /// Returns true if the file should not be shared with collaborators.
392 fn is_private(&self, _: &AppContext) -> bool {
393 false
394 }
395}
396
397/// The auto-indent behavior associated with an editing operation.
398/// For some editing operations, each affected line of text has its
399/// indentation recomputed. For other operations, the entire block
400/// of edited text is adjusted uniformly.
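///
/// A usage sketch (assumes a buffer and a `ModelContext` are in scope; the
/// edited text, ranges, and indent column are illustrative):
///
/// ```ignore
/// // Re-indent each inserted line independently:
/// buffer.edit([(0..0, "if x {\n    y();\n}\n")], Some(AutoindentMode::EachLine), cx);
///
/// // Shift a pasted block uniformly, preserving its internal indentation.
/// // The copied block's first line was originally indented to column 4.
/// buffer.edit(
///     [(0..0, "    foo();\n    bar();\n")],
///     Some(AutoindentMode::Block { original_indent_columns: vec![4] }),
///     cx,
/// );
/// ```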
401#[derive(Clone, Debug)]
402pub enum AutoindentMode {
403 /// Indent each line of inserted text.
404 EachLine,
405 /// Apply the same indentation adjustment to all of the lines
406 /// in a given insertion.
407 Block {
408 /// The original indentation level of the first line of each
409 /// insertion, if it has been copied.
410 original_indent_columns: Vec<u32>,
411 },
412}
413
414#[derive(Clone)]
415struct AutoindentRequest {
416 before_edit: BufferSnapshot,
417 entries: Vec<AutoindentRequestEntry>,
418 is_block_mode: bool,
419}
420
421#[derive(Clone)]
422struct AutoindentRequestEntry {
423 /// A range of the buffer whose indentation should be adjusted.
424 range: Range<Anchor>,
425 /// Whether or not these lines should be considered brand new, for the
426 /// purpose of auto-indent. When text is not new, its indentation will
427 /// only be adjusted if the suggested indentation level has *changed*
428 /// since the edit was made.
429 first_line_is_new: bool,
430 indent_size: IndentSize,
431 original_indent_column: Option<u32>,
432}
433
434#[derive(Debug)]
435struct IndentSuggestion {
436 basis_row: u32,
437 delta: Ordering,
438 within_error: bool,
439}
440
441struct BufferChunkHighlights<'a> {
442 captures: SyntaxMapCaptures<'a>,
443 next_capture: Option<SyntaxMapCapture<'a>>,
444 stack: Vec<(usize, HighlightId)>,
445 highlight_maps: Vec<HighlightMap>,
446}
447
448/// An iterator that yields chunks of a buffer's text, along with their
449/// syntax highlights and diagnostic status.
450pub struct BufferChunks<'a> {
451 range: Range<usize>,
452 chunks: text::Chunks<'a>,
453 diagnostic_endpoints: Peekable<vec::IntoIter<DiagnosticEndpoint>>,
454 error_depth: usize,
455 warning_depth: usize,
456 information_depth: usize,
457 hint_depth: usize,
458 unnecessary_depth: usize,
459 highlights: Option<BufferChunkHighlights<'a>>,
460}
461
462/// A chunk of a buffer's text, along with its syntax highlight and
463/// diagnostic status.
464#[derive(Clone, Copy, Debug, Default)]
465pub struct Chunk<'a> {
466 /// The text of the chunk.
467 pub text: &'a str,
468 /// The syntax highlighting style of the chunk.
469 pub syntax_highlight_id: Option<HighlightId>,
470 /// The highlight style that has been applied to this chunk in
471 /// the editor.
472 pub highlight_style: Option<HighlightStyle>,
473 /// The severity of diagnostic associated with this chunk, if any.
474 pub diagnostic_severity: Option<DiagnosticSeverity>,
475 /// Whether this chunk of text is marked as unnecessary.
476 pub is_unnecessary: bool,
477 /// Whether this chunk of text was originally a tab character.
478 pub is_tab: bool,
479}
480
481/// A set of edits to a given version of a buffer, computed asynchronously.
482#[derive(Debug)]
483pub struct Diff {
484 pub(crate) base_version: clock::Global,
485 line_ending: LineEnding,
486 edits: Vec<(Range<usize>, Arc<str>)>,
487}
488
489#[derive(Clone, Copy)]
490pub(crate) struct DiagnosticEndpoint {
491 offset: usize,
492 is_start: bool,
493 severity: DiagnosticSeverity,
494 is_unnecessary: bool,
495}
496
497/// A class of characters, used for characterizing a run of text.
498#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
499pub enum CharKind {
500 /// Whitespace.
501 Whitespace,
502 /// Punctuation.
503 Punctuation,
504 /// Word.
505 Word,
506}
507
/// A runnable is a set of data about a region that could be resolved into a task.
509pub struct Runnable {
510 pub tags: SmallVec<[RunnableTag; 1]>,
511 pub language: Arc<Language>,
512 pub buffer: BufferId,
513}
514
515impl Buffer {
516 /// Create a new buffer with the given base text.
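    ///
    /// A minimal usage sketch (assumes a gpui `AppContext` is available for
    /// creating the model; names are illustrative):
    ///
    /// ```ignore
    /// let buffer = cx.new_model(|cx| Buffer::local("hello world", cx));
    /// ```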
517 pub fn local<T: Into<String>>(base_text: T, cx: &mut ModelContext<Self>) -> Self {
518 Self::build(
519 TextBuffer::new(0, cx.entity_id().as_non_zero_u64().into(), base_text.into()),
520 None,
521 None,
522 Capability::ReadWrite,
523 )
524 }
525
526 /// Create a new buffer with the given base text that has proper line endings and other normalization applied.
527 pub fn local_normalized(
528 base_text_normalized: Rope,
529 line_ending: LineEnding,
530 cx: &mut ModelContext<Self>,
531 ) -> Self {
532 Self::build(
533 TextBuffer::new_normalized(
534 0,
535 cx.entity_id().as_non_zero_u64().into(),
536 line_ending,
537 base_text_normalized,
538 ),
539 None,
540 None,
541 Capability::ReadWrite,
542 )
543 }
544
545 /// Create a new buffer that is a replica of a remote buffer.
546 pub fn remote(
547 remote_id: BufferId,
548 replica_id: ReplicaId,
549 capability: Capability,
550 base_text: impl Into<String>,
551 ) -> Self {
552 Self::build(
553 TextBuffer::new(replica_id, remote_id, base_text.into()),
554 None,
555 None,
556 capability,
557 )
558 }
559
560 /// Create a new buffer that is a replica of a remote buffer, populating its
561 /// state from the given protobuf message.
562 pub fn from_proto(
563 replica_id: ReplicaId,
564 capability: Capability,
565 message: proto::BufferState,
566 file: Option<Arc<dyn File>>,
567 ) -> Result<Self> {
568 let buffer_id = BufferId::new(message.id)
569 .with_context(|| anyhow!("Could not deserialize buffer_id"))?;
570 let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text);
571 let mut this = Self::build(buffer, message.diff_base, file, capability);
572 this.text.set_line_ending(proto::deserialize_line_ending(
573 rpc::proto::LineEnding::from_i32(message.line_ending)
574 .ok_or_else(|| anyhow!("missing line_ending"))?,
575 ));
576 this.saved_version = proto::deserialize_version(&message.saved_version);
577 this.saved_mtime = message.saved_mtime.map(|time| time.into());
578 Ok(this)
579 }
580
581 /// Serialize the buffer's state to a protobuf message.
582 pub fn to_proto(&self) -> proto::BufferState {
583 proto::BufferState {
584 id: self.remote_id().into(),
585 file: self.file.as_ref().map(|f| f.to_proto()),
586 base_text: self.base_text().to_string(),
587 diff_base: self.diff_base.as_ref().map(|h| h.to_string()),
588 line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
589 saved_version: proto::serialize_version(&self.saved_version),
590 saved_mtime: self.saved_mtime.map(|time| time.into()),
591 }
592 }
593
594 /// Serialize as protobufs all of the changes to the buffer since the given version.
595 pub fn serialize_ops(
596 &self,
597 since: Option<clock::Global>,
598 cx: &AppContext,
599 ) -> Task<Vec<proto::Operation>> {
600 let mut operations = Vec::new();
601 operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));
602
603 operations.extend(self.remote_selections.iter().map(|(_, set)| {
604 proto::serialize_operation(&Operation::UpdateSelections {
605 selections: set.selections.clone(),
606 lamport_timestamp: set.lamport_timestamp,
607 line_mode: set.line_mode,
608 cursor_shape: set.cursor_shape,
609 })
610 }));
611
612 for (server_id, diagnostics) in &self.diagnostics {
613 operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
614 lamport_timestamp: self.diagnostics_timestamp,
615 server_id: *server_id,
616 diagnostics: diagnostics.iter().cloned().collect(),
617 }));
618 }
619
620 operations.push(proto::serialize_operation(
621 &Operation::UpdateCompletionTriggers {
622 triggers: self.completion_triggers.clone(),
623 lamport_timestamp: self.completion_triggers_timestamp,
624 },
625 ));
626
627 let text_operations = self.text.operations().clone();
628 cx.background_executor().spawn(async move {
629 let since = since.unwrap_or_default();
630 operations.extend(
631 text_operations
632 .iter()
633 .filter(|(_, op)| !since.observed(op.timestamp()))
634 .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
635 );
636 operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
637 operations
638 })
639 }
640
641 /// Assign a language to the buffer, returning the buffer.
642 pub fn with_language(mut self, language: Arc<Language>, cx: &mut ModelContext<Self>) -> Self {
643 self.set_language(Some(language), cx);
644 self
645 }
646
647 /// Returns the [Capability] of this buffer.
648 pub fn capability(&self) -> Capability {
649 self.capability
650 }
651
652 /// Whether this buffer can only be read.
653 pub fn read_only(&self) -> bool {
654 self.capability == Capability::ReadOnly
655 }
656
657 /// Builds a [Buffer] with the given underlying [TextBuffer], diff base, [File] and [Capability].
658 pub fn build(
659 buffer: TextBuffer,
660 diff_base: Option<String>,
661 file: Option<Arc<dyn File>>,
662 capability: Capability,
663 ) -> Self {
664 let saved_mtime = file.as_ref().and_then(|file| file.mtime());
665
666 Self {
667 saved_mtime,
668 saved_version: buffer.version(),
669 reload_task: None,
670 transaction_depth: 0,
671 was_dirty_before_starting_transaction: None,
672 text: buffer,
673 diff_base: diff_base
674 .map(|mut raw_diff_base| {
675 LineEnding::normalize(&mut raw_diff_base);
676 raw_diff_base
677 })
678 .map(Rope::from),
679 diff_base_version: 0,
680 git_diff: git::diff::BufferDiff::new(),
681 file,
682 capability,
683 syntax_map: Mutex::new(SyntaxMap::new()),
684 parsing_in_background: false,
685 parse_count: 0,
686 sync_parse_timeout: Duration::from_millis(1),
687 autoindent_requests: Default::default(),
688 pending_autoindent: Default::default(),
689 language: None,
690 remote_selections: Default::default(),
691 selections_update_count: 0,
692 diagnostics: Default::default(),
693 diagnostics_update_count: 0,
694 diagnostics_timestamp: Default::default(),
695 file_update_count: 0,
696 git_diff_update_count: 0,
697 completion_triggers: Default::default(),
698 completion_triggers_timestamp: Default::default(),
699 deferred_ops: OperationQueue::new(),
700 has_conflict: false,
701 }
702 }
703
704 /// Retrieve a snapshot of the buffer's current state. This is computationally
705 /// cheap, and allows reading from the buffer on a background thread.
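    ///
    /// A sketch of reading from a snapshot off the main thread (assumes a
    /// background executor is available via `cx`):
    ///
    /// ```ignore
    /// let snapshot = buffer.snapshot();
    /// cx.background_executor()
    ///     .spawn(async move {
    ///         let line_count = snapshot.max_point().row + 1;
    ///         println!("{} lines, {} bytes", line_count, snapshot.len());
    ///     })
    ///     .detach();
    /// ```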
706 pub fn snapshot(&self) -> BufferSnapshot {
707 let text = self.text.snapshot();
708 let mut syntax_map = self.syntax_map.lock();
709 syntax_map.interpolate(&text);
710 let syntax = syntax_map.snapshot();
711
712 BufferSnapshot {
713 text,
714 syntax,
715 git_diff: self.git_diff.clone(),
716 file: self.file.clone(),
717 remote_selections: self.remote_selections.clone(),
718 diagnostics: self.diagnostics.clone(),
719 diagnostics_update_count: self.diagnostics_update_count,
720 file_update_count: self.file_update_count,
721 git_diff_update_count: self.git_diff_update_count,
722 language: self.language.clone(),
723 parse_count: self.parse_count,
724 selections_update_count: self.selections_update_count,
725 }
726 }
727
728 #[cfg(test)]
729 pub(crate) fn as_text_snapshot(&self) -> &text::BufferSnapshot {
730 &self.text
731 }
732
733 /// Retrieve a snapshot of the buffer's raw text, without any
734 /// language-related state like the syntax tree or diagnostics.
735 pub fn text_snapshot(&self) -> text::BufferSnapshot {
736 self.text.snapshot()
737 }
738
739 /// The file associated with the buffer, if any.
740 pub fn file(&self) -> Option<&Arc<dyn File>> {
741 self.file.as_ref()
742 }
743
744 /// The version of the buffer that was last saved or reloaded from disk.
745 pub fn saved_version(&self) -> &clock::Global {
746 &self.saved_version
747 }
748
749 /// The mtime of the buffer's file when the buffer was last saved or reloaded from disk.
750 pub fn saved_mtime(&self) -> Option<SystemTime> {
751 self.saved_mtime
752 }
753
754 /// Assign a language to the buffer.
755 pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut ModelContext<Self>) {
756 self.parse_count += 1;
757 self.syntax_map.lock().clear();
758 self.language = language;
759 self.reparse(cx);
760 cx.emit(Event::LanguageChanged);
761 }
762
763 /// Assign a language registry to the buffer. This allows the buffer to retrieve
764 /// other languages if parts of the buffer are written in different languages.
765 pub fn set_language_registry(&mut self, language_registry: Arc<LanguageRegistry>) {
766 self.syntax_map
767 .lock()
768 .set_language_registry(language_registry);
769 }
770
771 /// Assign the buffer a new [Capability].
772 pub fn set_capability(&mut self, capability: Capability, cx: &mut ModelContext<Self>) {
773 self.capability = capability;
774 cx.emit(Event::CapabilityChanged)
775 }
776
777 /// This method is called to signal that the buffer has been saved.
778 pub fn did_save(
779 &mut self,
780 version: clock::Global,
781 mtime: Option<SystemTime>,
782 cx: &mut ModelContext<Self>,
783 ) {
784 self.saved_version = version;
785 self.has_conflict = false;
786 self.saved_mtime = mtime;
787 cx.emit(Event::Saved);
788 cx.notify();
789 }
790
791 /// Reloads the contents of the buffer from disk.
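    ///
    /// A usage sketch: the returned receiver resolves with the reload
    /// transaction, if any, once the reload finishes (names are illustrative):
    ///
    /// ```ignore
    /// let reloaded = buffer.reload(cx);
    /// // ...later, from an async context:
    /// if let Ok(Some(_transaction)) = reloaded.await {
    ///     // The buffer's text was replaced by the on-disk contents.
    /// }
    /// ```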
792 pub fn reload(
793 &mut self,
794 cx: &mut ModelContext<Self>,
795 ) -> oneshot::Receiver<Option<Transaction>> {
796 let (tx, rx) = futures::channel::oneshot::channel();
797 let prev_version = self.text.version();
798 self.reload_task = Some(cx.spawn(|this, mut cx| async move {
799 let Some((new_mtime, new_text)) = this.update(&mut cx, |this, cx| {
800 let file = this.file.as_ref()?.as_local()?;
801 Some((file.mtime(), file.load(cx)))
802 })?
803 else {
804 return Ok(());
805 };
806
807 let new_text = new_text.await?;
808 let diff = this
809 .update(&mut cx, |this, cx| this.diff(new_text.clone(), cx))?
810 .await;
811 this.update(&mut cx, |this, cx| {
812 if this.version() == diff.base_version {
813 this.finalize_last_transaction();
814 this.apply_diff(diff, cx);
815 tx.send(this.finalize_last_transaction().cloned()).ok();
816 this.has_conflict = false;
817 this.did_reload(this.version(), this.line_ending(), new_mtime, cx);
818 } else {
819 if !diff.edits.is_empty()
820 || this
821 .edits_since::<usize>(&diff.base_version)
822 .next()
823 .is_some()
824 {
825 this.has_conflict = true;
826 }
827
828 this.did_reload(prev_version, this.line_ending(), this.saved_mtime, cx);
829 }
830
831 this.reload_task.take();
832 })
833 }));
834 rx
835 }
836
837 /// This method is called to signal that the buffer has been reloaded.
838 pub fn did_reload(
839 &mut self,
840 version: clock::Global,
841 line_ending: LineEnding,
842 mtime: Option<SystemTime>,
843 cx: &mut ModelContext<Self>,
844 ) {
845 self.saved_version = version;
846 self.text.set_line_ending(line_ending);
847 self.saved_mtime = mtime;
848 if let Some(file) = self.file.as_ref().and_then(|f| f.as_local()) {
849 file.buffer_reloaded(
850 self.remote_id(),
851 &self.saved_version,
852 self.line_ending(),
853 self.saved_mtime,
854 cx,
855 );
856 }
857 cx.emit(Event::Reloaded);
858 cx.notify();
859 }
860
861 /// Updates the [File] backing this buffer. This should be called when
862 /// the file has changed or has been deleted.
863 pub fn file_updated(&mut self, new_file: Arc<dyn File>, cx: &mut ModelContext<Self>) {
864 let mut file_changed = false;
865
866 if let Some(old_file) = self.file.as_ref() {
867 if new_file.path() != old_file.path() {
868 file_changed = true;
869 }
870
871 if new_file.is_deleted() {
872 if !old_file.is_deleted() {
873 file_changed = true;
874 if !self.is_dirty() {
875 cx.emit(Event::DirtyChanged);
876 }
877 }
878 } else {
879 let new_mtime = new_file.mtime();
880 if new_mtime != old_file.mtime() {
881 file_changed = true;
882
883 if !self.is_dirty() {
884 self.reload(cx).close();
885 }
886 }
887 }
888 } else {
889 file_changed = true;
890 };
891
892 self.file = Some(new_file);
893 if file_changed {
894 self.file_update_count += 1;
895 cx.emit(Event::FileHandleChanged);
896 cx.notify();
897 }
898 }
899
900 /// Returns the current diff base, see [Buffer::set_diff_base].
901 pub fn diff_base(&self) -> Option<&Rope> {
902 self.diff_base.as_ref()
903 }
904
905 /// Sets the text that will be used to compute a Git diff
906 /// against the buffer text.
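    ///
    /// A usage sketch (the diff base would typically come from version
    /// control, e.g. the file's contents at HEAD):
    ///
    /// ```ignore
    /// let head_text = String::from("fn main() {}\n");
    /// buffer.set_diff_base(Some(head_text), cx);
    /// // A `DiffBaseChanged` event is emitted once the diff is recalculated.
    /// ```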
907 pub fn set_diff_base(&mut self, diff_base: Option<String>, cx: &mut ModelContext<Self>) {
908 self.diff_base = diff_base
909 .map(|mut raw_diff_base| {
910 LineEnding::normalize(&mut raw_diff_base);
911 raw_diff_base
912 })
913 .map(Rope::from);
914 self.diff_base_version += 1;
915 if let Some(recalc_task) = self.git_diff_recalc(cx) {
916 cx.spawn(|buffer, mut cx| async move {
917 recalc_task.await;
918 buffer
919 .update(&mut cx, |_, cx| {
920 cx.emit(Event::DiffBaseChanged);
921 })
922 .ok();
923 })
924 .detach();
925 }
926 }
927
    /// Returns a version number that is incremented each time a new diff base
    /// is set for the buffer (see [`Buffer::set_diff_base`]).
929 pub fn diff_base_version(&self) -> usize {
930 self.diff_base_version
931 }
932
933 /// Recomputes the Git diff status.
934 pub fn git_diff_recalc(&mut self, cx: &mut ModelContext<Self>) -> Option<Task<()>> {
935 let diff_base = self.diff_base.clone()?;
936 let snapshot = self.snapshot();
937
938 let mut diff = self.git_diff.clone();
939 let diff = cx.background_executor().spawn(async move {
940 diff.update(&diff_base, &snapshot).await;
941 diff
942 });
943
944 Some(cx.spawn(|this, mut cx| async move {
945 let buffer_diff = diff.await;
946 this.update(&mut cx, |this, cx| {
947 this.git_diff = buffer_diff;
948 this.git_diff_update_count += 1;
949 cx.emit(Event::DiffUpdated);
950 })
951 .ok();
952 }))
953 }
954
955 /// Returns the primary [Language] assigned to this [Buffer].
956 pub fn language(&self) -> Option<&Arc<Language>> {
957 self.language.as_ref()
958 }
959
960 /// Returns the [Language] at the given location.
961 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
962 let offset = position.to_offset(self);
963 self.syntax_map
964 .lock()
965 .layers_for_range(offset..offset, &self.text)
966 .last()
967 .map(|info| info.language.clone())
968 .or_else(|| self.language.clone())
969 }
970
971 /// The number of times the buffer was parsed.
972 pub fn parse_count(&self) -> usize {
973 self.parse_count
974 }
975
976 /// The number of times selections were updated.
977 pub fn selections_update_count(&self) -> usize {
978 self.selections_update_count
979 }
980
981 /// The number of times diagnostics were updated.
982 pub fn diagnostics_update_count(&self) -> usize {
983 self.diagnostics_update_count
984 }
985
986 /// The number of times the underlying file was updated.
987 pub fn file_update_count(&self) -> usize {
988 self.file_update_count
989 }
990
991 /// The number of times the git diff status was updated.
992 pub fn git_diff_update_count(&self) -> usize {
993 self.git_diff_update_count
994 }
995
996 /// Whether the buffer is being parsed in the background.
997 #[cfg(any(test, feature = "test-support"))]
998 pub fn is_parsing(&self) -> bool {
999 self.parsing_in_background
1000 }
1001
1002 /// Indicates whether the buffer contains any regions that may be
1003 /// written in a language that hasn't been loaded yet.
1004 pub fn contains_unknown_injections(&self) -> bool {
1005 self.syntax_map.lock().contains_unknown_injections()
1006 }
1007
1008 #[cfg(test)]
1009 pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
1010 self.sync_parse_timeout = timeout;
1011 }
1012
1013 /// Called after an edit to synchronize the buffer's main parse tree with
1014 /// the buffer's new underlying state.
1015 ///
1016 /// Locks the syntax map and interpolates the edits since the last reparse
1017 /// into the foreground syntax tree.
1018 ///
1019 /// Then takes a stable snapshot of the syntax map before unlocking it.
1020 /// The snapshot with the interpolated edits is sent to a background thread,
1021 /// where we ask Tree-sitter to perform an incremental parse.
1022 ///
1023 /// Meanwhile, in the foreground, we block the main thread for up to 1ms
1024 /// waiting on the parse to complete. As soon as it completes, we proceed
1025 /// synchronously, unless a 1ms timeout elapses.
1026 ///
    /// If we time out waiting on the parse, we spawn a second task that waits
    /// until the parse does complete, and we return with the interpolated tree
    /// still in the foreground. When the background parse completes, it calls
    /// back into the main thread and assigns the newly parsed state.
1031 ///
1032 /// If the buffer or grammar changed since the start of the background parse,
1033 /// initiate an additional reparse recursively. To avoid concurrent parses
1034 /// for the same buffer, we only initiate a new parse if we are not already
1035 /// parsing in the background.
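    ///
    /// A simplified sketch of the timeout handoff described above (not the
    /// exact implementation; `parse` and `apply` stand in for the real work):
    ///
    /// ```ignore
    /// let task = cx.background_executor().spawn(async move { parse(text) });
    /// match cx.background_executor().block_with_timeout(timeout, task) {
    ///     // The parse finished within the timeout: apply it synchronously.
    ///     Ok(parsed) => apply(self, parsed, cx),
    ///     // Otherwise keep the interpolated tree and apply the result once
    ///     // the background parse completes.
    ///     Err(task) => cx
    ///         .spawn(|this, mut cx| async move {
    ///             let parsed = task.await;
    ///             this.update(&mut cx, |this, cx| apply(this, parsed, cx)).ok();
    ///         })
    ///         .detach(),
    /// }
    /// ```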
1036 pub fn reparse(&mut self, cx: &mut ModelContext<Self>) {
1037 if self.parsing_in_background {
1038 return;
1039 }
1040 let language = if let Some(language) = self.language.clone() {
1041 language
1042 } else {
1043 return;
1044 };
1045
1046 let text = self.text_snapshot();
1047 let parsed_version = self.version();
1048
1049 let mut syntax_map = self.syntax_map.lock();
1050 syntax_map.interpolate(&text);
1051 let language_registry = syntax_map.language_registry();
1052 let mut syntax_snapshot = syntax_map.snapshot();
1053 drop(syntax_map);
1054
1055 let parse_task = cx.background_executor().spawn({
1056 let language = language.clone();
1057 let language_registry = language_registry.clone();
1058 async move {
1059 syntax_snapshot.reparse(&text, language_registry, language);
1060 syntax_snapshot
1061 }
1062 });
1063
1064 match cx
1065 .background_executor()
1066 .block_with_timeout(self.sync_parse_timeout, parse_task)
1067 {
1068 Ok(new_syntax_snapshot) => {
1069 self.did_finish_parsing(new_syntax_snapshot, cx);
1070 return;
1071 }
1072 Err(parse_task) => {
1073 self.parsing_in_background = true;
1074 cx.spawn(move |this, mut cx| async move {
1075 let new_syntax_map = parse_task.await;
1076 this.update(&mut cx, move |this, cx| {
1077 let grammar_changed =
1078 this.language.as_ref().map_or(true, |current_language| {
1079 !Arc::ptr_eq(&language, current_language)
1080 });
1081 let language_registry_changed = new_syntax_map
1082 .contains_unknown_injections()
1083 && language_registry.map_or(false, |registry| {
1084 registry.version() != new_syntax_map.language_registry_version()
1085 });
1086 let parse_again = language_registry_changed
1087 || grammar_changed
1088 || this.version.changed_since(&parsed_version);
1089 this.did_finish_parsing(new_syntax_map, cx);
1090 this.parsing_in_background = false;
1091 if parse_again {
1092 this.reparse(cx);
1093 }
1094 })
1095 .ok();
1096 })
1097 .detach();
1098 }
1099 }
1100 }
1101
1102 fn did_finish_parsing(&mut self, syntax_snapshot: SyntaxSnapshot, cx: &mut ModelContext<Self>) {
1103 self.parse_count += 1;
1104 self.syntax_map.lock().did_parse(syntax_snapshot);
1105 self.request_autoindent(cx);
1106 cx.emit(Event::Reparsed);
1107 cx.notify();
1108 }
1109
1110 /// Assign to the buffer a set of diagnostics created by a given language server.
1111 pub fn update_diagnostics(
1112 &mut self,
1113 server_id: LanguageServerId,
1114 diagnostics: DiagnosticSet,
1115 cx: &mut ModelContext<Self>,
1116 ) {
1117 let lamport_timestamp = self.text.lamport_clock.tick();
1118 let op = Operation::UpdateDiagnostics {
1119 server_id,
1120 diagnostics: diagnostics.iter().cloned().collect(),
1121 lamport_timestamp,
1122 };
1123 self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
1124 self.send_operation(op, cx);
1125 }
1126
1127 fn request_autoindent(&mut self, cx: &mut ModelContext<Self>) {
1128 if let Some(indent_sizes) = self.compute_autoindents() {
1129 let indent_sizes = cx.background_executor().spawn(indent_sizes);
1130 match cx
1131 .background_executor()
1132 .block_with_timeout(Duration::from_micros(500), indent_sizes)
1133 {
1134 Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
1135 Err(indent_sizes) => {
1136 self.pending_autoindent = Some(cx.spawn(|this, mut cx| async move {
1137 let indent_sizes = indent_sizes.await;
1138 this.update(&mut cx, |this, cx| {
1139 this.apply_autoindents(indent_sizes, cx);
1140 })
1141 .ok();
1142 }));
1143 }
1144 }
1145 } else {
1146 self.autoindent_requests.clear();
1147 }
1148 }
1149
1150 fn compute_autoindents(&self) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>>> {
1151 let max_rows_between_yields = 100;
1152 let snapshot = self.snapshot();
1153 if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
1154 return None;
1155 }
1156
1157 let autoindent_requests = self.autoindent_requests.clone();
1158 Some(async move {
1159 let mut indent_sizes = BTreeMap::new();
1160 for request in autoindent_requests {
1161 // Resolve each edited range to its row in the current buffer and in the
1162 // buffer before this batch of edits.
1163 let mut row_ranges = Vec::new();
1164 let mut old_to_new_rows = BTreeMap::new();
1165 let mut language_indent_sizes_by_new_row = Vec::new();
1166 for entry in &request.entries {
1167 let position = entry.range.start;
1168 let new_row = position.to_point(&snapshot).row;
1169 let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
1170 language_indent_sizes_by_new_row.push((new_row, entry.indent_size));
1171
1172 if !entry.first_line_is_new {
1173 let old_row = position.to_point(&request.before_edit).row;
1174 old_to_new_rows.insert(old_row, new_row);
1175 }
1176 row_ranges.push((new_row..new_end_row, entry.original_indent_column));
1177 }
1178
1179 // Build a map containing the suggested indentation for each of the edited lines
1180 // with respect to the state of the buffer before these edits. This map is keyed
1181 // by the rows for these lines in the current state of the buffer.
1182 let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
1183 let old_edited_ranges =
1184 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
1185 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1186 let mut language_indent_size = IndentSize::default();
1187 for old_edited_range in old_edited_ranges {
1188 let suggestions = request
1189 .before_edit
1190 .suggest_autoindents(old_edited_range.clone())
1191 .into_iter()
1192 .flatten();
1193 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
1194 if let Some(suggestion) = suggestion {
1195 let new_row = *old_to_new_rows.get(&old_row).unwrap();
1196
1197 // Find the indent size based on the language for this row.
1198 while let Some((row, size)) = language_indent_sizes.peek() {
1199 if *row > new_row {
1200 break;
1201 }
1202 language_indent_size = *size;
1203 language_indent_sizes.next();
1204 }
1205
1206 let suggested_indent = old_to_new_rows
1207 .get(&suggestion.basis_row)
1208 .and_then(|from_row| {
1209 Some(old_suggestions.get(from_row).copied()?.0)
1210 })
1211 .unwrap_or_else(|| {
1212 request
1213 .before_edit
1214 .indent_size_for_line(suggestion.basis_row)
1215 })
1216 .with_delta(suggestion.delta, language_indent_size);
1217 old_suggestions
1218 .insert(new_row, (suggested_indent, suggestion.within_error));
1219 }
1220 }
1221 yield_now().await;
1222 }
1223
1224 // In block mode, only compute indentation suggestions for the first line
1225 // of each insertion. Otherwise, compute suggestions for every inserted line.
1226 let new_edited_row_ranges = contiguous_ranges(
1227 row_ranges.iter().flat_map(|(range, _)| {
1228 if request.is_block_mode {
1229 range.start..range.start + 1
1230 } else {
1231 range.clone()
1232 }
1233 }),
1234 max_rows_between_yields,
1235 );
1236
1237 // Compute new suggestions for each line, but only include them in the result
1238 // if they differ from the old suggestion for that line.
1239 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1240 let mut language_indent_size = IndentSize::default();
1241 for new_edited_row_range in new_edited_row_ranges {
1242 let suggestions = snapshot
1243 .suggest_autoindents(new_edited_row_range.clone())
1244 .into_iter()
1245 .flatten();
1246 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
1247 if let Some(suggestion) = suggestion {
1248 // Find the indent size based on the language for this row.
1249 while let Some((row, size)) = language_indent_sizes.peek() {
1250 if *row > new_row {
1251 break;
1252 }
1253 language_indent_size = *size;
1254 language_indent_sizes.next();
1255 }
1256
1257 let suggested_indent = indent_sizes
1258 .get(&suggestion.basis_row)
1259 .copied()
1260 .unwrap_or_else(|| {
1261 snapshot.indent_size_for_line(suggestion.basis_row)
1262 })
1263 .with_delta(suggestion.delta, language_indent_size);
1264 if old_suggestions.get(&new_row).map_or(
1265 true,
1266 |(old_indentation, was_within_error)| {
1267 suggested_indent != *old_indentation
1268 && (!suggestion.within_error || *was_within_error)
1269 },
1270 ) {
1271 indent_sizes.insert(new_row, suggested_indent);
1272 }
1273 }
1274 }
1275 yield_now().await;
1276 }
1277
1278 // For each block of inserted text, adjust the indentation of the remaining
1279 // lines of the block by the same amount as the first line was adjusted.
1280 if request.is_block_mode {
1281 for (row_range, original_indent_column) in
1282 row_ranges
1283 .into_iter()
1284 .filter_map(|(range, original_indent_column)| {
1285 if range.len() > 1 {
1286 Some((range, original_indent_column?))
1287 } else {
1288 None
1289 }
1290 })
1291 {
1292 let new_indent = indent_sizes
1293 .get(&row_range.start)
1294 .copied()
1295 .unwrap_or_else(|| snapshot.indent_size_for_line(row_range.start));
1296 let delta = new_indent.len as i64 - original_indent_column as i64;
1297 if delta != 0 {
1298 for row in row_range.skip(1) {
1299 indent_sizes.entry(row).or_insert_with(|| {
1300 let mut size = snapshot.indent_size_for_line(row);
1301 if size.kind == new_indent.kind {
1302 match delta.cmp(&0) {
1303 Ordering::Greater => size.len += delta as u32,
1304 Ordering::Less => {
1305 size.len = size.len.saturating_sub(-delta as u32)
1306 }
1307 Ordering::Equal => {}
1308 }
1309 }
1310 size
1311 });
1312 }
1313 }
1314 }
1315 }
1316 }
1317
1318 indent_sizes
1319 })
1320 }
1321
1322 fn apply_autoindents(
1323 &mut self,
1324 indent_sizes: BTreeMap<u32, IndentSize>,
1325 cx: &mut ModelContext<Self>,
1326 ) {
1327 self.autoindent_requests.clear();
1328
1329 let edits: Vec<_> = indent_sizes
1330 .into_iter()
1331 .filter_map(|(row, indent_size)| {
1332 let current_size = indent_size_for_line(self, row);
1333 Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
1334 })
1335 .collect();
1336
1337 self.edit(edits, None, cx);
1338 }
1339
1340 /// Create a minimal edit that will cause the given row to be indented
1341 /// with the given size. After applying this edit, the length of the line
1342 /// will always be at least `new_size.len`.
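    ///
    /// A worked sketch (hypothetical values): going from an indentation of two
    /// spaces to four on row 3 yields an insertion of two spaces at the start
    /// of that row, while going from four to two yields a deletion of the
    /// first two columns:
    ///
    /// ```ignore
    /// let two = IndentSize { len: 2, kind: IndentKind::Space };
    /// let four = IndentSize { len: 4, kind: IndentKind::Space };
    /// assert_eq!(
    ///     Buffer::edit_for_indent_size_adjustment(3, two, four),
    ///     Some((Point::new(3, 0)..Point::new(3, 0), "  ".to_string())),
    /// );
    /// assert_eq!(
    ///     Buffer::edit_for_indent_size_adjustment(3, four, two),
    ///     Some((Point::new(3, 0)..Point::new(3, 2), String::new())),
    /// );
    /// ```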
1343 pub fn edit_for_indent_size_adjustment(
1344 row: u32,
1345 current_size: IndentSize,
1346 new_size: IndentSize,
1347 ) -> Option<(Range<Point>, String)> {
1348 if new_size.kind == current_size.kind {
            match new_size.len.cmp(&current_size.len) {
1350 Ordering::Greater => {
1351 let point = Point::new(row, 0);
1352 Some((
1353 point..point,
1354 iter::repeat(new_size.char())
1355 .take((new_size.len - current_size.len) as usize)
1356 .collect::<String>(),
1357 ))
1358 }
1359
1360 Ordering::Less => Some((
1361 Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
1362 String::new(),
1363 )),
1364
1365 Ordering::Equal => None,
1366 }
1367 } else {
1368 Some((
1369 Point::new(row, 0)..Point::new(row, current_size.len),
1370 iter::repeat(new_size.char())
1371 .take(new_size.len as usize)
1372 .collect::<String>(),
1373 ))
1374 }
1375 }
1376
1377 /// Spawns a background task that asynchronously computes a `Diff` between the buffer's text
1378 /// and the given new text.
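    ///
    /// A usage sketch: computing a diff against replacement text and applying
    /// it, which preserves anchors in the unchanged regions (names are
    /// illustrative):
    ///
    /// ```ignore
    /// let diff = buffer.diff(new_contents, cx);
    /// cx.spawn(|buffer, mut cx| async move {
    ///     let diff = diff.await;
    ///     buffer
    ///         .update(&mut cx, |buffer, cx| buffer.apply_diff(diff, cx))
    ///         .ok();
    /// })
    /// .detach();
    /// ```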
1379 pub fn diff(&self, mut new_text: String, cx: &AppContext) -> Task<Diff> {
1380 let old_text = self.as_rope().clone();
1381 let base_version = self.version();
1382 cx.background_executor()
1383 .spawn_labeled(*BUFFER_DIFF_TASK, async move {
1384 let old_text = old_text.to_string();
1385 let line_ending = LineEnding::detect(&new_text);
1386 LineEnding::normalize(&mut new_text);
1387
1388 let diff = TextDiff::from_chars(old_text.as_str(), new_text.as_str());
1389 let empty: Arc<str> = "".into();
1390
1391 let mut edits = Vec::new();
1392 let mut old_offset = 0;
1393 let mut new_offset = 0;
1394 let mut last_edit: Option<(Range<usize>, Range<usize>)> = None;
1395 for change in diff.iter_all_changes().map(Some).chain([None]) {
1396 if let Some(change) = &change {
1397 let len = change.value().len();
1398 match change.tag() {
1399 ChangeTag::Equal => {
1400 old_offset += len;
1401 new_offset += len;
1402 }
1403 ChangeTag::Delete => {
1404 let old_end_offset = old_offset + len;
1405 if let Some((last_old_range, _)) = &mut last_edit {
1406 last_old_range.end = old_end_offset;
1407 } else {
1408 last_edit =
1409 Some((old_offset..old_end_offset, new_offset..new_offset));
1410 }
1411 old_offset = old_end_offset;
1412 }
1413 ChangeTag::Insert => {
1414 let new_end_offset = new_offset + len;
1415 if let Some((_, last_new_range)) = &mut last_edit {
1416 last_new_range.end = new_end_offset;
1417 } else {
1418 last_edit =
1419 Some((old_offset..old_offset, new_offset..new_end_offset));
1420 }
1421 new_offset = new_end_offset;
1422 }
1423 }
1424 }
1425
1426 if let Some((old_range, new_range)) = &last_edit {
1427 if old_offset > old_range.end
1428 || new_offset > new_range.end
1429 || change.is_none()
1430 {
1431 let text = if new_range.is_empty() {
1432 empty.clone()
1433 } else {
1434 new_text[new_range.clone()].into()
1435 };
1436 edits.push((old_range.clone(), text));
1437 last_edit.take();
1438 }
1439 }
1440 }
1441
1442 Diff {
1443 base_version,
1444 line_ending,
1445 edits,
1446 }
1447 })
1448 }
1449
1450 /// Spawns a background task that searches the buffer for any whitespace
    /// at the ends of lines, and returns a `Diff` that removes that whitespace.
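    ///
    /// A short sketch, following the same pattern as [`Buffer::diff`]:
    ///
    /// ```ignore
    /// let diff = buffer.remove_trailing_whitespace(cx).await;
    /// buffer.apply_diff(diff, cx);
    /// ```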
1452 pub fn remove_trailing_whitespace(&self, cx: &AppContext) -> Task<Diff> {
1453 let old_text = self.as_rope().clone();
1454 let line_ending = self.line_ending();
1455 let base_version = self.version();
1456 cx.background_executor().spawn(async move {
1457 let ranges = trailing_whitespace_ranges(&old_text);
1458 let empty = Arc::<str>::from("");
1459 Diff {
1460 base_version,
1461 line_ending,
1462 edits: ranges
1463 .into_iter()
1464 .map(|range| (range, empty.clone()))
1465 .collect(),
1466 }
1467 })
1468 }
1469
1470 /// Ensures that the buffer ends with a single newline character, and
1471 /// no other whitespace.
1472 pub fn ensure_final_newline(&mut self, cx: &mut ModelContext<Self>) {
1473 let len = self.len();
1474 let mut offset = len;
1475 for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
1476 let non_whitespace_len = chunk
1477 .trim_end_matches(|c: char| c.is_ascii_whitespace())
1478 .len();
1479 offset -= chunk.len();
1480 offset += non_whitespace_len;
1481 if non_whitespace_len != 0 {
1482 if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
1483 return;
1484 }
1485 break;
1486 }
1487 }
1488 self.edit([(offset..len, "\n")], None, cx);
1489 }
1490
1491 /// Applies a diff to the buffer. If the buffer has changed since the given diff was
1492 /// calculated, then adjust the diff to account for those changes, and discard any
1493 /// parts of the diff that conflict with those changes.
1494 pub fn apply_diff(&mut self, diff: Diff, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
1495 // Check for any edits to the buffer that have occurred since this diff
1496 // was computed.
1497 let snapshot = self.snapshot();
1498 let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
1499 let mut delta = 0;
1500 let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
1501 while let Some(edit_since) = edits_since.peek() {
1502 // If the edit occurs after a diff hunk, then it does not
1503 // affect that hunk.
1504 if edit_since.old.start > range.end {
1505 break;
1506 }
1507 // If the edit precedes the diff hunk, then adjust the hunk
1508 // to reflect the edit.
1509 else if edit_since.old.end < range.start {
1510 delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
1511 edits_since.next();
1512 }
1513 // If the edit intersects a diff hunk, then discard that hunk.
1514 else {
1515 return None;
1516 }
1517 }
1518
1519 let start = (range.start as i64 + delta) as usize;
1520 let end = (range.end as i64 + delta) as usize;
1521 Some((start..end, new_text))
1522 });
1523
1524 self.start_transaction();
1525 self.text.set_line_ending(diff.line_ending);
1526 self.edit(adjusted_edits, None, cx);
1527 self.end_transaction(cx)
1528 }
1529
1530 /// Checks if the buffer has unsaved changes.
1531 pub fn is_dirty(&self) -> bool {
1532 self.has_conflict
1533 || self.has_edits_since(&self.saved_version)
1534 || self
1535 .file
1536 .as_ref()
1537 .map_or(false, |file| file.is_deleted() || !file.is_created())
1538 }
1539
1540 /// Checks if the buffer and its file have both changed since the buffer
1541 /// was last saved or reloaded.
1542 pub fn has_conflict(&self) -> bool {
1543 self.has_conflict
1544 || self.file.as_ref().map_or(false, |file| {
1545 file.mtime() > self.saved_mtime && self.has_edits_since(&self.saved_version)
1546 })
1547 }
1548
1549 /// Gets a [`Subscription`] that tracks all of the changes to the buffer's text.
1550 pub fn subscribe(&mut self) -> Subscription {
1551 self.text.subscribe()
1552 }
1553
1554 /// Starts a transaction, if one is not already in-progress. When undoing or
1555 /// redoing edits, all of the edits performed within a transaction are undone
1556 /// or redone together.
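    ///
    /// A sketch of grouping several edits into one undo step (illustrative
    /// offsets):
    ///
    /// ```ignore
    /// buffer.start_transaction();
    /// buffer.edit([(0..0, "a")], None, cx);
    /// buffer.edit([(1..1, "b")], None, cx);
    /// buffer.end_transaction(cx);
    /// // A single undo now reverts both edits.
    /// ```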
1557 pub fn start_transaction(&mut self) -> Option<TransactionId> {
1558 self.start_transaction_at(Instant::now())
1559 }
1560
1561 /// Starts a transaction, providing the current time. Subsequent transactions
1562 /// that occur within a short period of time will be grouped together. This
1563 /// is controlled by the buffer's undo grouping duration.
1564 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
1565 self.transaction_depth += 1;
1566 if self.was_dirty_before_starting_transaction.is_none() {
1567 self.was_dirty_before_starting_transaction = Some(self.is_dirty());
1568 }
1569 self.text.start_transaction_at(now)
1570 }
1571
1572 /// Terminates the current transaction, if this is the outermost transaction.
1573 pub fn end_transaction(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
1574 self.end_transaction_at(Instant::now(), cx)
1575 }
1576
1577 /// Terminates the current transaction, providing the current time. Subsequent transactions
1578 /// that occur within a short period of time will be grouped together. This
1579 /// is controlled by the buffer's undo grouping duration.
1580 pub fn end_transaction_at(
1581 &mut self,
1582 now: Instant,
1583 cx: &mut ModelContext<Self>,
1584 ) -> Option<TransactionId> {
1585 assert!(self.transaction_depth > 0);
1586 self.transaction_depth -= 1;
1587 let was_dirty = if self.transaction_depth == 0 {
1588 self.was_dirty_before_starting_transaction.take().unwrap()
1589 } else {
1590 false
1591 };
1592 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
1593 self.did_edit(&start_version, was_dirty, cx);
1594 Some(transaction_id)
1595 } else {
1596 None
1597 }
1598 }
1599
1600 /// Manually add a transaction to the buffer's undo history.
1601 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
1602 self.text.push_transaction(transaction, now);
1603 }
1604
1605 /// Prevent the last transaction from being grouped with any subsequent transactions,
    /// even if they occur within the buffer's undo grouping duration.
1607 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
1608 self.text.finalize_last_transaction()
1609 }
1610
1611 /// Manually group all changes since a given transaction.
1612 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
1613 self.text.group_until_transaction(transaction_id);
1614 }
1615
    /// Manually remove a transaction from the buffer's undo history.
1617 pub fn forget_transaction(&mut self, transaction_id: TransactionId) {
1618 self.text.forget_transaction(transaction_id);
1619 }
1620
1621 /// Manually merge two adjacent transactions in the buffer's undo history.
1622 pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
1623 self.text.merge_transactions(transaction, destination);
1624 }
1625
1626 /// Waits for the buffer to receive operations with the given timestamps.
1627 pub fn wait_for_edits(
1628 &mut self,
1629 edit_ids: impl IntoIterator<Item = clock::Lamport>,
1630 ) -> impl Future<Output = Result<()>> {
1631 self.text.wait_for_edits(edit_ids)
1632 }
1633
1634 /// Waits for the buffer to receive the operations necessary for resolving the given anchors.
1635 pub fn wait_for_anchors(
1636 &mut self,
1637 anchors: impl IntoIterator<Item = Anchor>,
1638 ) -> impl 'static + Future<Output = Result<()>> {
1639 self.text.wait_for_anchors(anchors)
1640 }
1641
1642 /// Waits for the buffer to receive operations up to the given version.
1643 pub fn wait_for_version(&mut self, version: clock::Global) -> impl Future<Output = Result<()>> {
1644 self.text.wait_for_version(version)
1645 }
1646
    /// Forces all futures returned by [`Buffer::wait_for_edits`], [`Buffer::wait_for_anchors`],
    /// or [`Buffer::wait_for_version`] to resolve with an error.
1649 pub fn give_up_waiting(&mut self) {
1650 self.text.give_up_waiting();
1651 }
1652
1653 /// Stores a set of selections that should be broadcasted to all of the buffer's replicas.
1654 pub fn set_active_selections(
1655 &mut self,
1656 selections: Arc<[Selection<Anchor>]>,
1657 line_mode: bool,
1658 cursor_shape: CursorShape,
1659 cx: &mut ModelContext<Self>,
1660 ) {
1661 let lamport_timestamp = self.text.lamport_clock.tick();
1662 self.remote_selections.insert(
1663 self.text.replica_id(),
1664 SelectionSet {
1665 selections: selections.clone(),
1666 lamport_timestamp,
1667 line_mode,
1668 cursor_shape,
1669 },
1670 );
1671 self.send_operation(
1672 Operation::UpdateSelections {
1673 selections,
1674 line_mode,
1675 lamport_timestamp,
1676 cursor_shape,
1677 },
1678 cx,
1679 );
1680 }
1681
1682 /// Clears the selections, so that other replicas of the buffer do not see any selections for
1683 /// this replica.
1684 pub fn remove_active_selections(&mut self, cx: &mut ModelContext<Self>) {
1685 if self
1686 .remote_selections
1687 .get(&self.text.replica_id())
1688 .map_or(true, |set| !set.selections.is_empty())
1689 {
1690 self.set_active_selections(Arc::from([]), false, Default::default(), cx);
1691 }
1692 }
1693
1694 /// Replaces the buffer's entire text.
1695 pub fn set_text<T>(&mut self, text: T, cx: &mut ModelContext<Self>) -> Option<clock::Lamport>
1696 where
1697 T: Into<Arc<str>>,
1698 {
1699 self.autoindent_requests.clear();
1700 self.edit([(0..self.len(), text)], None, cx)
1701 }
1702
1703 /// Applies the given edits to the buffer. Each edit is specified as a range of text to
1704 /// delete, and a string of text to insert at that location.
1705 ///
1706 /// If an [`AutoindentMode`] is provided, then the buffer will enqueue an auto-indent
1707 /// request for the edited ranges, which will be processed when the buffer finishes
1708 /// parsing.
1709 ///
1710 /// Parsing takes place at the end of a transaction, and may compute synchronously
1711 /// or asynchronously, depending on the changes.
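    ///
    /// A usage sketch (offsets and text are illustrative; any `ToOffset`
    /// positions, such as [`Point`]s, can be used for the ranges):
    ///
    /// ```ignore
    /// // Delete the first three bytes and insert "bar" at offset 10.
    /// buffer.edit([(0..3, ""), (10..10, "bar")], None, cx);
    ///
    /// // Insert a snippet and re-indent each of its lines.
    /// buffer.edit(
    ///     [(0..0, "fn foo() {\n    1 + 1\n}\n")],
    ///     Some(AutoindentMode::EachLine),
    ///     cx,
    /// );
    /// ```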
1712 pub fn edit<I, S, T>(
1713 &mut self,
1714 edits_iter: I,
1715 autoindent_mode: Option<AutoindentMode>,
1716 cx: &mut ModelContext<Self>,
1717 ) -> Option<clock::Lamport>
1718 where
1719 I: IntoIterator<Item = (Range<S>, T)>,
1720 S: ToOffset,
1721 T: Into<Arc<str>>,
1722 {
1723 // Skip invalid edits and coalesce contiguous ones.
1724 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
1725 for (range, new_text) in edits_iter {
1726 let mut range = range.start.to_offset(self)..range.end.to_offset(self);
1727 if range.start > range.end {
1728 mem::swap(&mut range.start, &mut range.end);
1729 }
1730 let new_text = new_text.into();
1731 if !new_text.is_empty() || !range.is_empty() {
1732 if let Some((prev_range, prev_text)) = edits.last_mut() {
1733 if prev_range.end >= range.start {
1734 prev_range.end = cmp::max(prev_range.end, range.end);
1735 *prev_text = format!("{prev_text}{new_text}").into();
1736 } else {
1737 edits.push((range, new_text));
1738 }
1739 } else {
1740 edits.push((range, new_text));
1741 }
1742 }
1743 }
1744 if edits.is_empty() {
1745 return None;
1746 }
1747
1748 self.start_transaction();
1749 self.pending_autoindent.take();
1750 let autoindent_request = autoindent_mode
1751 .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
1752
1753 let edit_operation = self.text.edit(edits.iter().cloned());
1754 let edit_id = edit_operation.timestamp();
1755
1756 if let Some((before_edit, mode)) = autoindent_request {
1757 let mut delta = 0isize;
1758 let entries = edits
1759 .into_iter()
1760 .enumerate()
1761 .zip(&edit_operation.as_edit().unwrap().new_text)
1762 .map(|((ix, (range, _)), new_text)| {
1763 let new_text_length = new_text.len();
1764 let old_start = range.start.to_point(&before_edit);
1765 let new_start = (delta + range.start as isize) as usize;
1766 delta += new_text_length as isize - (range.end as isize - range.start as isize);
1767
1768 let mut range_of_insertion_to_indent = 0..new_text_length;
1769 let mut first_line_is_new = false;
1770 let mut original_indent_column = None;
1771
1772 // When inserting an entire line at the beginning of an existing line,
1773 // treat the insertion as new.
1774 if new_text.contains('\n')
1775 && old_start.column <= before_edit.indent_size_for_line(old_start.row).len
1776 {
1777 first_line_is_new = true;
1778 }
1779
1780 // When inserting text starting with a newline, avoid auto-indenting the
1781 // previous line.
1782 if new_text.starts_with('\n') {
1783 range_of_insertion_to_indent.start += 1;
1784 first_line_is_new = true;
1785 }
1786
1787 // Avoid auto-indenting after the insertion.
1788 if let AutoindentMode::Block {
1789 original_indent_columns,
1790 } = &mode
1791 {
1792 original_indent_column =
1793 Some(original_indent_columns.get(ix).copied().unwrap_or_else(|| {
1794 indent_size_for_text(
1795 new_text[range_of_insertion_to_indent.clone()].chars(),
1796 )
1797 .len
1798 }));
1799 if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
1800 range_of_insertion_to_indent.end -= 1;
1801 }
1802 }
1803
1804 AutoindentRequestEntry {
1805 first_line_is_new,
1806 original_indent_column,
1807 indent_size: before_edit.language_indent_size_at(range.start, cx),
1808 range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
1809 ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
1810 }
1811 })
1812 .collect();
1813
1814 self.autoindent_requests.push(Arc::new(AutoindentRequest {
1815 before_edit,
1816 entries,
1817 is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
1818 }));
1819 }
1820
1821 self.end_transaction(cx);
1822 self.send_operation(Operation::Buffer(edit_operation), cx);
1823 Some(edit_id)
1824 }
1825
1826 fn did_edit(
1827 &mut self,
1828 old_version: &clock::Global,
1829 was_dirty: bool,
1830 cx: &mut ModelContext<Self>,
1831 ) {
1832 if self.edits_since::<usize>(old_version).next().is_none() {
1833 return;
1834 }
1835
1836 self.reparse(cx);
1837
1838 cx.emit(Event::Edited);
1839 if was_dirty != self.is_dirty() {
1840 cx.emit(Event::DirtyChanged);
1841 }
1842 cx.notify();
1843 }
1844
1845 /// Applies the given remote operations to the buffer.
1846 pub fn apply_ops<I: IntoIterator<Item = Operation>>(
1847 &mut self,
1848 ops: I,
1849 cx: &mut ModelContext<Self>,
1850 ) -> Result<()> {
1851 self.pending_autoindent.take();
1852 let was_dirty = self.is_dirty();
1853 let old_version = self.version.clone();
1854 let mut deferred_ops = Vec::new();
1855 let buffer_ops = ops
1856 .into_iter()
1857 .filter_map(|op| match op {
1858 Operation::Buffer(op) => Some(op),
1859 _ => {
1860 if self.can_apply_op(&op) {
1861 self.apply_op(op, cx);
1862 } else {
1863 deferred_ops.push(op);
1864 }
1865 None
1866 }
1867 })
1868 .collect::<Vec<_>>();
1869 self.text.apply_ops(buffer_ops)?;
1870 self.deferred_ops.insert(deferred_ops);
1871 self.flush_deferred_ops(cx);
1872 self.did_edit(&old_version, was_dirty, cx);
        // Notify regardless of whether the buffer was edited, because the operations could
        // include a selection update.
1875 cx.notify();
1876 Ok(())
1877 }
1878
1879 fn flush_deferred_ops(&mut self, cx: &mut ModelContext<Self>) {
1880 let mut deferred_ops = Vec::new();
1881 for op in self.deferred_ops.drain().iter().cloned() {
1882 if self.can_apply_op(&op) {
1883 self.apply_op(op, cx);
1884 } else {
1885 deferred_ops.push(op);
1886 }
1887 }
1888 self.deferred_ops.insert(deferred_ops);
1889 }
1890
1891 fn can_apply_op(&self, operation: &Operation) -> bool {
1892 match operation {
1893 Operation::Buffer(_) => {
1894 unreachable!("buffer operations should never be applied at this layer")
1895 }
1896 Operation::UpdateDiagnostics {
1897 diagnostics: diagnostic_set,
1898 ..
1899 } => diagnostic_set.iter().all(|diagnostic| {
1900 self.text.can_resolve(&diagnostic.range.start)
1901 && self.text.can_resolve(&diagnostic.range.end)
1902 }),
1903 Operation::UpdateSelections { selections, .. } => selections
1904 .iter()
1905 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
1906 Operation::UpdateCompletionTriggers { .. } => true,
1907 }
1908 }
1909
1910 fn apply_op(&mut self, operation: Operation, cx: &mut ModelContext<Self>) {
1911 match operation {
1912 Operation::Buffer(_) => {
1913 unreachable!("buffer operations should never be applied at this layer")
1914 }
1915 Operation::UpdateDiagnostics {
1916 server_id,
1917 diagnostics: diagnostic_set,
1918 lamport_timestamp,
1919 } => {
1920 let snapshot = self.snapshot();
1921 self.apply_diagnostic_update(
1922 server_id,
1923 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
1924 lamport_timestamp,
1925 cx,
1926 );
1927 }
1928 Operation::UpdateSelections {
1929 selections,
1930 lamport_timestamp,
1931 line_mode,
1932 cursor_shape,
1933 } => {
1934 if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id) {
1935 if set.lamport_timestamp > lamport_timestamp {
1936 return;
1937 }
1938 }
1939
1940 self.remote_selections.insert(
1941 lamport_timestamp.replica_id,
1942 SelectionSet {
1943 selections,
1944 lamport_timestamp,
1945 line_mode,
1946 cursor_shape,
1947 },
1948 );
1949 self.text.lamport_clock.observe(lamport_timestamp);
1950 self.selections_update_count += 1;
1951 }
1952 Operation::UpdateCompletionTriggers {
1953 triggers,
1954 lamport_timestamp,
1955 } => {
1956 self.completion_triggers = triggers;
1957 self.text.lamport_clock.observe(lamport_timestamp);
1958 }
1959 }
1960 }
1961
1962 fn apply_diagnostic_update(
1963 &mut self,
1964 server_id: LanguageServerId,
1965 diagnostics: DiagnosticSet,
1966 lamport_timestamp: clock::Lamport,
1967 cx: &mut ModelContext<Self>,
1968 ) {
1969 if lamport_timestamp > self.diagnostics_timestamp {
1970 let ix = self.diagnostics.binary_search_by_key(&server_id, |e| e.0);
1971 if diagnostics.len() == 0 {
1972 if let Ok(ix) = ix {
1973 self.diagnostics.remove(ix);
1974 }
1975 } else {
1976 match ix {
1977 Err(ix) => self.diagnostics.insert(ix, (server_id, diagnostics)),
1978 Ok(ix) => self.diagnostics[ix].1 = diagnostics,
1979 };
1980 }
1981 self.diagnostics_timestamp = lamport_timestamp;
1982 self.diagnostics_update_count += 1;
1983 self.text.lamport_clock.observe(lamport_timestamp);
1984 cx.notify();
1985 cx.emit(Event::DiagnosticsUpdated);
1986 }
1987 }
1988
1989 fn send_operation(&mut self, operation: Operation, cx: &mut ModelContext<Self>) {
1990 cx.emit(Event::Operation(operation));
1991 }
1992
1993 /// Removes the selections for a given peer.
1994 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut ModelContext<Self>) {
1995 self.remote_selections.remove(&replica_id);
1996 cx.notify();
1997 }
1998
1999 /// Undoes the most recent transaction.
2000 pub fn undo(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
2001 let was_dirty = self.is_dirty();
2002 let old_version = self.version.clone();
2003
2004 if let Some((transaction_id, operation)) = self.text.undo() {
2005 self.send_operation(Operation::Buffer(operation), cx);
2006 self.did_edit(&old_version, was_dirty, cx);
2007 Some(transaction_id)
2008 } else {
2009 None
2010 }
2011 }
2012
2013 /// Manually undoes a specific transaction in the buffer's undo history.
2014 pub fn undo_transaction(
2015 &mut self,
2016 transaction_id: TransactionId,
2017 cx: &mut ModelContext<Self>,
2018 ) -> bool {
2019 let was_dirty = self.is_dirty();
2020 let old_version = self.version.clone();
2021 if let Some(operation) = self.text.undo_transaction(transaction_id) {
2022 self.send_operation(Operation::Buffer(operation), cx);
2023 self.did_edit(&old_version, was_dirty, cx);
2024 true
2025 } else {
2026 false
2027 }
2028 }
2029
2030 /// Manually undoes all changes after a given transaction in the buffer's undo history.
2031 pub fn undo_to_transaction(
2032 &mut self,
2033 transaction_id: TransactionId,
2034 cx: &mut ModelContext<Self>,
2035 ) -> bool {
2036 let was_dirty = self.is_dirty();
2037 let old_version = self.version.clone();
2038
2039 let operations = self.text.undo_to_transaction(transaction_id);
2040 let undone = !operations.is_empty();
2041 for operation in operations {
2042 self.send_operation(Operation::Buffer(operation), cx);
2043 }
2044 if undone {
2045 self.did_edit(&old_version, was_dirty, cx)
2046 }
2047 undone
2048 }
2049
    /// Redoes the most recently undone transaction.
2051 pub fn redo(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
2052 let was_dirty = self.is_dirty();
2053 let old_version = self.version.clone();
2054
2055 if let Some((transaction_id, operation)) = self.text.redo() {
2056 self.send_operation(Operation::Buffer(operation), cx);
2057 self.did_edit(&old_version, was_dirty, cx);
2058 Some(transaction_id)
2059 } else {
2060 None
2061 }
2062 }
2063
    /// Manually redoes all changes until a given transaction in the buffer's redo history.
2065 pub fn redo_to_transaction(
2066 &mut self,
2067 transaction_id: TransactionId,
2068 cx: &mut ModelContext<Self>,
2069 ) -> bool {
2070 let was_dirty = self.is_dirty();
2071 let old_version = self.version.clone();
2072
2073 let operations = self.text.redo_to_transaction(transaction_id);
2074 let redone = !operations.is_empty();
2075 for operation in operations {
2076 self.send_operation(Operation::Buffer(operation), cx);
2077 }
2078 if redone {
2079 self.did_edit(&old_version, was_dirty, cx)
2080 }
2081 redone
2082 }
2083
    /// Overrides the current completion triggers with the user-provided completion triggers.
2085 pub fn set_completion_triggers(&mut self, triggers: Vec<String>, cx: &mut ModelContext<Self>) {
2086 self.completion_triggers.clone_from(&triggers);
2087 self.completion_triggers_timestamp = self.text.lamport_clock.tick();
2088 self.send_operation(
2089 Operation::UpdateCompletionTriggers {
2090 triggers,
2091 lamport_timestamp: self.completion_triggers_timestamp,
2092 },
2093 cx,
2094 );
2095 cx.notify();
2096 }
2097
2098 /// Returns a list of strings which trigger a completion menu for this language.
    /// Usually this is driven by the LSP server, which returns a list of trigger characters for completions.
2100 pub fn completion_triggers(&self) -> &[String] {
2101 &self.completion_triggers
2102 }
2103}
2104
2105#[doc(hidden)]
2106#[cfg(any(test, feature = "test-support"))]
2107impl Buffer {
2108 pub fn edit_via_marked_text(
2109 &mut self,
2110 marked_string: &str,
2111 autoindent_mode: Option<AutoindentMode>,
2112 cx: &mut ModelContext<Self>,
2113 ) {
2114 let edits = self.edits_for_marked_text(marked_string);
2115 self.edit(edits, autoindent_mode, cx);
2116 }
2117
2118 pub fn set_group_interval(&mut self, group_interval: Duration) {
2119 self.text.set_group_interval(group_interval);
2120 }
2121
2122 pub fn randomly_edit<T>(
2123 &mut self,
2124 rng: &mut T,
2125 old_range_count: usize,
2126 cx: &mut ModelContext<Self>,
2127 ) where
2128 T: rand::Rng,
2129 {
2130 let mut edits: Vec<(Range<usize>, String)> = Vec::new();
2131 let mut last_end = None;
2132 for _ in 0..old_range_count {
2133 if last_end.map_or(false, |last_end| last_end >= self.len()) {
2134 break;
2135 }
2136
2137 let new_start = last_end.map_or(0, |last_end| last_end + 1);
2138 let mut range = self.random_byte_range(new_start, rng);
2139 if rng.gen_bool(0.2) {
2140 mem::swap(&mut range.start, &mut range.end);
2141 }
2142 last_end = Some(range.end);
2143
2144 let new_text_len = rng.gen_range(0..10);
2145 let new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
2146
2147 edits.push((range, new_text));
2148 }
2149 log::info!("mutating buffer {} with {:?}", self.replica_id(), edits);
2150 self.edit(edits, None, cx);
2151 }
2152
2153 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut ModelContext<Self>) {
2154 let was_dirty = self.is_dirty();
2155 let old_version = self.version.clone();
2156
2157 let ops = self.text.randomly_undo_redo(rng);
2158 if !ops.is_empty() {
2159 for op in ops {
2160 self.send_operation(Operation::Buffer(op), cx);
2161 self.did_edit(&old_version, was_dirty, cx);
2162 }
2163 }
2164 }
2165}
2166
2167impl EventEmitter<Event> for Buffer {}
2168
2169impl Deref for Buffer {
2170 type Target = TextBuffer;
2171
2172 fn deref(&self) -> &Self::Target {
2173 &self.text
2174 }
2175}
2176
2177impl BufferSnapshot {
    /// Returns the [`IndentSize`] of the given line's existing leading whitespace.
2179 pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
2180 indent_size_for_line(self, row)
2181 }
2182 /// Returns [`IndentSize`] for a given position that respects user settings
2183 /// and language preferences.
2184 pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &AppContext) -> IndentSize {
2185 let settings = language_settings(self.language_at(position), self.file(), cx);
2186 if settings.hard_tabs {
2187 IndentSize::tab()
2188 } else {
2189 IndentSize::spaces(settings.tab_size.get())
2190 }
2191 }
2192
    /// Retrieves the suggested indent size for all of the given rows. The unit of indentation
2194 /// is passed in as `single_indent_size`.
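    ///
    /// An illustrative sketch only (not a doctest); `snapshot` stands for a
    /// [`BufferSnapshot`]:
    ///
    /// ```ignore
    /// // Ask for suggestions on rows 2..5, treating four spaces as one level of indentation.
    /// for (row, indent) in snapshot.suggested_indents(2..5, IndentSize::spaces(4)) {
    ///     // `indent.len` is the suggested width, in `indent.kind` characters.
    /// }
    /// ```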
2195 pub fn suggested_indents(
2196 &self,
2197 rows: impl Iterator<Item = u32>,
2198 single_indent_size: IndentSize,
2199 ) -> BTreeMap<u32, IndentSize> {
2200 let mut result = BTreeMap::new();
2201
2202 for row_range in contiguous_ranges(rows, 10) {
2203 let suggestions = match self.suggest_autoindents(row_range.clone()) {
2204 Some(suggestions) => suggestions,
2205 _ => break,
2206 };
2207
2208 for (row, suggestion) in row_range.zip(suggestions) {
2209 let indent_size = if let Some(suggestion) = suggestion {
2210 result
2211 .get(&suggestion.basis_row)
2212 .copied()
2213 .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
2214 .with_delta(suggestion.delta, single_indent_size)
2215 } else {
2216 self.indent_size_for_line(row)
2217 };
2218
2219 result.insert(row, indent_size);
2220 }
2221 }
2222
2223 result
2224 }
2225
2226 fn suggest_autoindents(
2227 &self,
2228 row_range: Range<u32>,
2229 ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
2230 let config = &self.language.as_ref()?.config;
2231 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
2232
2233 // Find the suggested indentation ranges based on the syntax tree.
2234 let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
2235 let end = Point::new(row_range.end, 0);
2236 let range = (start..end).to_offset(&self.text);
2237 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2238 Some(&grammar.indents_config.as_ref()?.query)
2239 });
2240 let indent_configs = matches
2241 .grammars()
2242 .iter()
2243 .map(|grammar| grammar.indents_config.as_ref().unwrap())
2244 .collect::<Vec<_>>();
2245
2246 let mut indent_ranges = Vec::<Range<Point>>::new();
2247 let mut outdent_positions = Vec::<Point>::new();
2248 while let Some(mat) = matches.peek() {
2249 let mut start: Option<Point> = None;
2250 let mut end: Option<Point> = None;
2251
2252 let config = &indent_configs[mat.grammar_index];
2253 for capture in mat.captures {
2254 if capture.index == config.indent_capture_ix {
2255 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
2256 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
2257 } else if Some(capture.index) == config.start_capture_ix {
2258 start = Some(Point::from_ts_point(capture.node.end_position()));
2259 } else if Some(capture.index) == config.end_capture_ix {
2260 end = Some(Point::from_ts_point(capture.node.start_position()));
2261 } else if Some(capture.index) == config.outdent_capture_ix {
2262 outdent_positions.push(Point::from_ts_point(capture.node.start_position()));
2263 }
2264 }
2265
2266 matches.advance();
2267 if let Some((start, end)) = start.zip(end) {
2268 if start.row == end.row {
2269 continue;
2270 }
2271
2272 let range = start..end;
2273 match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
2274 Err(ix) => indent_ranges.insert(ix, range),
2275 Ok(ix) => {
2276 let prev_range = &mut indent_ranges[ix];
2277 prev_range.end = prev_range.end.max(range.end);
2278 }
2279 }
2280 }
2281 }
2282
2283 let mut error_ranges = Vec::<Range<Point>>::new();
2284 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2285 Some(&grammar.error_query)
2286 });
2287 while let Some(mat) = matches.peek() {
2288 let node = mat.captures[0].node;
2289 let start = Point::from_ts_point(node.start_position());
2290 let end = Point::from_ts_point(node.end_position());
2291 let range = start..end;
2292 let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
2293 Ok(ix) | Err(ix) => ix,
2294 };
2295 let mut end_ix = ix;
2296 while let Some(existing_range) = error_ranges.get(end_ix) {
2297 if existing_range.end < end {
2298 end_ix += 1;
2299 } else {
2300 break;
2301 }
2302 }
2303 error_ranges.splice(ix..end_ix, [range]);
2304 matches.advance();
2305 }
2306
2307 outdent_positions.sort();
2308 for outdent_position in outdent_positions {
            // Find the innermost indent range containing this outdent position,
            // and set that range's end to the outdent position.
2311 if let Some(range_to_truncate) = indent_ranges
2312 .iter_mut()
2313 .filter(|indent_range| indent_range.contains(&outdent_position))
2314 .last()
2315 {
2316 range_to_truncate.end = outdent_position;
2317 }
2318 }
2319
        // Find the suggested indentation increases and decreases based on regexes.
2321 let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
2322 self.for_each_line(
2323 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
2324 ..Point::new(row_range.end, 0),
2325 |row, line| {
2326 if config
2327 .decrease_indent_pattern
2328 .as_ref()
2329 .map_or(false, |regex| regex.is_match(line))
2330 {
2331 indent_change_rows.push((row, Ordering::Less));
2332 }
2333 if config
2334 .increase_indent_pattern
2335 .as_ref()
2336 .map_or(false, |regex| regex.is_match(line))
2337 {
2338 indent_change_rows.push((row + 1, Ordering::Greater));
2339 }
2340 },
2341 );
2342
2343 let mut indent_changes = indent_change_rows.into_iter().peekable();
2344 let mut prev_row = if config.auto_indent_using_last_non_empty_line {
2345 prev_non_blank_row.unwrap_or(0)
2346 } else {
2347 row_range.start.saturating_sub(1)
2348 };
2349 let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
2350 Some(row_range.map(move |row| {
2351 let row_start = Point::new(row, self.indent_size_for_line(row).len);
2352
2353 let mut indent_from_prev_row = false;
2354 let mut outdent_from_prev_row = false;
2355 let mut outdent_to_row = u32::MAX;
2356
2357 while let Some((indent_row, delta)) = indent_changes.peek() {
2358 match indent_row.cmp(&row) {
2359 Ordering::Equal => match delta {
2360 Ordering::Less => outdent_from_prev_row = true,
2361 Ordering::Greater => indent_from_prev_row = true,
2362 _ => {}
2363 },
2364
2365 Ordering::Greater => break,
2366 Ordering::Less => {}
2367 }
2368
2369 indent_changes.next();
2370 }
2371
2372 for range in &indent_ranges {
2373 if range.start.row >= row {
2374 break;
2375 }
2376 if range.start.row == prev_row && range.end > row_start {
2377 indent_from_prev_row = true;
2378 }
2379 if range.end > prev_row_start && range.end <= row_start {
2380 outdent_to_row = outdent_to_row.min(range.start.row);
2381 }
2382 }
2383
2384 let within_error = error_ranges
2385 .iter()
2386 .any(|e| e.start.row < row && e.end > row_start);
2387
2388 let suggestion = if outdent_to_row == prev_row
2389 || (outdent_from_prev_row && indent_from_prev_row)
2390 {
2391 Some(IndentSuggestion {
2392 basis_row: prev_row,
2393 delta: Ordering::Equal,
2394 within_error,
2395 })
2396 } else if indent_from_prev_row {
2397 Some(IndentSuggestion {
2398 basis_row: prev_row,
2399 delta: Ordering::Greater,
2400 within_error,
2401 })
2402 } else if outdent_to_row < prev_row {
2403 Some(IndentSuggestion {
2404 basis_row: outdent_to_row,
2405 delta: Ordering::Equal,
2406 within_error,
2407 })
2408 } else if outdent_from_prev_row {
2409 Some(IndentSuggestion {
2410 basis_row: prev_row,
2411 delta: Ordering::Less,
2412 within_error,
2413 })
2414 } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
2415 {
2416 Some(IndentSuggestion {
2417 basis_row: prev_row,
2418 delta: Ordering::Equal,
2419 within_error,
2420 })
2421 } else {
2422 None
2423 };
2424
2425 prev_row = row;
2426 prev_row_start = row_start;
2427 suggestion
2428 }))
2429 }
2430
2431 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
2432 while row > 0 {
2433 row -= 1;
2434 if !self.is_line_blank(row) {
2435 return Some(row);
2436 }
2437 }
2438 None
2439 }
2440
2441 /// Iterates over chunks of text in the given range of the buffer. Text is chunked
2442 /// in an arbitrary way due to being stored in a [`Rope`](text::Rope). The text is also
2443 /// returned in chunks where each chunk has a single syntax highlighting style and
2444 /// diagnostic status.
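    ///
    /// An illustrative sketch only (not a doctest); `snapshot` stands for a
    /// [`BufferSnapshot`] and `theme` for a [`SyntaxTheme`]:
    ///
    /// ```ignore
    /// let mut text = String::new();
    /// for chunk in snapshot.chunks(0..snapshot.len(), true) {
    ///     if let Some(style) = chunk
    ///         .syntax_highlight_id
    ///         .and_then(|id| id.style(&theme))
    ///     {
    ///         // Apply `style` to this run of text.
    ///     }
    ///     text.push_str(chunk.text);
    /// }
    /// ```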
2445 pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks {
2446 let range = range.start.to_offset(self)..range.end.to_offset(self);
2447
2448 let mut syntax = None;
2449 let mut diagnostic_endpoints = Vec::new();
2450 if language_aware {
2451 let captures = self.syntax.captures(range.clone(), &self.text, |grammar| {
2452 grammar.highlights_query.as_ref()
2453 });
2454 let highlight_maps = captures
2455 .grammars()
2456 .into_iter()
2457 .map(|grammar| grammar.highlight_map())
2458 .collect();
2459 syntax = Some((captures, highlight_maps));
2460 for entry in self.diagnostics_in_range::<_, usize>(range.clone(), false) {
2461 diagnostic_endpoints.push(DiagnosticEndpoint {
2462 offset: entry.range.start,
2463 is_start: true,
2464 severity: entry.diagnostic.severity,
2465 is_unnecessary: entry.diagnostic.is_unnecessary,
2466 });
2467 diagnostic_endpoints.push(DiagnosticEndpoint {
2468 offset: entry.range.end,
2469 is_start: false,
2470 severity: entry.diagnostic.severity,
2471 is_unnecessary: entry.diagnostic.is_unnecessary,
2472 });
2473 }
2474 diagnostic_endpoints
2475 .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
2476 }
2477
2478 BufferChunks::new(self.text.as_rope(), range, syntax, diagnostic_endpoints)
2479 }
2480
2481 /// Invokes the given callback for each line of text in the given range of the buffer.
    /// A callback is used to avoid allocating a new string for each line.
2483 fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
2484 let mut line = String::new();
2485 let mut row = range.start.row;
2486 for chunk in self
2487 .as_rope()
2488 .chunks_in_range(range.to_offset(self))
2489 .chain(["\n"])
2490 {
2491 for (newline_ix, text) in chunk.split('\n').enumerate() {
2492 if newline_ix > 0 {
2493 callback(row, &line);
2494 row += 1;
2495 line.clear();
2496 }
2497 line.push_str(text);
2498 }
2499 }
2500 }
2501
2502 /// Iterates over every [`SyntaxLayer`] in the buffer.
2503 pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayer> + '_ {
2504 self.syntax.layers_for_range(0..self.len(), &self.text)
2505 }
2506
2507 pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayer> {
2508 let offset = position.to_offset(self);
2509 self.syntax
2510 .layers_for_range(offset..offset, &self.text)
2511 .filter(|l| l.node().end_byte() > offset)
2512 .last()
2513 }
2514
    /// Returns the main [Language].
2516 pub fn language(&self) -> Option<&Arc<Language>> {
2517 self.language.as_ref()
2518 }
2519
2520 /// Returns the [Language] at the given location.
2521 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
2522 self.syntax_layer_at(position)
2523 .map(|info| info.language)
2524 .or(self.language.as_ref())
2525 }
2526
2527 /// Returns the settings for the language at the given location.
2528 pub fn settings_at<'a, D: ToOffset>(
2529 &self,
2530 position: D,
2531 cx: &'a AppContext,
2532 ) -> &'a LanguageSettings {
2533 language_settings(self.language_at(position), self.file.as_ref(), cx)
2534 }
2535
2536 /// Returns the [LanguageScope] at the given location.
2537 pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
2538 let offset = position.to_offset(self);
2539 let mut scope = None;
2540 let mut smallest_range: Option<Range<usize>> = None;
2541
2542 // Use the layer that has the smallest node intersecting the given point.
2543 for layer in self.syntax.layers_for_range(offset..offset, &self.text) {
2544 let mut cursor = layer.node().walk();
2545
2546 let mut range = None;
2547 loop {
2548 let child_range = cursor.node().byte_range();
2549 if !child_range.to_inclusive().contains(&offset) {
2550 break;
2551 }
2552
2553 range = Some(child_range);
2554 if cursor.goto_first_child_for_byte(offset).is_none() {
2555 break;
2556 }
2557 }
2558
2559 if let Some(range) = range {
2560 if smallest_range
2561 .as_ref()
2562 .map_or(true, |smallest_range| range.len() < smallest_range.len())
2563 {
2564 smallest_range = Some(range);
2565 scope = Some(LanguageScope {
2566 language: layer.language.clone(),
2567 override_id: layer.override_id(offset, &self.text),
2568 });
2569 }
2570 }
2571 }
2572
2573 scope.or_else(|| {
2574 self.language.clone().map(|language| LanguageScope {
2575 language,
2576 override_id: None,
2577 })
2578 })
2579 }
2580
2581 /// Returns a tuple of the range and character kind of the word
2582 /// surrounding the given position.
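    ///
    /// An illustrative sketch only (not a doctest). For a buffer containing
    /// `"let foo_bar = 1"`, offset 6 falls inside the identifier:
    ///
    /// ```ignore
    /// let (range, kind) = snapshot.surrounding_word(6);
    /// // `range` is 4..11 (the bytes of "foo_bar") and `kind` is Some(CharKind::Word).
    /// ```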
2583 pub fn surrounding_word<T: ToOffset>(&self, start: T) -> (Range<usize>, Option<CharKind>) {
2584 let mut start = start.to_offset(self);
2585 let mut end = start;
2586 let mut next_chars = self.chars_at(start).peekable();
2587 let mut prev_chars = self.reversed_chars_at(start).peekable();
2588
2589 let scope = self.language_scope_at(start);
2590 let kind = |c| char_kind(&scope, c);
2591 let word_kind = cmp::max(
2592 prev_chars.peek().copied().map(kind),
2593 next_chars.peek().copied().map(kind),
2594 );
2595
2596 for ch in prev_chars {
2597 if Some(kind(ch)) == word_kind && ch != '\n' {
2598 start -= ch.len_utf8();
2599 } else {
2600 break;
2601 }
2602 }
2603
2604 for ch in next_chars {
2605 if Some(kind(ch)) == word_kind && ch != '\n' {
2606 end += ch.len_utf8();
2607 } else {
2608 break;
2609 }
2610 }
2611
2612 (start..end, word_kind)
2613 }
2614
    /// Returns the range for the closest syntax node enclosing the given range.
2616 pub fn range_for_syntax_ancestor<T: ToOffset>(&self, range: Range<T>) -> Option<Range<usize>> {
2617 let range = range.start.to_offset(self)..range.end.to_offset(self);
2618 let mut result: Option<Range<usize>> = None;
2619 'outer: for layer in self.syntax.layers_for_range(range.clone(), &self.text) {
2620 let mut cursor = layer.node().walk();
2621
2622 // Descend to the first leaf that touches the start of the range,
2623 // and if the range is non-empty, extends beyond the start.
2624 while cursor.goto_first_child_for_byte(range.start).is_some() {
2625 if !range.is_empty() && cursor.node().end_byte() == range.start {
2626 cursor.goto_next_sibling();
2627 }
2628 }
2629
2630 // Ascend to the smallest ancestor that strictly contains the range.
2631 loop {
2632 let node_range = cursor.node().byte_range();
2633 if node_range.start <= range.start
2634 && node_range.end >= range.end
2635 && node_range.len() > range.len()
2636 {
2637 break;
2638 }
2639 if !cursor.goto_parent() {
2640 continue 'outer;
2641 }
2642 }
2643
2644 let left_node = cursor.node();
2645 let mut layer_result = left_node.byte_range();
2646
2647 // For an empty range, try to find another node immediately to the right of the range.
2648 if left_node.end_byte() == range.start {
2649 let mut right_node = None;
2650 while !cursor.goto_next_sibling() {
2651 if !cursor.goto_parent() {
2652 break;
2653 }
2654 }
2655
2656 while cursor.node().start_byte() == range.start {
2657 right_node = Some(cursor.node());
2658 if !cursor.goto_first_child() {
2659 break;
2660 }
2661 }
2662
2663 // If there is a candidate node on both sides of the (empty) range, then
2664 // decide between the two by favoring a named node over an anonymous token.
2665 // If both nodes are the same in that regard, favor the right one.
2666 if let Some(right_node) = right_node {
2667 if right_node.is_named() || !left_node.is_named() {
2668 layer_result = right_node.byte_range();
2669 }
2670 }
2671 }
2672
2673 if let Some(previous_result) = &result {
2674 if previous_result.len() < layer_result.len() {
2675 continue;
2676 }
2677 }
2678 result = Some(layer_result);
2679 }
2680
2681 result
2682 }
2683
2684 /// Returns the outline for the buffer.
2685 ///
2686 /// This method allows passing an optional [SyntaxTheme] to
2687 /// syntax-highlight the returned symbols.
2688 pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Option<Outline<Anchor>> {
2689 self.outline_items_containing(0..self.len(), true, theme)
2690 .map(Outline::new)
2691 }
2692
2693 /// Returns all the symbols that contain the given position.
2694 ///
2695 /// This method allows passing an optional [SyntaxTheme] to
2696 /// syntax-highlight the returned symbols.
2697 pub fn symbols_containing<T: ToOffset>(
2698 &self,
2699 position: T,
2700 theme: Option<&SyntaxTheme>,
2701 ) -> Option<Vec<OutlineItem<Anchor>>> {
2702 let position = position.to_offset(self);
2703 let mut items = self.outline_items_containing(
2704 position.saturating_sub(1)..self.len().min(position + 1),
2705 false,
2706 theme,
2707 )?;
2708 let mut prev_depth = None;
2709 items.retain(|item| {
2710 let result = prev_depth.map_or(true, |prev_depth| item.depth > prev_depth);
2711 prev_depth = Some(item.depth);
2712 result
2713 });
2714 Some(items)
2715 }
2716
2717 fn outline_items_containing(
2718 &self,
2719 range: Range<usize>,
2720 include_extra_context: bool,
2721 theme: Option<&SyntaxTheme>,
2722 ) -> Option<Vec<OutlineItem<Anchor>>> {
2723 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2724 grammar.outline_config.as_ref().map(|c| &c.query)
2725 });
2726 let configs = matches
2727 .grammars()
2728 .iter()
2729 .map(|g| g.outline_config.as_ref().unwrap())
2730 .collect::<Vec<_>>();
2731
2732 let mut stack = Vec::<Range<usize>>::new();
2733 let mut items = Vec::new();
2734 while let Some(mat) = matches.peek() {
2735 let config = &configs[mat.grammar_index];
2736 let item_node = mat.captures.iter().find_map(|cap| {
2737 if cap.index == config.item_capture_ix {
2738 Some(cap.node)
2739 } else {
2740 None
2741 }
2742 })?;
2743
2744 let item_range = item_node.byte_range();
2745 if item_range.end < range.start || item_range.start > range.end {
2746 matches.advance();
2747 continue;
2748 }
2749
2750 let mut buffer_ranges = Vec::new();
2751 for capture in mat.captures {
2752 let node_is_name;
2753 if capture.index == config.name_capture_ix {
2754 node_is_name = true;
2755 } else if Some(capture.index) == config.context_capture_ix
2756 || (Some(capture.index) == config.extra_context_capture_ix
2757 && include_extra_context)
2758 {
2759 node_is_name = false;
2760 } else {
2761 continue;
2762 }
2763
2764 let mut range = capture.node.start_byte()..capture.node.end_byte();
2765 let start = capture.node.start_position();
2766 if capture.node.end_position().row > start.row {
2767 range.end =
2768 range.start + self.line_len(start.row as u32) as usize - start.column;
2769 }
2770
2771 if !range.is_empty() {
2772 buffer_ranges.push((range, node_is_name));
2773 }
2774 }
2775
2776 if buffer_ranges.is_empty() {
2777 matches.advance();
2778 continue;
2779 }
2780
2781 let mut text = String::new();
2782 let mut highlight_ranges = Vec::new();
2783 let mut name_ranges = Vec::new();
2784 let mut chunks = self.chunks(
2785 buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end,
2786 true,
2787 );
2788 let mut last_buffer_range_end = 0;
2789 for (buffer_range, is_name) in buffer_ranges {
2790 if !text.is_empty() && buffer_range.start > last_buffer_range_end {
2791 text.push(' ');
2792 }
2793 last_buffer_range_end = buffer_range.end;
2794 if is_name {
2795 let mut start = text.len();
2796 let end = start + buffer_range.len();
2797
                    // When multiple names are captured, the matchable text
                    // includes the whitespace in between the names.
2800 if !name_ranges.is_empty() {
2801 start -= 1;
2802 }
2803
2804 name_ranges.push(start..end);
2805 }
2806
2807 let mut offset = buffer_range.start;
2808 chunks.seek(offset);
2809 for mut chunk in chunks.by_ref() {
2810 if chunk.text.len() > buffer_range.end - offset {
2811 chunk.text = &chunk.text[0..(buffer_range.end - offset)];
2812 offset = buffer_range.end;
2813 } else {
2814 offset += chunk.text.len();
2815 }
2816 let style = chunk
2817 .syntax_highlight_id
2818 .zip(theme)
2819 .and_then(|(highlight, theme)| highlight.style(theme));
2820 if let Some(style) = style {
2821 let start = text.len();
2822 let end = start + chunk.text.len();
2823 highlight_ranges.push((start..end, style));
2824 }
2825 text.push_str(chunk.text);
2826 if offset >= buffer_range.end {
2827 break;
2828 }
2829 }
2830 }
2831
2832 matches.advance();
2833 while stack.last().map_or(false, |prev_range| {
2834 prev_range.start > item_range.start || prev_range.end < item_range.end
2835 }) {
2836 stack.pop();
2837 }
2838 stack.push(item_range.clone());
2839
2840 items.push(OutlineItem {
2841 depth: stack.len() - 1,
2842 range: self.anchor_after(item_range.start)..self.anchor_before(item_range.end),
2843 text,
2844 highlight_ranges,
2845 name_ranges,
2846 })
2847 }
2848 Some(items)
2849 }
2850
2851 /// For each grammar in the language, runs the provided
2852 /// [tree_sitter::Query] against the given range.
2853 pub fn matches(
2854 &self,
2855 range: Range<usize>,
2856 query: fn(&Grammar) -> Option<&tree_sitter::Query>,
2857 ) -> SyntaxMapMatches {
2858 self.syntax.matches(range, self, query)
2859 }
2860
    /// Returns bracket range pairs overlapping or adjacent to `range`.
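    ///
    /// An illustrative sketch only (not a doctest); `selection` stands for any byte range
    /// of interest:
    ///
    /// ```ignore
    /// for (open, close) in snapshot.bracket_ranges(selection.clone()) {
    ///     // `open` and `close` are the byte ranges of a matching pair of delimiters,
    ///     // e.g. the `{` and `}` of an enclosing block.
    /// }
    /// ```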
2862 pub fn bracket_ranges<T: ToOffset>(
2863 &self,
2864 range: Range<T>,
2865 ) -> impl Iterator<Item = (Range<usize>, Range<usize>)> + '_ {
2866 // Find bracket pairs that *inclusively* contain the given range.
2867 let range = range.start.to_offset(self).saturating_sub(1)
2868 ..self.len().min(range.end.to_offset(self) + 1);
2869
2870 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2871 grammar.brackets_config.as_ref().map(|c| &c.query)
2872 });
2873 let configs = matches
2874 .grammars()
2875 .iter()
2876 .map(|grammar| grammar.brackets_config.as_ref().unwrap())
2877 .collect::<Vec<_>>();
2878
2879 iter::from_fn(move || {
2880 while let Some(mat) = matches.peek() {
2881 let mut open = None;
2882 let mut close = None;
2883 let config = &configs[mat.grammar_index];
2884 for capture in mat.captures {
2885 if capture.index == config.open_capture_ix {
2886 open = Some(capture.node.byte_range());
2887 } else if capture.index == config.close_capture_ix {
2888 close = Some(capture.node.byte_range());
2889 }
2890 }
2891
2892 matches.advance();
2893
2894 let Some((open, close)) = open.zip(close) else {
2895 continue;
2896 };
2897
2898 let bracket_range = open.start..=close.end;
2899 if !bracket_range.overlaps(&range) {
2900 continue;
2901 }
2902
2903 return Some((open, close));
2904 }
2905 None
2906 })
2907 }
2908
    /// Returns the enclosing bracket ranges containing the given range.
2910 pub fn enclosing_bracket_ranges<T: ToOffset>(
2911 &self,
2912 range: Range<T>,
2913 ) -> impl Iterator<Item = (Range<usize>, Range<usize>)> + '_ {
2914 let range = range.start.to_offset(self)..range.end.to_offset(self);
2915
2916 self.bracket_ranges(range.clone())
2917 .filter(move |(open, close)| open.start <= range.start && close.end >= range.end)
2918 }
2919
    /// Returns the smallest enclosing bracket ranges containing the given range,
    /// or `None` if no brackets contain the range.
    ///
    /// A `range_filter` can optionally be passed to restrict which bracket ranges are considered.
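    ///
    /// An illustrative sketch only (not a doctest); `cursor` stands for any byte offset:
    ///
    /// ```ignore
    /// // With no filter, this returns the tightest pair of brackets around the cursor.
    /// if let Some((open, close)) =
    ///     snapshot.innermost_enclosing_bracket_ranges(cursor..cursor, None)
    /// {
    ///     // e.g. jump from the opening delimiter at `open.start` to the closing one at `close.start`.
    /// }
    /// ```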
2923 pub fn innermost_enclosing_bracket_ranges<T: ToOffset>(
2924 &self,
2925 range: Range<T>,
2926 range_filter: Option<&dyn Fn(Range<usize>, Range<usize>) -> bool>,
2927 ) -> Option<(Range<usize>, Range<usize>)> {
2928 let range = range.start.to_offset(self)..range.end.to_offset(self);
2929
2930 // Get the ranges of the innermost pair of brackets.
2931 let mut result: Option<(Range<usize>, Range<usize>)> = None;
2932
2933 for (open, close) in self.enclosing_bracket_ranges(range.clone()) {
2934 if let Some(range_filter) = range_filter {
2935 if !range_filter(open.clone(), close.clone()) {
2936 continue;
2937 }
2938 }
2939
2940 let len = close.end - open.start;
2941
2942 if let Some((existing_open, existing_close)) = &result {
2943 let existing_len = existing_close.end - existing_open.start;
2944 if len > existing_len {
2945 continue;
2946 }
2947 }
2948
2949 result = Some((open, close));
2950 }
2951
2952 result
2953 }
2954
2955 /// Returns anchor ranges for any matches of the redaction query.
2956 /// The buffer can be associated with multiple languages, and the redaction query associated with each
2957 /// will be run on the relevant section of the buffer.
2958 pub fn redacted_ranges<T: ToOffset>(
2959 &self,
2960 range: Range<T>,
2961 ) -> impl Iterator<Item = Range<usize>> + '_ {
2962 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
2963 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
2964 grammar
2965 .redactions_config
2966 .as_ref()
2967 .map(|config| &config.query)
2968 });
2969
2970 let configs = syntax_matches
2971 .grammars()
2972 .iter()
2973 .map(|grammar| grammar.redactions_config.as_ref())
2974 .collect::<Vec<_>>();
2975
2976 iter::from_fn(move || {
2977 let redacted_range = syntax_matches
2978 .peek()
2979 .and_then(|mat| {
2980 configs[mat.grammar_index].and_then(|config| {
2981 mat.captures
2982 .iter()
2983 .find(|capture| capture.index == config.redaction_capture_ix)
2984 })
2985 })
2986 .map(|mat| mat.node.byte_range());
2987 syntax_matches.advance();
2988 redacted_range
2989 })
2990 }
2991
2992 pub fn runnable_ranges(
2993 &self,
2994 range: Range<Anchor>,
2995 ) -> impl Iterator<Item = RunnableRange> + '_ {
2996 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
2997
2998 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
2999 grammar.runnable_config.as_ref().map(|config| &config.query)
3000 });
3001
3002 let test_configs = syntax_matches
3003 .grammars()
3004 .iter()
3005 .map(|grammar| grammar.runnable_config.as_ref())
3006 .collect::<Vec<_>>();
3007
3008 iter::from_fn(move || loop {
3009 let mat = syntax_matches.peek()?;
3010 let test_range = test_configs[mat.grammar_index].and_then(|test_configs| {
3011 let mut tags: SmallVec<[(Range<usize>, RunnableTag); 1]> =
3012 SmallVec::from_iter(mat.captures.iter().filter_map(|capture| {
3013 test_configs
3014 .runnable_tags
3015 .get(&capture.index)
3016 .cloned()
3017 .map(|tag_name| (capture.node.byte_range(), tag_name))
3018 }));
3019 let maximum_range = tags
3020 .iter()
3021 .max_by_key(|(byte_range, _)| byte_range.len())
3022 .map(|(range, _)| range)?
3023 .clone();
3024 tags.sort_by_key(|(range, _)| range == &maximum_range);
3025 let split_point = tags.partition_point(|(range, _)| range != &maximum_range);
3026 let (extra_captures, tags) = tags.split_at(split_point);
3027 let extra_captures = extra_captures
3028 .into_iter()
3029 .map(|(range, name)| {
3030 (
3031 name.0.to_string(),
3032 self.text_for_range(range.clone()).collect::<String>(),
3033 )
3034 })
3035 .collect();
3036 Some(RunnableRange {
3037 run_range: mat
3038 .captures
3039 .iter()
3040 .find(|capture| capture.index == test_configs.run_capture_ix)
3041 .map(|mat| mat.node.byte_range())?,
3042 runnable: Runnable {
3043 tags: tags.into_iter().cloned().map(|(_, tag)| tag).collect(),
3044 language: mat.language,
3045 buffer: self.remote_id(),
3046 },
3047 extra_captures,
3048 buffer_id: self.remote_id(),
3049 })
3050 });
3051
3052 syntax_matches.advance();
3053 if test_range.is_some() {
                // It's fine to short-circuit when `.peek()?` returns None. However, when a match
                // does not contain a run marker, we don't want to end this iterator early; we just
                // loop around and try the next match.
3056 return test_range;
3057 }
3058 })
3059 }
3060
3061 /// Returns selections for remote peers intersecting the given range.
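    ///
    /// An illustrative sketch only (not a doctest):
    ///
    /// ```ignore
    /// for (replica_id, line_mode, cursor_shape, selections) in
    ///     snapshot.remote_selections_in_range(Anchor::MIN..Anchor::MAX)
    /// {
    ///     for selection in selections {
    ///         // Render this peer's selection from `selection.start` to `selection.end`.
    ///     }
    /// }
    /// ```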
3062 #[allow(clippy::type_complexity)]
3063 pub fn remote_selections_in_range(
3064 &self,
3065 range: Range<Anchor>,
3066 ) -> impl Iterator<
3067 Item = (
3068 ReplicaId,
3069 bool,
3070 CursorShape,
3071 impl Iterator<Item = &Selection<Anchor>> + '_,
3072 ),
3073 > + '_ {
3074 self.remote_selections
3075 .iter()
3076 .filter(|(replica_id, set)| {
3077 **replica_id != self.text.replica_id() && !set.selections.is_empty()
3078 })
3079 .map(move |(replica_id, set)| {
3080 let start_ix = match set.selections.binary_search_by(|probe| {
3081 probe.end.cmp(&range.start, self).then(Ordering::Greater)
3082 }) {
3083 Ok(ix) | Err(ix) => ix,
3084 };
3085 let end_ix = match set.selections.binary_search_by(|probe| {
3086 probe.start.cmp(&range.end, self).then(Ordering::Less)
3087 }) {
3088 Ok(ix) | Err(ix) => ix,
3089 };
3090
3091 (
3092 *replica_id,
3093 set.line_mode,
3094 set.cursor_shape,
3095 set.selections[start_ix..end_ix].iter(),
3096 )
3097 })
3098 }
3099
3100 /// Whether the buffer contains any git changes.
3101 pub fn has_git_diff(&self) -> bool {
3102 !self.git_diff.is_empty()
3103 }
3104
3105 /// Returns all the Git diff hunks intersecting the given
3106 /// row range.
3107 pub fn git_diff_hunks_in_row_range(
3108 &self,
3109 range: Range<BufferRow>,
3110 ) -> impl '_ + Iterator<Item = git::diff::DiffHunk<u32>> {
3111 self.git_diff.hunks_in_row_range(range, self)
3112 }
3113
3114 /// Returns all the Git diff hunks intersecting the given
3115 /// range.
3116 pub fn git_diff_hunks_intersecting_range(
3117 &self,
3118 range: Range<Anchor>,
3119 ) -> impl '_ + Iterator<Item = git::diff::DiffHunk<u32>> {
3120 self.git_diff.hunks_intersecting_range(range, self)
3121 }
3122
3123 /// Returns all the Git diff hunks intersecting the given
3124 /// range, in reverse order.
3125 pub fn git_diff_hunks_intersecting_range_rev(
3126 &self,
3127 range: Range<Anchor>,
3128 ) -> impl '_ + Iterator<Item = git::diff::DiffHunk<u32>> {
3129 self.git_diff.hunks_intersecting_range_rev(range, self)
3130 }
3131
    /// Returns whether the buffer contains any diagnostics.
3133 pub fn has_diagnostics(&self) -> bool {
3134 !self.diagnostics.is_empty()
3135 }
3136
3137 /// Returns all the diagnostics intersecting the given range.
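    ///
    /// An illustrative sketch only (not a doctest):
    ///
    /// ```ignore
    /// // Collect the error-severity diagnostics in the whole buffer, resolved to byte offsets.
    /// let errors: Vec<DiagnosticEntry<usize>> = snapshot
    ///     .diagnostics_in_range(0..snapshot.len(), false)
    ///     .filter(|entry| entry.diagnostic.severity == DiagnosticSeverity::ERROR)
    ///     .collect();
    /// ```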
3138 pub fn diagnostics_in_range<'a, T, O>(
3139 &'a self,
3140 search_range: Range<T>,
3141 reversed: bool,
3142 ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
3143 where
3144 T: 'a + Clone + ToOffset,
3145 O: 'a + FromAnchor + Ord,
3146 {
3147 let mut iterators: Vec<_> = self
3148 .diagnostics
3149 .iter()
3150 .map(|(_, collection)| {
3151 collection
3152 .range::<T, O>(search_range.clone(), self, true, reversed)
3153 .peekable()
3154 })
3155 .collect();
3156
3157 std::iter::from_fn(move || {
3158 let (next_ix, _) = iterators
3159 .iter_mut()
3160 .enumerate()
3161 .flat_map(|(ix, iter)| Some((ix, iter.peek()?)))
3162 .min_by(|(_, a), (_, b)| {
3163 let cmp = a
3164 .range
3165 .start
3166 .cmp(&b.range.start)
3167 // when range is equal, sort by diagnostic severity
3168 .then(a.diagnostic.severity.cmp(&b.diagnostic.severity))
3169 // and stabilize order with group_id
3170 .then(a.diagnostic.group_id.cmp(&b.diagnostic.group_id));
3171 if reversed {
3172 cmp.reverse()
3173 } else {
3174 cmp
3175 }
3176 })?;
3177 iterators[next_ix].next()
3178 })
3179 }
3180
3181 /// Returns all the diagnostic groups associated with the given
3182 /// language server id. If no language server id is provided,
    /// all diagnostic groups are returned.
3184 pub fn diagnostic_groups(
3185 &self,
3186 language_server_id: Option<LanguageServerId>,
3187 ) -> Vec<(LanguageServerId, DiagnosticGroup<Anchor>)> {
3188 let mut groups = Vec::new();
3189
3190 if let Some(language_server_id) = language_server_id {
3191 if let Ok(ix) = self
3192 .diagnostics
3193 .binary_search_by_key(&language_server_id, |e| e.0)
3194 {
3195 self.diagnostics[ix]
3196 .1
3197 .groups(language_server_id, &mut groups, self);
3198 }
3199 } else {
3200 for (language_server_id, diagnostics) in self.diagnostics.iter() {
3201 diagnostics.groups(*language_server_id, &mut groups, self);
3202 }
3203 }
3204
3205 groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
3206 let a_start = &group_a.entries[group_a.primary_ix].range.start;
3207 let b_start = &group_b.entries[group_b.primary_ix].range.start;
3208 a_start.cmp(b_start, self).then_with(|| id_a.cmp(id_b))
3209 });
3210
3211 groups
3212 }
3213
3214 /// Returns an iterator over the diagnostics for the given group.
3215 pub fn diagnostic_group<'a, O>(
3216 &'a self,
3217 group_id: usize,
3218 ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
3219 where
3220 O: 'a + FromAnchor,
3221 {
3222 self.diagnostics
3223 .iter()
3224 .flat_map(move |(_, set)| set.group(group_id, self))
3225 }
3226
3227 /// The number of times diagnostics were updated.
3228 pub fn diagnostics_update_count(&self) -> usize {
3229 self.diagnostics_update_count
3230 }
3231
3232 /// The number of times the buffer was parsed.
3233 pub fn parse_count(&self) -> usize {
3234 self.parse_count
3235 }
3236
3237 /// The number of times selections were updated.
3238 pub fn selections_update_count(&self) -> usize {
3239 self.selections_update_count
3240 }
3241
    /// Returns a snapshot of the underlying file.
3243 pub fn file(&self) -> Option<&Arc<dyn File>> {
3244 self.file.as_ref()
3245 }
3246
3247 /// Resolves the file path (relative to the worktree root) associated with the underlying file.
3248 pub fn resolve_file_path(&self, cx: &AppContext, include_root: bool) -> Option<PathBuf> {
3249 if let Some(file) = self.file() {
3250 if file.path().file_name().is_none() || include_root {
3251 Some(file.full_path(cx))
3252 } else {
3253 Some(file.path().to_path_buf())
3254 }
3255 } else {
3256 None
3257 }
3258 }
3259
3260 /// The number of times the underlying file was updated.
3261 pub fn file_update_count(&self) -> usize {
3262 self.file_update_count
3263 }
3264
3265 /// The number of times the git diff status was updated.
3266 pub fn git_diff_update_count(&self) -> usize {
3267 self.git_diff_update_count
3268 }
3269}
3270
3271fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
3272 indent_size_for_text(text.chars_at(Point::new(row, 0)))
3273}
3274
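/// Measures the leading whitespace of `text`, counting characters up to the first
/// non-indentation character. The kind (spaces vs. tabs) is taken from the first
/// character seen.
///
/// An illustrative sketch only (the function is private, so this is not a doctest):
///
/// ```ignore
/// let spaces = indent_size_for_text("    foo".chars()); // len == 4, kind == Space
/// let tab = indent_size_for_text("\tfoo".chars());      // len == 1, kind == Tab
/// ```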
3275fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
3276 let mut result = IndentSize::spaces(0);
3277 for c in text {
3278 let kind = match c {
3279 ' ' => IndentKind::Space,
3280 '\t' => IndentKind::Tab,
3281 _ => break,
3282 };
3283 if result.len == 0 {
3284 result.kind = kind;
3285 }
3286 result.len += 1;
3287 }
3288 result
3289}
3290
3291impl Clone for BufferSnapshot {
3292 fn clone(&self) -> Self {
3293 Self {
3294 text: self.text.clone(),
3295 git_diff: self.git_diff.clone(),
3296 syntax: self.syntax.clone(),
3297 file: self.file.clone(),
3298 remote_selections: self.remote_selections.clone(),
3299 diagnostics: self.diagnostics.clone(),
3300 selections_update_count: self.selections_update_count,
3301 diagnostics_update_count: self.diagnostics_update_count,
3302 file_update_count: self.file_update_count,
3303 git_diff_update_count: self.git_diff_update_count,
3304 language: self.language.clone(),
3305 parse_count: self.parse_count,
3306 }
3307 }
3308}
3309
3310impl Deref for BufferSnapshot {
3311 type Target = text::BufferSnapshot;
3312
3313 fn deref(&self) -> &Self::Target {
3314 &self.text
3315 }
3316}
3317
3318unsafe impl<'a> Send for BufferChunks<'a> {}
3319
3320impl<'a> BufferChunks<'a> {
3321 pub(crate) fn new(
3322 text: &'a Rope,
3323 range: Range<usize>,
3324 syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
3325 diagnostic_endpoints: Vec<DiagnosticEndpoint>,
3326 ) -> Self {
3327 let mut highlights = None;
3328 if let Some((captures, highlight_maps)) = syntax {
3329 highlights = Some(BufferChunkHighlights {
3330 captures,
3331 next_capture: None,
3332 stack: Default::default(),
3333 highlight_maps,
3334 })
3335 }
3336
3337 let diagnostic_endpoints = diagnostic_endpoints.into_iter().peekable();
3338 let chunks = text.chunks_in_range(range.clone());
3339
3340 BufferChunks {
3341 range,
3342 chunks,
3343 diagnostic_endpoints,
3344 error_depth: 0,
3345 warning_depth: 0,
3346 information_depth: 0,
3347 hint_depth: 0,
3348 unnecessary_depth: 0,
3349 highlights,
3350 }
3351 }
3352
3353 /// Seeks to the given byte offset in the buffer.
3354 pub fn seek(&mut self, offset: usize) {
3355 self.range.start = offset;
3356 self.chunks.seek(self.range.start);
3357 if let Some(highlights) = self.highlights.as_mut() {
3358 highlights
3359 .stack
3360 .retain(|(end_offset, _)| *end_offset > offset);
3361 if let Some(capture) = &highlights.next_capture {
3362 if offset >= capture.node.start_byte() {
3363 let next_capture_end = capture.node.end_byte();
3364 if offset < next_capture_end {
3365 highlights.stack.push((
3366 next_capture_end,
3367 highlights.highlight_maps[capture.grammar_index].get(capture.index),
3368 ));
3369 }
3370 highlights.next_capture.take();
3371 }
3372 }
3373 highlights.captures.set_byte_range(self.range.clone());
3374 }
3375 }
3376
3377 /// The current byte offset in the buffer.
3378 pub fn offset(&self) -> usize {
3379 self.range.start
3380 }
3381
3382 fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
3383 let depth = match endpoint.severity {
3384 DiagnosticSeverity::ERROR => &mut self.error_depth,
3385 DiagnosticSeverity::WARNING => &mut self.warning_depth,
3386 DiagnosticSeverity::INFORMATION => &mut self.information_depth,
3387 DiagnosticSeverity::HINT => &mut self.hint_depth,
3388 _ => return,
3389 };
3390 if endpoint.is_start {
3391 *depth += 1;
3392 } else {
3393 *depth -= 1;
3394 }
3395
3396 if endpoint.is_unnecessary {
3397 if endpoint.is_start {
3398 self.unnecessary_depth += 1;
3399 } else {
3400 self.unnecessary_depth -= 1;
3401 }
3402 }
3403 }
3404
3405 fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
3406 if self.error_depth > 0 {
3407 Some(DiagnosticSeverity::ERROR)
3408 } else if self.warning_depth > 0 {
3409 Some(DiagnosticSeverity::WARNING)
3410 } else if self.information_depth > 0 {
3411 Some(DiagnosticSeverity::INFORMATION)
3412 } else if self.hint_depth > 0 {
3413 Some(DiagnosticSeverity::HINT)
3414 } else {
3415 None
3416 }
3417 }
3418
3419 fn current_code_is_unnecessary(&self) -> bool {
3420 self.unnecessary_depth > 0
3421 }
3422}
3423
3424impl<'a> Iterator for BufferChunks<'a> {
3425 type Item = Chunk<'a>;
3426
3427 fn next(&mut self) -> Option<Self::Item> {
3428 let mut next_capture_start = usize::MAX;
3429 let mut next_diagnostic_endpoint = usize::MAX;
3430
3431 if let Some(highlights) = self.highlights.as_mut() {
3432 while let Some((parent_capture_end, _)) = highlights.stack.last() {
3433 if *parent_capture_end <= self.range.start {
3434 highlights.stack.pop();
3435 } else {
3436 break;
3437 }
3438 }
3439
3440 if highlights.next_capture.is_none() {
3441 highlights.next_capture = highlights.captures.next();
3442 }
3443
3444 while let Some(capture) = highlights.next_capture.as_ref() {
3445 if self.range.start < capture.node.start_byte() {
3446 next_capture_start = capture.node.start_byte();
3447 break;
3448 } else {
3449 let highlight_id =
3450 highlights.highlight_maps[capture.grammar_index].get(capture.index);
3451 highlights
3452 .stack
3453 .push((capture.node.end_byte(), highlight_id));
3454 highlights.next_capture = highlights.captures.next();
3455 }
3456 }
3457 }
3458
3459 while let Some(endpoint) = self.diagnostic_endpoints.peek().copied() {
3460 if endpoint.offset <= self.range.start {
3461 self.update_diagnostic_depths(endpoint);
3462 self.diagnostic_endpoints.next();
3463 } else {
3464 next_diagnostic_endpoint = endpoint.offset;
3465 break;
3466 }
3467 }
3468
3469 if let Some(chunk) = self.chunks.peek() {
3470 let chunk_start = self.range.start;
3471 let mut chunk_end = (self.chunks.offset() + chunk.len())
3472 .min(next_capture_start)
3473 .min(next_diagnostic_endpoint);
3474 let mut highlight_id = None;
3475 if let Some(highlights) = self.highlights.as_ref() {
3476 if let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last() {
3477 chunk_end = chunk_end.min(*parent_capture_end);
3478 highlight_id = Some(*parent_highlight_id);
3479 }
3480 }
3481
3482 let slice =
3483 &chunk[chunk_start - self.chunks.offset()..chunk_end - self.chunks.offset()];
3484 self.range.start = chunk_end;
3485 if self.range.start == self.chunks.offset() + chunk.len() {
3486 self.chunks.next().unwrap();
3487 }
3488
3489 Some(Chunk {
3490 text: slice,
3491 syntax_highlight_id: highlight_id,
3492 diagnostic_severity: self.current_diagnostic_severity(),
3493 is_unnecessary: self.current_code_is_unnecessary(),
3494 ..Default::default()
3495 })
3496 } else {
3497 None
3498 }
3499 }
3500}
3501
3502impl operation_queue::Operation for Operation {
3503 fn lamport_timestamp(&self) -> clock::Lamport {
3504 match self {
3505 Operation::Buffer(_) => {
3506 unreachable!("buffer operations should never be deferred at this layer")
3507 }
3508 Operation::UpdateDiagnostics {
3509 lamport_timestamp, ..
3510 }
3511 | Operation::UpdateSelections {
3512 lamport_timestamp, ..
3513 }
3514 | Operation::UpdateCompletionTriggers {
3515 lamport_timestamp, ..
3516 } => *lamport_timestamp,
3517 }
3518 }
3519}
3520
3521impl Default for Diagnostic {
3522 fn default() -> Self {
3523 Self {
3524 source: Default::default(),
3525 code: None,
3526 severity: DiagnosticSeverity::ERROR,
3527 message: Default::default(),
3528 group_id: 0,
3529 is_primary: false,
3530 is_disk_based: false,
3531 is_unnecessary: false,
3532 }
3533 }
3534}
3535
3536impl IndentSize {
3537 /// Returns an [IndentSize] representing the given spaces.
3538 pub fn spaces(len: u32) -> Self {
3539 Self {
3540 len,
3541 kind: IndentKind::Space,
3542 }
3543 }
3544
3545 /// Returns an [IndentSize] representing a tab.
3546 pub fn tab() -> Self {
3547 Self {
3548 len: 1,
3549 kind: IndentKind::Tab,
3550 }
3551 }
3552
3553 /// An iterator over the characters represented by this [IndentSize].
3554 pub fn chars(&self) -> impl Iterator<Item = char> {
3555 iter::repeat(self.char()).take(self.len as usize)
3556 }
3557
3558 /// The character representation of this [IndentSize].
3559 pub fn char(&self) -> char {
3560 match self.kind {
3561 IndentKind::Space => ' ',
3562 IndentKind::Tab => '\t',
3563 }
3564 }
3565
3566 /// Consumes the current [IndentSize] and returns a new one that has
3567 /// been shrunk or enlarged by the given size along the given direction.
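    ///
    /// An illustrative sketch only (not a doctest):
    ///
    /// ```ignore
    /// // Indent by one level of four spaces: a width of 4 grows to 8.
    /// let grown = IndentSize::spaces(4).with_delta(Ordering::Greater, IndentSize::spaces(4));
    /// // Outdenting by more than the current size is a no-op rather than an underflow.
    /// let unchanged = IndentSize::spaces(4).with_delta(Ordering::Less, IndentSize::spaces(8));
    /// ```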
    pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
        match direction {
            Ordering::Less => {
                if self.kind == size.kind && self.len >= size.len {
                    self.len -= size.len;
                }
            }
            Ordering::Equal => {}
            Ordering::Greater => {
                if self.len == 0 {
                    self = size;
                } else if self.kind == size.kind {
                    self.len += size.len;
                }
            }
        }
        self
    }
}

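/// A minimal [File] implementation backed by in-memory values, for use in tests.
///
/// # Examples
///
/// A rough usage sketch (illustrative only, not a compiled doctest; `cx` stands in
/// for a `gpui::AppContext` available in the surrounding test):
///
/// ```ignore
/// let file = TestFile {
///     path: Path::new("src/main.rs").into(),
///     root_name: "my-project".to_string(),
/// };
/// assert_eq!(file.full_path(cx), PathBuf::from("my-project/src/main.rs"));
/// ```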
#[cfg(any(test, feature = "test-support"))]
pub struct TestFile {
    pub path: Arc<Path>,
    pub root_name: String,
}

#[cfg(any(test, feature = "test-support"))]
impl File for TestFile {
    fn path(&self) -> &Arc<Path> {
        &self.path
    }

    fn full_path(&self, _: &gpui::AppContext) -> PathBuf {
        PathBuf::from(&self.root_name).join(self.path.as_ref())
    }

    fn as_local(&self) -> Option<&dyn LocalFile> {
        None
    }

    fn mtime(&self) -> Option<SystemTime> {
        unimplemented!()
    }

    fn file_name<'a>(&'a self, _: &'a gpui::AppContext) -> &'a std::ffi::OsStr {
        self.path().file_name().unwrap_or(self.root_name.as_ref())
    }

    fn worktree_id(&self) -> usize {
        0
    }

    fn is_deleted(&self) -> bool {
        unimplemented!()
    }

    fn as_any(&self) -> &dyn std::any::Any {
        unimplemented!()
    }

    fn to_proto(&self) -> rpc::proto::File {
        unimplemented!()
    }

    fn is_private(&self) -> bool {
        false
    }
}

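/// Groups an iterator of row numbers into ranges of consecutive values, never
/// letting a single range grow beyond `max_len` values.
///
/// # Examples
///
/// A rough sketch of the intended behavior (illustrative only, not a compiled
/// doctest):
///
/// ```ignore
/// let ranges: Vec<_> = contiguous_ranges([1, 2, 3, 5].into_iter(), 100).collect();
/// assert_eq!(ranges, vec![1..4, 5..6]);
///
/// // With `max_len` of 2, a longer run is split into ranges of at most 2 values.
/// let ranges: Vec<_> = contiguous_ranges([0, 1, 2, 3].into_iter(), 2).collect();
/// assert_eq!(ranges, vec![0..2, 2..4]);
/// ```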
pub(crate) fn contiguous_ranges(
    values: impl Iterator<Item = u32>,
    max_len: usize,
) -> impl Iterator<Item = Range<u32>> {
    let mut values = values;
    let mut current_range: Option<Range<u32>> = None;
    std::iter::from_fn(move || loop {
        if let Some(value) = values.next() {
            if let Some(range) = &mut current_range {
                if value == range.end && range.len() < max_len {
                    range.end += 1;
                    continue;
                }
            }

            let prev_range = current_range.clone();
            current_range = Some(value..(value + 1));
            if prev_range.is_some() {
                return prev_range;
            }
        } else {
            return current_range.take();
        }
    })
}

/// Returns the [CharKind] for the given character. When a scope is provided,
/// characters listed in the scope's word-character settings are also classified
/// as word characters.
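///
/// # Examples
///
/// A rough sketch (illustrative only, not a compiled doctest):
///
/// ```ignore
/// assert_eq!(char_kind(&None, ' '), CharKind::Whitespace);
/// assert_eq!(char_kind(&None, 'a'), CharKind::Word);
/// assert_eq!(char_kind(&None, '-'), CharKind::Punctuation);
/// ```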
pub fn char_kind(scope: &Option<LanguageScope>, c: char) -> CharKind {
    if c.is_whitespace() {
        return CharKind::Whitespace;
    } else if c.is_alphanumeric() || c == '_' {
        return CharKind::Word;
    }

    if let Some(scope) = scope {
        if let Some(characters) = scope.word_characters() {
            if characters.contains(&c) {
                return CharKind::Word;
            }
        }
    }

    CharKind::Punctuation
}

/// Finds all of the ranges of whitespace that occur at the ends of lines
/// in the given rope.
///
/// This could also be done with a regex search, but this implementation
/// avoids copying text.
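///
/// # Examples
///
/// A rough sketch (illustrative only, not a compiled doctest; assumes a rope can
/// be built from a string slice as elsewhere in this crate):
///
/// ```ignore
/// let rope = Rope::from("fn main() {   \n}\t\t\n");
/// // Three trailing spaces on the first line, two trailing tabs on the second.
/// assert_eq!(trailing_whitespace_ranges(&rope), vec![11..14, 16..18]);
/// ```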
pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
    let mut ranges = Vec::new();

    let mut offset = 0;
    let mut prev_chunk_trailing_whitespace_range = 0..0;
    for chunk in rope.chunks() {
        let mut prev_line_trailing_whitespace_range = 0..0;
        for (i, line) in chunk.split('\n').enumerate() {
            let line_end_offset = offset + line.len();
            let trimmed_line_len = line.trim_end_matches(|c| matches!(c, ' ' | '\t')).len();
            let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;

            // If this chunk begins mid-line with pure whitespace, extend the
            // whitespace run carried over from the previous chunk.
            if i == 0 && trimmed_line_len == 0 {
                trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
            }
            if !prev_line_trailing_whitespace_range.is_empty() {
                ranges.push(prev_line_trailing_whitespace_range);
            }

            offset = line_end_offset + 1;
            prev_line_trailing_whitespace_range = trailing_whitespace_range;
        }

        // The last piece of the chunk has no trailing '\n', so undo the extra
        // offset added for it above.
        offset -= 1;
        prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
    }

    if !prev_chunk_trailing_whitespace_range.is_empty() {
        ranges.push(prev_chunk_trailing_whitespace_range);
    }

    ranges
}