1pub use crate::{
2 diagnostic_set::DiagnosticSet,
3 highlight_map::{HighlightId, HighlightMap},
4 markdown::ParsedMarkdown,
5 proto, Grammar, Language, LanguageRegistry,
6};
7use crate::{
8 diagnostic_set::{DiagnosticEntry, DiagnosticGroup},
9 language_settings::{language_settings, LanguageSettings},
10 markdown::parse_markdown,
11 outline::OutlineItem,
12 syntax_map::{
13 SyntaxLayer, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures, SyntaxMapMatches,
14 SyntaxSnapshot, ToTreeSitterPoint,
15 },
16 task_context::RunnableRange,
17 LanguageScope, Outline, RunnableTag,
18};
19use anyhow::{anyhow, Context, Result};
20pub use clock::ReplicaId;
21use futures::channel::oneshot;
22use gpui::{AppContext, EventEmitter, HighlightStyle, ModelContext, Task, TaskLabel};
23use lazy_static::lazy_static;
24use lsp::LanguageServerId;
25use parking_lot::Mutex;
26use similar::{ChangeTag, TextDiff};
27use smallvec::SmallVec;
28use smol::future::yield_now;
29use std::{
30 any::Any,
31 cmp::{self, Ordering},
32 collections::BTreeMap,
33 ffi::OsStr,
34 future::Future,
35 iter::{self, Iterator, Peekable},
36 mem,
37 ops::{Deref, Range},
38 path::{Path, PathBuf},
39 str,
40 sync::Arc,
41 time::{Duration, Instant, SystemTime},
42 vec,
43};
44use sum_tree::TreeMap;
45use text::operation_queue::OperationQueue;
46use text::*;
47pub use text::{
48 Anchor, Bias, Buffer as TextBuffer, BufferId, BufferSnapshot as TextBufferSnapshot, Edit,
49 OffsetRangeExt, OffsetUtf16, Patch, Point, PointUtf16, Rope, Selection, SelectionGoal,
50 Subscription, TextDimension, TextSummary, ToOffset, ToOffsetUtf16, ToPoint, ToPointUtf16,
51 Transaction, TransactionId, Unclipped,
52};
53use theme::SyntaxTheme;
54#[cfg(any(test, feature = "test-support"))]
55use util::RandomCharIter;
56use util::RangeExt;
57
58#[cfg(any(test, feature = "test-support"))]
59pub use {tree_sitter_rust, tree_sitter_typescript};
60
61pub use lsp::DiagnosticSeverity;
62
63lazy_static! {
64 /// A label for the background task spawned by the buffer to compute
65 /// a diff against the contents of its file.
66 pub static ref BUFFER_DIFF_TASK: TaskLabel = TaskLabel::new();
67}
68
/// Indicates whether a [`Buffer`] has permission to edit.
70#[derive(PartialEq, Clone, Copy, Debug)]
71pub enum Capability {
72 /// The buffer is a mutable replica.
73 ReadWrite,
74 /// The buffer is a read-only replica.
75 ReadOnly,
76}
77
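/// A row (line) index within a buffer.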
78pub type BufferRow = u32;
79
80/// An in-memory representation of a source code file, including its text,
81/// syntax trees, git status, and diagnostics.
82pub struct Buffer {
83 text: TextBuffer,
84 diff_base: Option<Rope>,
85 git_diff: git::diff::BufferDiff,
86 file: Option<Arc<dyn File>>,
87 /// The mtime of the file when this buffer was last loaded from
88 /// or saved to disk.
89 saved_mtime: Option<SystemTime>,
90 /// The version vector when this buffer was last loaded from
91 /// or saved to disk.
92 saved_version: clock::Global,
93 transaction_depth: usize,
94 was_dirty_before_starting_transaction: Option<bool>,
95 reload_task: Option<Task<Result<()>>>,
96 language: Option<Arc<Language>>,
97 autoindent_requests: Vec<Arc<AutoindentRequest>>,
98 pending_autoindent: Option<Task<()>>,
99 sync_parse_timeout: Duration,
100 syntax_map: Mutex<SyntaxMap>,
101 parsing_in_background: bool,
102 parse_count: usize,
103 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
104 remote_selections: TreeMap<ReplicaId, SelectionSet>,
105 selections_update_count: usize,
106 diagnostics_update_count: usize,
107 diagnostics_timestamp: clock::Lamport,
108 file_update_count: usize,
109 git_diff_update_count: usize,
110 completion_triggers: Vec<String>,
111 completion_triggers_timestamp: clock::Lamport,
112 deferred_ops: OperationQueue<Operation>,
113 capability: Capability,
114 has_conflict: bool,
115 diff_base_version: usize,
116}
117
118/// An immutable, cheaply cloneable representation of a fixed
119/// state of a buffer.
120pub struct BufferSnapshot {
121 text: text::BufferSnapshot,
122 git_diff: git::diff::BufferDiff,
123 pub(crate) syntax: SyntaxSnapshot,
124 file: Option<Arc<dyn File>>,
125 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
126 diagnostics_update_count: usize,
127 file_update_count: usize,
128 git_diff_update_count: usize,
129 remote_selections: TreeMap<ReplicaId, SelectionSet>,
130 selections_update_count: usize,
131 language: Option<Arc<Language>>,
132 parse_count: usize,
133}
134
135/// The kind and amount of indentation in a particular line. For now,
136/// assumes that indentation is all the same character.
137#[derive(Clone, Copy, Debug, PartialEq, Eq, Default)]
138pub struct IndentSize {
139 /// The number of bytes that comprise the indentation.
140 pub len: u32,
141 /// The kind of whitespace used for indentation.
142 pub kind: IndentKind,
143}
144
145/// A whitespace character that's used for indentation.
146#[derive(Clone, Copy, Debug, PartialEq, Eq, Default)]
147pub enum IndentKind {
148 /// An ASCII space character.
149 #[default]
150 Space,
151 /// An ASCII tab character.
152 Tab,
153}
154
155/// The shape of a selection cursor.
156#[derive(Copy, Clone, PartialEq, Eq, Debug, Default)]
157pub enum CursorShape {
158 /// A vertical bar
159 #[default]
160 Bar,
161 /// A block that surrounds the following character
162 Block,
163 /// An underline that runs along the following character
164 Underscore,
165 /// A box drawn around the following character
166 Hollow,
167}
168
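/// The set of selections most recently broadcast by a given replica of the buffer,
/// along with how those selections should be rendered.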
169#[derive(Clone, Debug)]
170struct SelectionSet {
171 line_mode: bool,
172 cursor_shape: CursorShape,
173 selections: Arc<[Selection<Anchor>]>,
174 lamport_timestamp: clock::Lamport,
175}
176
177/// A diagnostic associated with a certain range of a buffer.
178#[derive(Clone, Debug, PartialEq, Eq)]
179pub struct Diagnostic {
180 /// The name of the service that produced this diagnostic.
181 pub source: Option<String>,
182 /// A machine-readable code that identifies this diagnostic.
183 pub code: Option<String>,
184 /// Whether this diagnostic is a hint, warning, or error.
185 pub severity: DiagnosticSeverity,
186 /// The human-readable message associated with this diagnostic.
187 pub message: String,
188 /// An id that identifies the group to which this diagnostic belongs.
189 ///
190 /// When a language server produces a diagnostic with
191 /// one or more associated diagnostics, those diagnostics are all
192 /// assigned a single group id.
193 pub group_id: usize,
194 /// Whether this diagnostic is the primary diagnostic for its group.
195 ///
196 /// In a given group, the primary diagnostic is the top-level diagnostic
197 /// returned by the language server. The non-primary diagnostics are the
198 /// associated diagnostics.
199 pub is_primary: bool,
200 /// Whether this diagnostic is considered to originate from an analysis of
201 /// files on disk, as opposed to any unsaved buffer contents. This is a
    /// property of a given diagnostic source, and is configured for a given
    /// language server via the
    /// [`LspAdapter::disk_based_diagnostic_sources`](crate::LspAdapter::disk_based_diagnostic_sources) method.
205 pub is_disk_based: bool,
206 /// Whether this diagnostic marks unnecessary code.
207 pub is_unnecessary: bool,
208}
209
210/// TODO - move this into the `project` crate and make it private.
211pub async fn prepare_completion_documentation(
212 documentation: &lsp::Documentation,
213 language_registry: &Arc<LanguageRegistry>,
214 language: Option<Arc<Language>>,
215) -> Documentation {
216 match documentation {
217 lsp::Documentation::String(text) => {
218 if text.lines().count() <= 1 {
219 Documentation::SingleLine(text.clone())
220 } else {
221 Documentation::MultiLinePlainText(text.clone())
222 }
223 }
224
225 lsp::Documentation::MarkupContent(lsp::MarkupContent { kind, value }) => match kind {
226 lsp::MarkupKind::PlainText => {
227 if value.lines().count() <= 1 {
228 Documentation::SingleLine(value.clone())
229 } else {
230 Documentation::MultiLinePlainText(value.clone())
231 }
232 }
233
234 lsp::MarkupKind::Markdown => {
235 let parsed = parse_markdown(value, language_registry, language).await;
236 Documentation::MultiLineMarkdown(parsed)
237 }
238 },
239 }
240}
241
242/// Documentation associated with a [`Completion`].
243#[derive(Clone, Debug)]
244pub enum Documentation {
245 /// There is no documentation for this completion.
246 Undocumented,
247 /// A single line of documentation.
248 SingleLine(String),
249 /// Multiple lines of plain text documentation.
250 MultiLinePlainText(String),
251 /// Markdown documentation.
252 MultiLineMarkdown(ParsedMarkdown),
253}
254
255/// An operation used to synchronize this buffer with its other replicas.
256#[derive(Clone, Debug, PartialEq)]
257pub enum Operation {
258 /// A text operation.
259 Buffer(text::Operation),
260
261 /// An update to the buffer's diagnostics.
262 UpdateDiagnostics {
263 /// The id of the language server that produced the new diagnostics.
264 server_id: LanguageServerId,
265 /// The diagnostics.
266 diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
267 /// The buffer's lamport timestamp.
268 lamport_timestamp: clock::Lamport,
269 },
270
271 /// An update to the most recent selections in this buffer.
272 UpdateSelections {
273 /// The selections.
274 selections: Arc<[Selection<Anchor>]>,
275 /// The buffer's lamport timestamp.
276 lamport_timestamp: clock::Lamport,
277 /// Whether the selections are in 'line mode'.
278 line_mode: bool,
279 /// The [`CursorShape`] associated with these selections.
280 cursor_shape: CursorShape,
281 },
282
283 /// An update to the characters that should trigger autocompletion
284 /// for this buffer.
285 UpdateCompletionTriggers {
286 /// The characters that trigger autocompletion.
287 triggers: Vec<String>,
288 /// The buffer's lamport timestamp.
289 lamport_timestamp: clock::Lamport,
290 },
291}
292
293/// An event that occurs in a buffer.
294#[derive(Clone, Debug, PartialEq)]
295pub enum Event {
296 /// The buffer was changed in a way that must be
297 /// propagated to its other replicas.
298 Operation(Operation),
299 /// The buffer was edited.
300 Edited,
301 /// The buffer's `dirty` bit changed.
302 DirtyChanged,
303 /// The buffer was saved.
304 Saved,
305 /// The buffer's file was changed on disk.
306 FileHandleChanged,
307 /// The buffer was reloaded.
308 Reloaded,
309 /// The buffer's diff_base changed.
310 DiffBaseChanged,
311 /// Buffer's excerpts for a certain diff base were recalculated.
312 DiffUpdated,
313 /// The buffer's language was changed.
314 LanguageChanged,
315 /// The buffer's syntax trees were updated.
316 Reparsed,
317 /// The buffer's diagnostics were updated.
318 DiagnosticsUpdated,
319 /// The buffer gained or lost editing capabilities.
320 CapabilityChanged,
321 /// The buffer was explicitly requested to close.
322 Closed,
323}
324
325/// The file associated with a buffer.
326pub trait File: Send + Sync {
327 /// Returns the [`LocalFile`] associated with this file, if the
328 /// file is local.
329 fn as_local(&self) -> Option<&dyn LocalFile>;
330
331 /// Returns whether this file is local.
332 fn is_local(&self) -> bool {
333 self.as_local().is_some()
334 }
335
336 /// Returns the file's mtime.
337 fn mtime(&self) -> Option<SystemTime>;
338
339 /// Returns the path of this file relative to the worktree's root directory.
340 fn path(&self) -> &Arc<Path>;
341
342 /// Returns the path of this file relative to the worktree's parent directory (this means it
343 /// includes the name of the worktree's root folder).
344 fn full_path(&self, cx: &AppContext) -> PathBuf;
345
346 /// Returns the last component of this handle's absolute path. If this handle refers to the root
347 /// of its worktree, then this method will return the name of the worktree itself.
348 fn file_name<'a>(&'a self, cx: &'a AppContext) -> &'a OsStr;
349
350 /// Returns the id of the worktree to which this file belongs.
351 ///
352 /// This is needed for looking up project-specific settings.
353 fn worktree_id(&self) -> usize;
354
355 /// Returns whether the file has been deleted.
356 fn is_deleted(&self) -> bool;
357
    /// Returns whether the file existed on disk at one point.
359 fn is_created(&self) -> bool {
360 self.mtime().is_some()
361 }
362
363 /// Converts this file into an [`Any`] trait object.
364 fn as_any(&self) -> &dyn Any;
365
366 /// Converts this file into a protobuf message.
367 fn to_proto(&self) -> rpc::proto::File;
368
369 /// Return whether Zed considers this to be a private file.
370 fn is_private(&self) -> bool;
371}
372
373/// The file associated with a buffer, in the case where the file is on the local disk.
374pub trait LocalFile: File {
375 /// Returns the absolute path of this file.
376 fn abs_path(&self, cx: &AppContext) -> PathBuf;
377
378 /// Loads the file's contents from disk.
379 fn load(&self, cx: &AppContext) -> Task<Result<String>>;
380
381 /// Called when the buffer is reloaded from disk.
382 fn buffer_reloaded(
383 &self,
384 buffer_id: BufferId,
385 version: &clock::Global,
386 line_ending: LineEnding,
387 mtime: Option<SystemTime>,
388 cx: &mut AppContext,
389 );
390
391 /// Returns true if the file should not be shared with collaborators.
392 fn is_private(&self, _: &AppContext) -> bool {
393 false
394 }
395}
396
397/// The auto-indent behavior associated with an editing operation.
398/// For some editing operations, each affected line of text has its
399/// indentation recomputed. For other operations, the entire block
400/// of edited text is adjusted uniformly.
401#[derive(Clone, Debug)]
402pub enum AutoindentMode {
403 /// Indent each line of inserted text.
404 EachLine,
405 /// Apply the same indentation adjustment to all of the lines
406 /// in a given insertion.
407 Block {
408 /// The original indentation level of the first line of each
409 /// insertion, if it has been copied.
410 original_indent_columns: Vec<u32>,
411 },
412}
413
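/// A request to recompute the indentation of a set of edited ranges, captured
/// against a snapshot of the buffer taken before the edit was applied.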
414#[derive(Clone)]
415struct AutoindentRequest {
416 before_edit: BufferSnapshot,
417 entries: Vec<AutoindentRequestEntry>,
418 is_block_mode: bool,
419}
420
421#[derive(Clone)]
422struct AutoindentRequestEntry {
423 /// A range of the buffer whose indentation should be adjusted.
424 range: Range<Anchor>,
425 /// Whether or not these lines should be considered brand new, for the
426 /// purpose of auto-indent. When text is not new, its indentation will
427 /// only be adjusted if the suggested indentation level has *changed*
428 /// since the edit was made.
429 first_line_is_new: bool,
430 indent_size: IndentSize,
431 original_indent_column: Option<u32>,
432}
433
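/// A suggested indentation for a single line, expressed as a delta relative to
/// the indentation of another ("basis") row.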
434#[derive(Debug)]
435struct IndentSuggestion {
436 basis_row: u32,
437 delta: Ordering,
438 within_error: bool,
439}
440
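/// Syntax-highlighting state that a [`BufferChunks`] iterator carries as it
/// walks the buffer's syntax captures.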
441struct BufferChunkHighlights<'a> {
442 captures: SyntaxMapCaptures<'a>,
443 next_capture: Option<SyntaxMapCapture<'a>>,
444 stack: Vec<(usize, HighlightId)>,
445 highlight_maps: Vec<HighlightMap>,
446}
447
448/// An iterator that yields chunks of a buffer's text, along with their
449/// syntax highlights and diagnostic status.
450pub struct BufferChunks<'a> {
451 range: Range<usize>,
452 chunks: text::Chunks<'a>,
453 diagnostic_endpoints: Peekable<vec::IntoIter<DiagnosticEndpoint>>,
454 error_depth: usize,
455 warning_depth: usize,
456 information_depth: usize,
457 hint_depth: usize,
458 unnecessary_depth: usize,
459 highlights: Option<BufferChunkHighlights<'a>>,
460}
461
462/// A chunk of a buffer's text, along with its syntax highlight and
463/// diagnostic status.
464#[derive(Clone, Copy, Debug, Default)]
465pub struct Chunk<'a> {
466 /// The text of the chunk.
467 pub text: &'a str,
468 /// The syntax highlighting style of the chunk.
469 pub syntax_highlight_id: Option<HighlightId>,
470 /// The highlight style that has been applied to this chunk in
471 /// the editor.
472 pub highlight_style: Option<HighlightStyle>,
473 /// The severity of diagnostic associated with this chunk, if any.
474 pub diagnostic_severity: Option<DiagnosticSeverity>,
475 /// Whether this chunk of text is marked as unnecessary.
476 pub is_unnecessary: bool,
477 /// Whether this chunk of text was originally a tab character.
478 pub is_tab: bool,
479}
480
481/// A set of edits to a given version of a buffer, computed asynchronously.
482#[derive(Debug)]
483pub struct Diff {
484 pub(crate) base_version: clock::Global,
485 line_ending: LineEnding,
486 edits: Vec<(Range<usize>, Arc<str>)>,
487}
488
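/// The start or end of a diagnostic's range, used by [`BufferChunks`] to track
/// which diagnostics overlap each chunk of text.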
489#[derive(Clone, Copy)]
490pub(crate) struct DiagnosticEndpoint {
491 offset: usize,
492 is_start: bool,
493 severity: DiagnosticSeverity,
494 is_unnecessary: bool,
495}
496
497/// A class of characters, used for characterizing a run of text.
498#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
499pub enum CharKind {
500 /// Whitespace.
501 Whitespace,
502 /// Punctuation.
503 Punctuation,
504 /// Word.
505 Word,
506}
507
/// A runnable is a set of data about a region of a buffer that can be resolved into a task.
pub struct Runnable {
    /// The runnable tags that matched this region.
    pub tags: SmallVec<[RunnableTag; 1]>,
    /// The language associated with this runnable.
    pub language: Arc<Language>,
    /// The buffer in which this runnable is located.
    pub buffer: BufferId,
}
514
515impl Buffer {
516 /// Create a new buffer with the given base text.
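    ///
    /// A minimal usage sketch (assumes a gpui model context; `new_model` and the
    /// text are illustrative):
    ///
    /// ```ignore
    /// let buffer = cx.new_model(|cx| Buffer::local("hello world", cx));
    /// ```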
517 pub fn local<T: Into<String>>(base_text: T, cx: &mut ModelContext<Self>) -> Self {
518 Self::build(
519 TextBuffer::new(0, cx.entity_id().as_non_zero_u64().into(), base_text.into()),
520 None,
521 None,
522 Capability::ReadWrite,
523 )
524 }
525
526 /// Create a new buffer with the given base text that has proper line endings and other normalization applied.
527 pub fn local_normalized(
528 base_text_normalized: Rope,
529 line_ending: LineEnding,
530 cx: &mut ModelContext<Self>,
531 ) -> Self {
532 Self::build(
533 TextBuffer::new_normalized(
534 0,
535 cx.entity_id().as_non_zero_u64().into(),
536 line_ending,
537 base_text_normalized,
538 ),
539 None,
540 None,
541 Capability::ReadWrite,
542 )
543 }
544
545 /// Create a new buffer that is a replica of a remote buffer.
546 pub fn remote(
547 remote_id: BufferId,
548 replica_id: ReplicaId,
549 capability: Capability,
550 base_text: impl Into<String>,
551 ) -> Self {
552 Self::build(
553 TextBuffer::new(replica_id, remote_id, base_text.into()),
554 None,
555 None,
556 capability,
557 )
558 }
559
560 /// Create a new buffer that is a replica of a remote buffer, populating its
561 /// state from the given protobuf message.
562 pub fn from_proto(
563 replica_id: ReplicaId,
564 capability: Capability,
565 message: proto::BufferState,
566 file: Option<Arc<dyn File>>,
567 ) -> Result<Self> {
568 let buffer_id = BufferId::new(message.id)
569 .with_context(|| anyhow!("Could not deserialize buffer_id"))?;
570 let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text);
571 let mut this = Self::build(buffer, message.diff_base, file, capability);
572 this.text.set_line_ending(proto::deserialize_line_ending(
573 rpc::proto::LineEnding::from_i32(message.line_ending)
574 .ok_or_else(|| anyhow!("missing line_ending"))?,
575 ));
576 this.saved_version = proto::deserialize_version(&message.saved_version);
577 this.saved_mtime = message.saved_mtime.map(|time| time.into());
578 Ok(this)
579 }
580
581 /// Serialize the buffer's state to a protobuf message.
582 pub fn to_proto(&self) -> proto::BufferState {
583 proto::BufferState {
584 id: self.remote_id().into(),
585 file: self.file.as_ref().map(|f| f.to_proto()),
586 base_text: self.base_text().to_string(),
587 diff_base: self.diff_base.as_ref().map(|h| h.to_string()),
588 line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
589 saved_version: proto::serialize_version(&self.saved_version),
590 saved_mtime: self.saved_mtime.map(|time| time.into()),
591 }
592 }
593
    /// Serialize all of the changes to the buffer since the given version as protobuf operations.
595 pub fn serialize_ops(
596 &self,
597 since: Option<clock::Global>,
598 cx: &AppContext,
599 ) -> Task<Vec<proto::Operation>> {
600 let mut operations = Vec::new();
601 operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));
602
603 operations.extend(self.remote_selections.iter().map(|(_, set)| {
604 proto::serialize_operation(&Operation::UpdateSelections {
605 selections: set.selections.clone(),
606 lamport_timestamp: set.lamport_timestamp,
607 line_mode: set.line_mode,
608 cursor_shape: set.cursor_shape,
609 })
610 }));
611
612 for (server_id, diagnostics) in &self.diagnostics {
613 operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
614 lamport_timestamp: self.diagnostics_timestamp,
615 server_id: *server_id,
616 diagnostics: diagnostics.iter().cloned().collect(),
617 }));
618 }
619
620 operations.push(proto::serialize_operation(
621 &Operation::UpdateCompletionTriggers {
622 triggers: self.completion_triggers.clone(),
623 lamport_timestamp: self.completion_triggers_timestamp,
624 },
625 ));
626
627 let text_operations = self.text.operations().clone();
628 cx.background_executor().spawn(async move {
629 let since = since.unwrap_or_default();
630 operations.extend(
631 text_operations
632 .iter()
633 .filter(|(_, op)| !since.observed(op.timestamp()))
634 .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
635 );
636 operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
637 operations
638 })
639 }
640
641 /// Assign a language to the buffer, returning the buffer.
642 pub fn with_language(mut self, language: Arc<Language>, cx: &mut ModelContext<Self>) -> Self {
643 self.set_language(Some(language), cx);
644 self
645 }
646
647 /// Returns the [Capability] of this buffer.
648 pub fn capability(&self) -> Capability {
649 self.capability
650 }
651
652 /// Whether this buffer can only be read.
653 pub fn read_only(&self) -> bool {
654 self.capability == Capability::ReadOnly
655 }
656
657 /// Builds a [Buffer] with the given underlying [TextBuffer], diff base, [File] and [Capability].
658 pub fn build(
659 buffer: TextBuffer,
660 diff_base: Option<String>,
661 file: Option<Arc<dyn File>>,
662 capability: Capability,
663 ) -> Self {
664 let saved_mtime = file.as_ref().and_then(|file| file.mtime());
665
666 Self {
667 saved_mtime,
668 saved_version: buffer.version(),
669 reload_task: None,
670 transaction_depth: 0,
671 was_dirty_before_starting_transaction: None,
672 text: buffer,
673 diff_base: diff_base
674 .map(|mut raw_diff_base| {
675 LineEnding::normalize(&mut raw_diff_base);
676 raw_diff_base
677 })
678 .map(Rope::from),
679 diff_base_version: 0,
680 git_diff: git::diff::BufferDiff::new(),
681 file,
682 capability,
683 syntax_map: Mutex::new(SyntaxMap::new()),
684 parsing_in_background: false,
685 parse_count: 0,
686 sync_parse_timeout: Duration::from_millis(1),
687 autoindent_requests: Default::default(),
688 pending_autoindent: Default::default(),
689 language: None,
690 remote_selections: Default::default(),
691 selections_update_count: 0,
692 diagnostics: Default::default(),
693 diagnostics_update_count: 0,
694 diagnostics_timestamp: Default::default(),
695 file_update_count: 0,
696 git_diff_update_count: 0,
697 completion_triggers: Default::default(),
698 completion_triggers_timestamp: Default::default(),
699 deferred_ops: OperationQueue::new(),
700 has_conflict: false,
701 }
702 }
703
704 /// Retrieve a snapshot of the buffer's current state. This is computationally
705 /// cheap, and allows reading from the buffer on a background thread.
706 pub fn snapshot(&self) -> BufferSnapshot {
707 let text = self.text.snapshot();
708 let mut syntax_map = self.syntax_map.lock();
709 syntax_map.interpolate(&text);
710 let syntax = syntax_map.snapshot();
711
712 BufferSnapshot {
713 text,
714 syntax,
715 git_diff: self.git_diff.clone(),
716 file: self.file.clone(),
717 remote_selections: self.remote_selections.clone(),
718 diagnostics: self.diagnostics.clone(),
719 diagnostics_update_count: self.diagnostics_update_count,
720 file_update_count: self.file_update_count,
721 git_diff_update_count: self.git_diff_update_count,
722 language: self.language.clone(),
723 parse_count: self.parse_count,
724 selections_update_count: self.selections_update_count,
725 }
726 }
727
728 #[cfg(test)]
729 pub(crate) fn as_text_snapshot(&self) -> &text::BufferSnapshot {
730 &self.text
731 }
732
733 /// Retrieve a snapshot of the buffer's raw text, without any
734 /// language-related state like the syntax tree or diagnostics.
735 pub fn text_snapshot(&self) -> text::BufferSnapshot {
736 self.text.snapshot()
737 }
738
739 /// The file associated with the buffer, if any.
740 pub fn file(&self) -> Option<&Arc<dyn File>> {
741 self.file.as_ref()
742 }
743
744 /// The version of the buffer that was last saved or reloaded from disk.
745 pub fn saved_version(&self) -> &clock::Global {
746 &self.saved_version
747 }
748
749 /// The mtime of the buffer's file when the buffer was last saved or reloaded from disk.
750 pub fn saved_mtime(&self) -> Option<SystemTime> {
751 self.saved_mtime
752 }
753
754 /// Assign a language to the buffer.
755 pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut ModelContext<Self>) {
756 self.parse_count += 1;
757 self.syntax_map.lock().clear();
758 self.language = language;
759 self.reparse(cx);
760 cx.emit(Event::LanguageChanged);
761 }
762
763 /// Assign a language registry to the buffer. This allows the buffer to retrieve
764 /// other languages if parts of the buffer are written in different languages.
765 pub fn set_language_registry(&mut self, language_registry: Arc<LanguageRegistry>) {
766 self.syntax_map
767 .lock()
768 .set_language_registry(language_registry);
769 }
770
771 /// Assign the buffer a new [Capability].
772 pub fn set_capability(&mut self, capability: Capability, cx: &mut ModelContext<Self>) {
773 self.capability = capability;
774 cx.emit(Event::CapabilityChanged)
775 }
776
777 /// This method is called to signal that the buffer has been saved.
778 pub fn did_save(
779 &mut self,
780 version: clock::Global,
781 mtime: Option<SystemTime>,
782 cx: &mut ModelContext<Self>,
783 ) {
784 self.saved_version = version;
785 self.has_conflict = false;
786 self.saved_mtime = mtime;
787 cx.emit(Event::Saved);
788 cx.notify();
789 }
790
791 /// Reloads the contents of the buffer from disk.
792 pub fn reload(
793 &mut self,
794 cx: &mut ModelContext<Self>,
795 ) -> oneshot::Receiver<Option<Transaction>> {
796 let (tx, rx) = futures::channel::oneshot::channel();
797 let prev_version = self.text.version();
798 self.reload_task = Some(cx.spawn(|this, mut cx| async move {
799 let Some((new_mtime, new_text)) = this.update(&mut cx, |this, cx| {
800 let file = this.file.as_ref()?.as_local()?;
801 Some((file.mtime(), file.load(cx)))
802 })?
803 else {
804 return Ok(());
805 };
806
807 let new_text = new_text.await?;
808 let diff = this
809 .update(&mut cx, |this, cx| this.diff(new_text.clone(), cx))?
810 .await;
811 this.update(&mut cx, |this, cx| {
812 if this.version() == diff.base_version {
813 this.finalize_last_transaction();
814 this.apply_diff(diff, cx);
815 tx.send(this.finalize_last_transaction().cloned()).ok();
816 this.has_conflict = false;
817 this.did_reload(this.version(), this.line_ending(), new_mtime, cx);
818 } else {
819 if !diff.edits.is_empty()
820 || this
821 .edits_since::<usize>(&diff.base_version)
822 .next()
823 .is_some()
824 {
825 this.has_conflict = true;
826 }
827
828 this.did_reload(prev_version, this.line_ending(), this.saved_mtime, cx);
829 }
830
831 this.reload_task.take();
832 })
833 }));
834 rx
835 }
836
837 /// This method is called to signal that the buffer has been reloaded.
838 pub fn did_reload(
839 &mut self,
840 version: clock::Global,
841 line_ending: LineEnding,
842 mtime: Option<SystemTime>,
843 cx: &mut ModelContext<Self>,
844 ) {
845 self.saved_version = version;
846 self.text.set_line_ending(line_ending);
847 self.saved_mtime = mtime;
848 if let Some(file) = self.file.as_ref().and_then(|f| f.as_local()) {
849 file.buffer_reloaded(
850 self.remote_id(),
851 &self.saved_version,
852 self.line_ending(),
853 self.saved_mtime,
854 cx,
855 );
856 }
857 cx.emit(Event::Reloaded);
858 cx.notify();
859 }
860
861 /// Updates the [File] backing this buffer. This should be called when
862 /// the file has changed or has been deleted.
863 pub fn file_updated(&mut self, new_file: Arc<dyn File>, cx: &mut ModelContext<Self>) {
864 let mut file_changed = false;
865
866 if let Some(old_file) = self.file.as_ref() {
867 if new_file.path() != old_file.path() {
868 file_changed = true;
869 }
870
871 if new_file.is_deleted() {
872 if !old_file.is_deleted() {
873 file_changed = true;
874 if !self.is_dirty() {
875 cx.emit(Event::DirtyChanged);
876 }
877 }
878 } else {
879 let new_mtime = new_file.mtime();
880 if new_mtime != old_file.mtime() {
881 file_changed = true;
882
883 if !self.is_dirty() {
884 self.reload(cx).close();
885 }
886 }
887 }
888 } else {
889 file_changed = true;
890 };
891
892 self.file = Some(new_file);
893 if file_changed {
894 self.file_update_count += 1;
895 cx.emit(Event::FileHandleChanged);
896 cx.notify();
897 }
898 }
899
    /// Returns the current diff base; see [`Buffer::set_diff_base`].
901 pub fn diff_base(&self) -> Option<&Rope> {
902 self.diff_base.as_ref()
903 }
904
905 /// Sets the text that will be used to compute a Git diff
906 /// against the buffer text.
907 pub fn set_diff_base(&mut self, diff_base: Option<String>, cx: &mut ModelContext<Self>) {
908 self.diff_base = diff_base
909 .map(|mut raw_diff_base| {
910 LineEnding::normalize(&mut raw_diff_base);
911 raw_diff_base
912 })
913 .map(Rope::from);
914 self.diff_base_version += 1;
915 if let Some(recalc_task) = self.git_diff_recalc(cx) {
916 cx.spawn(|buffer, mut cx| async move {
917 recalc_task.await;
918 buffer
919 .update(&mut cx, |_, cx| {
920 cx.emit(Event::DiffBaseChanged);
921 })
922 .ok();
923 })
924 .detach();
925 }
926 }
927
    /// Returns a version number that is incremented each time a new diff base is set for the buffer.
929 pub fn diff_base_version(&self) -> usize {
930 self.diff_base_version
931 }
932
933 /// Recomputes the Git diff status.
934 pub fn git_diff_recalc(&mut self, cx: &mut ModelContext<Self>) -> Option<Task<()>> {
935 let diff_base = self.diff_base.clone()?;
936 let snapshot = self.snapshot();
937
938 let mut diff = self.git_diff.clone();
939 let diff = cx.background_executor().spawn(async move {
940 diff.update(&diff_base, &snapshot).await;
941 diff
942 });
943
944 Some(cx.spawn(|this, mut cx| async move {
945 let buffer_diff = diff.await;
946 this.update(&mut cx, |this, cx| {
947 this.git_diff = buffer_diff;
948 this.git_diff_update_count += 1;
949 cx.emit(Event::DiffUpdated);
950 })
951 .ok();
952 }))
953 }
954
955 /// Returns the primary [Language] assigned to this [Buffer].
956 pub fn language(&self) -> Option<&Arc<Language>> {
957 self.language.as_ref()
958 }
959
960 /// Returns the [Language] at the given location.
961 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
962 let offset = position.to_offset(self);
963 self.syntax_map
964 .lock()
965 .layers_for_range(offset..offset, &self.text)
966 .last()
967 .map(|info| info.language.clone())
968 .or_else(|| self.language.clone())
969 }
970
971 /// The number of times the buffer was parsed.
972 pub fn parse_count(&self) -> usize {
973 self.parse_count
974 }
975
976 /// The number of times selections were updated.
977 pub fn selections_update_count(&self) -> usize {
978 self.selections_update_count
979 }
980
981 /// The number of times diagnostics were updated.
982 pub fn diagnostics_update_count(&self) -> usize {
983 self.diagnostics_update_count
984 }
985
986 /// The number of times the underlying file was updated.
987 pub fn file_update_count(&self) -> usize {
988 self.file_update_count
989 }
990
991 /// The number of times the git diff status was updated.
992 pub fn git_diff_update_count(&self) -> usize {
993 self.git_diff_update_count
994 }
995
996 /// Whether the buffer is being parsed in the background.
997 #[cfg(any(test, feature = "test-support"))]
998 pub fn is_parsing(&self) -> bool {
999 self.parsing_in_background
1000 }
1001
1002 /// Indicates whether the buffer contains any regions that may be
1003 /// written in a language that hasn't been loaded yet.
1004 pub fn contains_unknown_injections(&self) -> bool {
1005 self.syntax_map.lock().contains_unknown_injections()
1006 }
1007
1008 #[cfg(test)]
1009 pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
1010 self.sync_parse_timeout = timeout;
1011 }
1012
1013 /// Called after an edit to synchronize the buffer's main parse tree with
1014 /// the buffer's new underlying state.
1015 ///
1016 /// Locks the syntax map and interpolates the edits since the last reparse
1017 /// into the foreground syntax tree.
1018 ///
1019 /// Then takes a stable snapshot of the syntax map before unlocking it.
1020 /// The snapshot with the interpolated edits is sent to a background thread,
1021 /// where we ask Tree-sitter to perform an incremental parse.
1022 ///
    /// Meanwhile, in the foreground, we block the main thread for up to 1ms
    /// (the buffer's `sync_parse_timeout`) waiting for the parse to complete.
    /// If it finishes within that window, we proceed synchronously.
    ///
    /// If we time out waiting on the parse, we return with the interpolated tree
    /// still in the foreground and spawn a second task that waits for the
    /// background parse to finish. When it completes, it calls back into the
    /// main thread and assigns the new parse state.
1031 ///
1032 /// If the buffer or grammar changed since the start of the background parse,
1033 /// initiate an additional reparse recursively. To avoid concurrent parses
1034 /// for the same buffer, we only initiate a new parse if we are not already
1035 /// parsing in the background.
1036 pub fn reparse(&mut self, cx: &mut ModelContext<Self>) {
1037 if self.parsing_in_background {
1038 return;
1039 }
1040 let language = if let Some(language) = self.language.clone() {
1041 language
1042 } else {
1043 return;
1044 };
1045
1046 let text = self.text_snapshot();
1047 let parsed_version = self.version();
1048
1049 let mut syntax_map = self.syntax_map.lock();
1050 syntax_map.interpolate(&text);
1051 let language_registry = syntax_map.language_registry();
1052 let mut syntax_snapshot = syntax_map.snapshot();
1053 drop(syntax_map);
1054
1055 let parse_task = cx.background_executor().spawn({
1056 let language = language.clone();
1057 let language_registry = language_registry.clone();
1058 async move {
1059 syntax_snapshot.reparse(&text, language_registry, language);
1060 syntax_snapshot
1061 }
1062 });
1063
1064 match cx
1065 .background_executor()
1066 .block_with_timeout(self.sync_parse_timeout, parse_task)
1067 {
1068 Ok(new_syntax_snapshot) => {
1069 self.did_finish_parsing(new_syntax_snapshot, cx);
1070 return;
1071 }
1072 Err(parse_task) => {
1073 self.parsing_in_background = true;
1074 cx.spawn(move |this, mut cx| async move {
1075 let new_syntax_map = parse_task.await;
1076 this.update(&mut cx, move |this, cx| {
1077 let grammar_changed =
1078 this.language.as_ref().map_or(true, |current_language| {
1079 !Arc::ptr_eq(&language, current_language)
1080 });
1081 let language_registry_changed = new_syntax_map
1082 .contains_unknown_injections()
1083 && language_registry.map_or(false, |registry| {
1084 registry.version() != new_syntax_map.language_registry_version()
1085 });
1086 let parse_again = language_registry_changed
1087 || grammar_changed
1088 || this.version.changed_since(&parsed_version);
1089 this.did_finish_parsing(new_syntax_map, cx);
1090 this.parsing_in_background = false;
1091 if parse_again {
1092 this.reparse(cx);
1093 }
1094 })
1095 .ok();
1096 })
1097 .detach();
1098 }
1099 }
1100 }
1101
1102 fn did_finish_parsing(&mut self, syntax_snapshot: SyntaxSnapshot, cx: &mut ModelContext<Self>) {
1103 self.parse_count += 1;
1104 self.syntax_map.lock().did_parse(syntax_snapshot);
1105 self.request_autoindent(cx);
1106 cx.emit(Event::Reparsed);
1107 cx.notify();
1108 }
1109
1110 /// Assign to the buffer a set of diagnostics created by a given language server.
1111 pub fn update_diagnostics(
1112 &mut self,
1113 server_id: LanguageServerId,
1114 diagnostics: DiagnosticSet,
1115 cx: &mut ModelContext<Self>,
1116 ) {
1117 let lamport_timestamp = self.text.lamport_clock.tick();
1118 let op = Operation::UpdateDiagnostics {
1119 server_id,
1120 diagnostics: diagnostics.iter().cloned().collect(),
1121 lamport_timestamp,
1122 };
1123 self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
1124 self.send_operation(op, cx);
1125 }
1126
1127 fn request_autoindent(&mut self, cx: &mut ModelContext<Self>) {
1128 if let Some(indent_sizes) = self.compute_autoindents() {
1129 let indent_sizes = cx.background_executor().spawn(indent_sizes);
1130 match cx
1131 .background_executor()
1132 .block_with_timeout(Duration::from_micros(500), indent_sizes)
1133 {
1134 Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
1135 Err(indent_sizes) => {
1136 self.pending_autoindent = Some(cx.spawn(|this, mut cx| async move {
1137 let indent_sizes = indent_sizes.await;
1138 this.update(&mut cx, |this, cx| {
1139 this.apply_autoindents(indent_sizes, cx);
1140 })
1141 .ok();
1142 }));
1143 }
1144 }
1145 } else {
1146 self.autoindent_requests.clear();
1147 }
1148 }
1149
1150 fn compute_autoindents(&self) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>>> {
1151 let max_rows_between_yields = 100;
1152 let snapshot = self.snapshot();
1153 if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
1154 return None;
1155 }
1156
1157 let autoindent_requests = self.autoindent_requests.clone();
1158 Some(async move {
1159 let mut indent_sizes = BTreeMap::new();
1160 for request in autoindent_requests {
1161 // Resolve each edited range to its row in the current buffer and in the
1162 // buffer before this batch of edits.
1163 let mut row_ranges = Vec::new();
1164 let mut old_to_new_rows = BTreeMap::new();
1165 let mut language_indent_sizes_by_new_row = Vec::new();
1166 for entry in &request.entries {
1167 let position = entry.range.start;
1168 let new_row = position.to_point(&snapshot).row;
1169 let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
1170 language_indent_sizes_by_new_row.push((new_row, entry.indent_size));
1171
1172 if !entry.first_line_is_new {
1173 let old_row = position.to_point(&request.before_edit).row;
1174 old_to_new_rows.insert(old_row, new_row);
1175 }
1176 row_ranges.push((new_row..new_end_row, entry.original_indent_column));
1177 }
1178
1179 // Build a map containing the suggested indentation for each of the edited lines
1180 // with respect to the state of the buffer before these edits. This map is keyed
1181 // by the rows for these lines in the current state of the buffer.
1182 let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
1183 let old_edited_ranges =
1184 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
1185 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1186 let mut language_indent_size = IndentSize::default();
1187 for old_edited_range in old_edited_ranges {
1188 let suggestions = request
1189 .before_edit
1190 .suggest_autoindents(old_edited_range.clone())
1191 .into_iter()
1192 .flatten();
1193 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
1194 if let Some(suggestion) = suggestion {
1195 let new_row = *old_to_new_rows.get(&old_row).unwrap();
1196
1197 // Find the indent size based on the language for this row.
1198 while let Some((row, size)) = language_indent_sizes.peek() {
1199 if *row > new_row {
1200 break;
1201 }
1202 language_indent_size = *size;
1203 language_indent_sizes.next();
1204 }
1205
1206 let suggested_indent = old_to_new_rows
1207 .get(&suggestion.basis_row)
1208 .and_then(|from_row| {
1209 Some(old_suggestions.get(from_row).copied()?.0)
1210 })
1211 .unwrap_or_else(|| {
1212 request
1213 .before_edit
1214 .indent_size_for_line(suggestion.basis_row)
1215 })
1216 .with_delta(suggestion.delta, language_indent_size);
1217 old_suggestions
1218 .insert(new_row, (suggested_indent, suggestion.within_error));
1219 }
1220 }
1221 yield_now().await;
1222 }
1223
1224 // In block mode, only compute indentation suggestions for the first line
1225 // of each insertion. Otherwise, compute suggestions for every inserted line.
1226 let new_edited_row_ranges = contiguous_ranges(
1227 row_ranges.iter().flat_map(|(range, _)| {
1228 if request.is_block_mode {
1229 range.start..range.start + 1
1230 } else {
1231 range.clone()
1232 }
1233 }),
1234 max_rows_between_yields,
1235 );
1236
1237 // Compute new suggestions for each line, but only include them in the result
1238 // if they differ from the old suggestion for that line.
1239 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1240 let mut language_indent_size = IndentSize::default();
1241 for new_edited_row_range in new_edited_row_ranges {
1242 let suggestions = snapshot
1243 .suggest_autoindents(new_edited_row_range.clone())
1244 .into_iter()
1245 .flatten();
1246 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
1247 if let Some(suggestion) = suggestion {
1248 // Find the indent size based on the language for this row.
1249 while let Some((row, size)) = language_indent_sizes.peek() {
1250 if *row > new_row {
1251 break;
1252 }
1253 language_indent_size = *size;
1254 language_indent_sizes.next();
1255 }
1256
1257 let suggested_indent = indent_sizes
1258 .get(&suggestion.basis_row)
1259 .copied()
1260 .unwrap_or_else(|| {
1261 snapshot.indent_size_for_line(suggestion.basis_row)
1262 })
1263 .with_delta(suggestion.delta, language_indent_size);
1264 if old_suggestions.get(&new_row).map_or(
1265 true,
1266 |(old_indentation, was_within_error)| {
1267 suggested_indent != *old_indentation
1268 && (!suggestion.within_error || *was_within_error)
1269 },
1270 ) {
1271 indent_sizes.insert(new_row, suggested_indent);
1272 }
1273 }
1274 }
1275 yield_now().await;
1276 }
1277
1278 // For each block of inserted text, adjust the indentation of the remaining
1279 // lines of the block by the same amount as the first line was adjusted.
1280 if request.is_block_mode {
1281 for (row_range, original_indent_column) in
1282 row_ranges
1283 .into_iter()
1284 .filter_map(|(range, original_indent_column)| {
1285 if range.len() > 1 {
1286 Some((range, original_indent_column?))
1287 } else {
1288 None
1289 }
1290 })
1291 {
1292 let new_indent = indent_sizes
1293 .get(&row_range.start)
1294 .copied()
1295 .unwrap_or_else(|| snapshot.indent_size_for_line(row_range.start));
1296 let delta = new_indent.len as i64 - original_indent_column as i64;
1297 if delta != 0 {
1298 for row in row_range.skip(1) {
1299 indent_sizes.entry(row).or_insert_with(|| {
1300 let mut size = snapshot.indent_size_for_line(row);
1301 if size.kind == new_indent.kind {
1302 match delta.cmp(&0) {
1303 Ordering::Greater => size.len += delta as u32,
1304 Ordering::Less => {
1305 size.len = size.len.saturating_sub(-delta as u32)
1306 }
1307 Ordering::Equal => {}
1308 }
1309 }
1310 size
1311 });
1312 }
1313 }
1314 }
1315 }
1316 }
1317
1318 indent_sizes
1319 })
1320 }
1321
1322 fn apply_autoindents(
1323 &mut self,
1324 indent_sizes: BTreeMap<u32, IndentSize>,
1325 cx: &mut ModelContext<Self>,
1326 ) {
1327 self.autoindent_requests.clear();
1328
1329 let edits: Vec<_> = indent_sizes
1330 .into_iter()
1331 .filter_map(|(row, indent_size)| {
1332 let current_size = indent_size_for_line(self, row);
1333 Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
1334 })
1335 .collect();
1336
1337 self.edit(edits, None, cx);
1338 }
1339
1340 /// Create a minimal edit that will cause the given row to be indented
1341 /// with the given size. After applying this edit, the length of the line
1342 /// will always be at least `new_size.len`.
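    ///
    /// A minimal sketch: growing a two-space indent on row 3 to four spaces yields
    /// an insertion of two spaces at the start of that row.
    ///
    /// ```ignore
    /// let edit = Buffer::edit_for_indent_size_adjustment(
    ///     3,
    ///     IndentSize { len: 2, kind: IndentKind::Space },
    ///     IndentSize { len: 4, kind: IndentKind::Space },
    /// );
    /// // => Some((Point::new(3, 0)..Point::new(3, 0), "  ".to_string()))
    /// ```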
1343 pub fn edit_for_indent_size_adjustment(
1344 row: u32,
1345 current_size: IndentSize,
1346 new_size: IndentSize,
1347 ) -> Option<(Range<Point>, String)> {
1348 if new_size.kind == current_size.kind {
            match new_size.len.cmp(&current_size.len) {
1350 Ordering::Greater => {
1351 let point = Point::new(row, 0);
1352 Some((
1353 point..point,
1354 iter::repeat(new_size.char())
1355 .take((new_size.len - current_size.len) as usize)
1356 .collect::<String>(),
1357 ))
1358 }
1359
1360 Ordering::Less => Some((
1361 Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
1362 String::new(),
1363 )),
1364
1365 Ordering::Equal => None,
1366 }
1367 } else {
1368 Some((
1369 Point::new(row, 0)..Point::new(row, current_size.len),
1370 iter::repeat(new_size.char())
1371 .take(new_size.len as usize)
1372 .collect::<String>(),
1373 ))
1374 }
1375 }
1376
1377 /// Spawns a background task that asynchronously computes a `Diff` between the buffer's text
1378 /// and the given new text.
1379 pub fn diff(&self, mut new_text: String, cx: &AppContext) -> Task<Diff> {
1380 let old_text = self.as_rope().clone();
1381 let base_version = self.version();
1382 cx.background_executor()
1383 .spawn_labeled(*BUFFER_DIFF_TASK, async move {
1384 let old_text = old_text.to_string();
1385 let line_ending = LineEnding::detect(&new_text);
1386 LineEnding::normalize(&mut new_text);
1387
1388 let diff = TextDiff::from_chars(old_text.as_str(), new_text.as_str());
1389 let empty: Arc<str> = "".into();
1390
1391 let mut edits = Vec::new();
1392 let mut old_offset = 0;
1393 let mut new_offset = 0;
1394 let mut last_edit: Option<(Range<usize>, Range<usize>)> = None;
1395 for change in diff.iter_all_changes().map(Some).chain([None]) {
1396 if let Some(change) = &change {
1397 let len = change.value().len();
1398 match change.tag() {
1399 ChangeTag::Equal => {
1400 old_offset += len;
1401 new_offset += len;
1402 }
1403 ChangeTag::Delete => {
1404 let old_end_offset = old_offset + len;
1405 if let Some((last_old_range, _)) = &mut last_edit {
1406 last_old_range.end = old_end_offset;
1407 } else {
1408 last_edit =
1409 Some((old_offset..old_end_offset, new_offset..new_offset));
1410 }
1411 old_offset = old_end_offset;
1412 }
1413 ChangeTag::Insert => {
1414 let new_end_offset = new_offset + len;
1415 if let Some((_, last_new_range)) = &mut last_edit {
1416 last_new_range.end = new_end_offset;
1417 } else {
1418 last_edit =
1419 Some((old_offset..old_offset, new_offset..new_end_offset));
1420 }
1421 new_offset = new_end_offset;
1422 }
1423 }
1424 }
1425
1426 if let Some((old_range, new_range)) = &last_edit {
1427 if old_offset > old_range.end
1428 || new_offset > new_range.end
1429 || change.is_none()
1430 {
1431 let text = if new_range.is_empty() {
1432 empty.clone()
1433 } else {
1434 new_text[new_range.clone()].into()
1435 };
1436 edits.push((old_range.clone(), text));
1437 last_edit.take();
1438 }
1439 }
1440 }
1441
1442 Diff {
1443 base_version,
1444 line_ending,
1445 edits,
1446 }
1447 })
1448 }
1449
1450 /// Spawns a background task that searches the buffer for any whitespace
    /// at the ends of lines, and returns a `Diff` that removes that whitespace.
1452 pub fn remove_trailing_whitespace(&self, cx: &AppContext) -> Task<Diff> {
1453 let old_text = self.as_rope().clone();
1454 let line_ending = self.line_ending();
1455 let base_version = self.version();
1456 cx.background_executor().spawn(async move {
1457 let ranges = trailing_whitespace_ranges(&old_text);
1458 let empty = Arc::<str>::from("");
1459 Diff {
1460 base_version,
1461 line_ending,
1462 edits: ranges
1463 .into_iter()
1464 .map(|range| (range, empty.clone()))
1465 .collect(),
1466 }
1467 })
1468 }
1469
1470 /// Ensures that the buffer ends with a single newline character, and
1471 /// no other whitespace.
1472 pub fn ensure_final_newline(&mut self, cx: &mut ModelContext<Self>) {
1473 let len = self.len();
1474 let mut offset = len;
1475 for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
1476 let non_whitespace_len = chunk
1477 .trim_end_matches(|c: char| c.is_ascii_whitespace())
1478 .len();
1479 offset -= chunk.len();
1480 offset += non_whitespace_len;
1481 if non_whitespace_len != 0 {
1482 if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
1483 return;
1484 }
1485 break;
1486 }
1487 }
1488 self.edit([(offset..len, "\n")], None, cx);
1489 }
1490
    /// Applies a diff to the buffer. If the buffer has changed since the given diff was
    /// calculated, the diff is adjusted to account for those changes, and any parts of
    /// the diff that conflict with those changes are discarded.
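    ///
    /// A minimal sketch of pairing this with [`Buffer::diff`], mirroring what
    /// [`Buffer::reload`] does internally (`buffer`, `new_text`, and `cx` are
    /// illustrative):
    ///
    /// ```ignore
    /// let diff_task = buffer.diff(new_text, cx);
    /// // ...await the background task, then, back on the main thread:
    /// let diff = diff_task.await;
    /// buffer.apply_diff(diff, cx);
    /// ```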
1494 pub fn apply_diff(&mut self, diff: Diff, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
1495 // Check for any edits to the buffer that have occurred since this diff
1496 // was computed.
1497 let snapshot = self.snapshot();
1498 let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
1499 let mut delta = 0;
1500 let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
1501 while let Some(edit_since) = edits_since.peek() {
1502 // If the edit occurs after a diff hunk, then it does not
1503 // affect that hunk.
1504 if edit_since.old.start > range.end {
1505 break;
1506 }
1507 // If the edit precedes the diff hunk, then adjust the hunk
1508 // to reflect the edit.
1509 else if edit_since.old.end < range.start {
1510 delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
1511 edits_since.next();
1512 }
1513 // If the edit intersects a diff hunk, then discard that hunk.
1514 else {
1515 return None;
1516 }
1517 }
1518
1519 let start = (range.start as i64 + delta) as usize;
1520 let end = (range.end as i64 + delta) as usize;
1521 Some((start..end, new_text))
1522 });
1523
1524 self.start_transaction();
1525 self.text.set_line_ending(diff.line_ending);
1526 self.edit(adjusted_edits, None, cx);
1527 self.end_transaction(cx)
1528 }
1529
1530 fn changed_since_saved_version(&self) -> bool {
1531 self.edits_since::<usize>(&self.saved_version)
1532 .next()
1533 .is_some()
1534 }
1535 /// Checks if the buffer has unsaved changes.
1536 pub fn is_dirty(&self) -> bool {
1537 (self.has_conflict || self.changed_since_saved_version())
1538 || self
1539 .file
1540 .as_ref()
1541 .map_or(false, |file| file.is_deleted() || !file.is_created())
1542 }
1543
1544 /// Checks if the buffer and its file have both changed since the buffer
1545 /// was last saved or reloaded.
1546 pub fn has_conflict(&self) -> bool {
1547 (self.has_conflict || self.changed_since_saved_version())
1548 && self
1549 .file
1550 .as_ref()
1551 .map_or(false, |file| file.mtime() > self.saved_mtime)
1552 }
1553
1554 /// Gets a [`Subscription`] that tracks all of the changes to the buffer's text.
1555 pub fn subscribe(&mut self) -> Subscription {
1556 self.text.subscribe()
1557 }
1558
1559 /// Starts a transaction, if one is not already in-progress. When undoing or
1560 /// redoing edits, all of the edits performed within a transaction are undone
1561 /// or redone together.
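    ///
    /// A minimal sketch of grouping two edits into a single undoable transaction,
    /// mirroring how [`Buffer::apply_diff`] uses transactions internally (`buffer`
    /// and `cx` are illustrative):
    ///
    /// ```ignore
    /// buffer.start_transaction();
    /// buffer.edit([(0..0, "hello ")], None, cx);
    /// buffer.edit([(6..6, "world")], None, cx);
    /// buffer.end_transaction(cx);
    /// ```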
1562 pub fn start_transaction(&mut self) -> Option<TransactionId> {
1563 self.start_transaction_at(Instant::now())
1564 }
1565
1566 /// Starts a transaction, providing the current time. Subsequent transactions
1567 /// that occur within a short period of time will be grouped together. This
1568 /// is controlled by the buffer's undo grouping duration.
1569 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
1570 self.transaction_depth += 1;
1571 if self.was_dirty_before_starting_transaction.is_none() {
1572 self.was_dirty_before_starting_transaction = Some(self.is_dirty());
1573 }
1574 self.text.start_transaction_at(now)
1575 }
1576
1577 /// Terminates the current transaction, if this is the outermost transaction.
1578 pub fn end_transaction(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
1579 self.end_transaction_at(Instant::now(), cx)
1580 }
1581
1582 /// Terminates the current transaction, providing the current time. Subsequent transactions
1583 /// that occur within a short period of time will be grouped together. This
1584 /// is controlled by the buffer's undo grouping duration.
1585 pub fn end_transaction_at(
1586 &mut self,
1587 now: Instant,
1588 cx: &mut ModelContext<Self>,
1589 ) -> Option<TransactionId> {
1590 assert!(self.transaction_depth > 0);
1591 self.transaction_depth -= 1;
1592 let was_dirty = if self.transaction_depth == 0 {
1593 self.was_dirty_before_starting_transaction.take().unwrap()
1594 } else {
1595 false
1596 };
1597 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
1598 self.did_edit(&start_version, was_dirty, cx);
1599 Some(transaction_id)
1600 } else {
1601 None
1602 }
1603 }
1604
1605 /// Manually add a transaction to the buffer's undo history.
1606 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
1607 self.text.push_transaction(transaction, now);
1608 }
1609
1610 /// Prevent the last transaction from being grouped with any subsequent transactions,
    /// even if they occur within the buffer's undo grouping duration.
1612 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
1613 self.text.finalize_last_transaction()
1614 }
1615
1616 /// Manually group all changes since a given transaction.
1617 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
1618 self.text.group_until_transaction(transaction_id);
1619 }
1620
1621 /// Manually remove a transaction from the buffer's undo history
1622 pub fn forget_transaction(&mut self, transaction_id: TransactionId) {
1623 self.text.forget_transaction(transaction_id);
1624 }
1625
1626 /// Manually merge two adjacent transactions in the buffer's undo history.
1627 pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
1628 self.text.merge_transactions(transaction, destination);
1629 }
1630
1631 /// Waits for the buffer to receive operations with the given timestamps.
1632 pub fn wait_for_edits(
1633 &mut self,
1634 edit_ids: impl IntoIterator<Item = clock::Lamport>,
1635 ) -> impl Future<Output = Result<()>> {
1636 self.text.wait_for_edits(edit_ids)
1637 }
1638
1639 /// Waits for the buffer to receive the operations necessary for resolving the given anchors.
1640 pub fn wait_for_anchors(
1641 &mut self,
1642 anchors: impl IntoIterator<Item = Anchor>,
1643 ) -> impl 'static + Future<Output = Result<()>> {
1644 self.text.wait_for_anchors(anchors)
1645 }
1646
1647 /// Waits for the buffer to receive operations up to the given version.
1648 pub fn wait_for_version(&mut self, version: clock::Global) -> impl Future<Output = Result<()>> {
1649 self.text.wait_for_version(version)
1650 }
1651
    /// Forces all futures returned by [`Buffer::wait_for_version`], [`Buffer::wait_for_edits`], or
    /// [`Buffer::wait_for_anchors`] to resolve with an error.
1654 pub fn give_up_waiting(&mut self) {
1655 self.text.give_up_waiting();
1656 }
1657
1658 /// Stores a set of selections that should be broadcasted to all of the buffer's replicas.
1659 pub fn set_active_selections(
1660 &mut self,
1661 selections: Arc<[Selection<Anchor>]>,
1662 line_mode: bool,
1663 cursor_shape: CursorShape,
1664 cx: &mut ModelContext<Self>,
1665 ) {
1666 let lamport_timestamp = self.text.lamport_clock.tick();
1667 self.remote_selections.insert(
1668 self.text.replica_id(),
1669 SelectionSet {
1670 selections: selections.clone(),
1671 lamport_timestamp,
1672 line_mode,
1673 cursor_shape,
1674 },
1675 );
1676 self.send_operation(
1677 Operation::UpdateSelections {
1678 selections,
1679 line_mode,
1680 lamport_timestamp,
1681 cursor_shape,
1682 },
1683 cx,
1684 );
1685 }
1686
1687 /// Clears the selections, so that other replicas of the buffer do not see any selections for
1688 /// this replica.
1689 pub fn remove_active_selections(&mut self, cx: &mut ModelContext<Self>) {
1690 if self
1691 .remote_selections
1692 .get(&self.text.replica_id())
1693 .map_or(true, |set| !set.selections.is_empty())
1694 {
1695 self.set_active_selections(Arc::from([]), false, Default::default(), cx);
1696 }
1697 }
1698
1699 /// Replaces the buffer's entire text.
1700 pub fn set_text<T>(&mut self, text: T, cx: &mut ModelContext<Self>) -> Option<clock::Lamport>
1701 where
1702 T: Into<Arc<str>>,
1703 {
1704 self.autoindent_requests.clear();
1705 self.edit([(0..self.len(), text)], None, cx)
1706 }
1707
1708 /// Applies the given edits to the buffer. Each edit is specified as a range of text to
1709 /// delete, and a string of text to insert at that location.
1710 ///
1711 /// If an [`AutoindentMode`] is provided, then the buffer will enqueue an auto-indent
1712 /// request for the edited ranges, which will be processed when the buffer finishes
1713 /// parsing.
1714 ///
    /// Parsing takes place at the end of a transaction, and may run synchronously
    /// or asynchronously, depending on the changes.
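    ///
    /// A minimal sketch (the ranges, replacement text, `buffer`, and `cx` are
    /// illustrative):
    ///
    /// ```ignore
    /// // Replace the first character and insert text at offset 10, re-indenting
    /// // each edited line.
    /// buffer.edit(
    ///     [(0..1, "A"), (10..10, "inserted")],
    ///     Some(AutoindentMode::EachLine),
    ///     cx,
    /// );
    /// ```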
1717 pub fn edit<I, S, T>(
1718 &mut self,
1719 edits_iter: I,
1720 autoindent_mode: Option<AutoindentMode>,
1721 cx: &mut ModelContext<Self>,
1722 ) -> Option<clock::Lamport>
1723 where
1724 I: IntoIterator<Item = (Range<S>, T)>,
1725 S: ToOffset,
1726 T: Into<Arc<str>>,
1727 {
1728 // Skip invalid edits and coalesce contiguous ones.
1729 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
1730 for (range, new_text) in edits_iter {
1731 let mut range = range.start.to_offset(self)..range.end.to_offset(self);
1732 if range.start > range.end {
1733 mem::swap(&mut range.start, &mut range.end);
1734 }
1735 let new_text = new_text.into();
1736 if !new_text.is_empty() || !range.is_empty() {
1737 if let Some((prev_range, prev_text)) = edits.last_mut() {
1738 if prev_range.end >= range.start {
1739 prev_range.end = cmp::max(prev_range.end, range.end);
1740 *prev_text = format!("{prev_text}{new_text}").into();
1741 } else {
1742 edits.push((range, new_text));
1743 }
1744 } else {
1745 edits.push((range, new_text));
1746 }
1747 }
1748 }
1749 if edits.is_empty() {
1750 return None;
1751 }
1752
1753 self.start_transaction();
1754 self.pending_autoindent.take();
1755 let autoindent_request = autoindent_mode
1756 .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
1757
1758 let edit_operation = self.text.edit(edits.iter().cloned());
1759 let edit_id = edit_operation.timestamp();
1760
1761 if let Some((before_edit, mode)) = autoindent_request {
1762 let mut delta = 0isize;
1763 let entries = edits
1764 .into_iter()
1765 .enumerate()
1766 .zip(&edit_operation.as_edit().unwrap().new_text)
1767 .map(|((ix, (range, _)), new_text)| {
1768 let new_text_length = new_text.len();
1769 let old_start = range.start.to_point(&before_edit);
1770 let new_start = (delta + range.start as isize) as usize;
1771 delta += new_text_length as isize - (range.end as isize - range.start as isize);
1772
1773 let mut range_of_insertion_to_indent = 0..new_text_length;
1774 let mut first_line_is_new = false;
1775 let mut original_indent_column = None;
1776
1777 // When inserting an entire line at the beginning of an existing line,
1778 // treat the insertion as new.
1779 if new_text.contains('\n')
1780 && old_start.column <= before_edit.indent_size_for_line(old_start.row).len
1781 {
1782 first_line_is_new = true;
1783 }
1784
1785 // When inserting text starting with a newline, avoid auto-indenting the
1786 // previous line.
1787 if new_text.starts_with('\n') {
1788 range_of_insertion_to_indent.start += 1;
1789 first_line_is_new = true;
1790 }
1791
                    // In block mode, capture the original indent column of the inserted
                    // text and avoid auto-indenting the line that follows the insertion.
1793 if let AutoindentMode::Block {
1794 original_indent_columns,
1795 } = &mode
1796 {
1797 original_indent_column =
1798 Some(original_indent_columns.get(ix).copied().unwrap_or_else(|| {
1799 indent_size_for_text(
1800 new_text[range_of_insertion_to_indent.clone()].chars(),
1801 )
1802 .len
1803 }));
1804 if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
1805 range_of_insertion_to_indent.end -= 1;
1806 }
1807 }
1808
1809 AutoindentRequestEntry {
1810 first_line_is_new,
1811 original_indent_column,
1812 indent_size: before_edit.language_indent_size_at(range.start, cx),
1813 range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
1814 ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
1815 }
1816 })
1817 .collect();
1818
1819 self.autoindent_requests.push(Arc::new(AutoindentRequest {
1820 before_edit,
1821 entries,
1822 is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
1823 }));
1824 }
1825
1826 self.end_transaction(cx);
1827 self.send_operation(Operation::Buffer(edit_operation), cx);
1828 Some(edit_id)
1829 }
1830
1831 fn did_edit(
1832 &mut self,
1833 old_version: &clock::Global,
1834 was_dirty: bool,
1835 cx: &mut ModelContext<Self>,
1836 ) {
1837 if self.edits_since::<usize>(old_version).next().is_none() {
1838 return;
1839 }
1840
1841 self.reparse(cx);
1842
1843 cx.emit(Event::Edited);
1844 if was_dirty != self.is_dirty() {
1845 cx.emit(Event::DirtyChanged);
1846 }
1847 cx.notify();
1848 }
1849
1850 /// Applies the given remote operations to the buffer.
1851 pub fn apply_ops<I: IntoIterator<Item = Operation>>(
1852 &mut self,
1853 ops: I,
1854 cx: &mut ModelContext<Self>,
1855 ) -> Result<()> {
1856 self.pending_autoindent.take();
1857 let was_dirty = self.is_dirty();
1858 let old_version = self.version.clone();
1859 let mut deferred_ops = Vec::new();
1860 let buffer_ops = ops
1861 .into_iter()
1862 .filter_map(|op| match op {
1863 Operation::Buffer(op) => Some(op),
1864 _ => {
1865 if self.can_apply_op(&op) {
1866 self.apply_op(op, cx);
1867 } else {
1868 deferred_ops.push(op);
1869 }
1870 None
1871 }
1872 })
1873 .collect::<Vec<_>>();
1874 self.text.apply_ops(buffer_ops)?;
1875 self.deferred_ops.insert(deferred_ops);
1876 self.flush_deferred_ops(cx);
1877 self.did_edit(&old_version, was_dirty, cx);
        // Notify regardless of whether the buffer was edited, since the operations
        // could include a selection update.
1880 cx.notify();
1881 Ok(())
1882 }
1883
1884 fn flush_deferred_ops(&mut self, cx: &mut ModelContext<Self>) {
1885 let mut deferred_ops = Vec::new();
1886 for op in self.deferred_ops.drain().iter().cloned() {
1887 if self.can_apply_op(&op) {
1888 self.apply_op(op, cx);
1889 } else {
1890 deferred_ops.push(op);
1891 }
1892 }
1893 self.deferred_ops.insert(deferred_ops);
1894 }
1895
1896 fn can_apply_op(&self, operation: &Operation) -> bool {
1897 match operation {
1898 Operation::Buffer(_) => {
1899 unreachable!("buffer operations should never be applied at this layer")
1900 }
1901 Operation::UpdateDiagnostics {
1902 diagnostics: diagnostic_set,
1903 ..
1904 } => diagnostic_set.iter().all(|diagnostic| {
1905 self.text.can_resolve(&diagnostic.range.start)
1906 && self.text.can_resolve(&diagnostic.range.end)
1907 }),
1908 Operation::UpdateSelections { selections, .. } => selections
1909 .iter()
1910 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
1911 Operation::UpdateCompletionTriggers { .. } => true,
1912 }
1913 }
1914
1915 fn apply_op(&mut self, operation: Operation, cx: &mut ModelContext<Self>) {
1916 match operation {
1917 Operation::Buffer(_) => {
1918 unreachable!("buffer operations should never be applied at this layer")
1919 }
1920 Operation::UpdateDiagnostics {
1921 server_id,
1922 diagnostics: diagnostic_set,
1923 lamport_timestamp,
1924 } => {
1925 let snapshot = self.snapshot();
1926 self.apply_diagnostic_update(
1927 server_id,
1928 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
1929 lamport_timestamp,
1930 cx,
1931 );
1932 }
1933 Operation::UpdateSelections {
1934 selections,
1935 lamport_timestamp,
1936 line_mode,
1937 cursor_shape,
1938 } => {
1939 if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id) {
1940 if set.lamport_timestamp > lamport_timestamp {
1941 return;
1942 }
1943 }
1944
1945 self.remote_selections.insert(
1946 lamport_timestamp.replica_id,
1947 SelectionSet {
1948 selections,
1949 lamport_timestamp,
1950 line_mode,
1951 cursor_shape,
1952 },
1953 );
1954 self.text.lamport_clock.observe(lamport_timestamp);
1955 self.selections_update_count += 1;
1956 }
1957 Operation::UpdateCompletionTriggers {
1958 triggers,
1959 lamport_timestamp,
1960 } => {
1961 self.completion_triggers = triggers;
1962 self.text.lamport_clock.observe(lamport_timestamp);
1963 }
1964 }
1965 }
1966
1967 fn apply_diagnostic_update(
1968 &mut self,
1969 server_id: LanguageServerId,
1970 diagnostics: DiagnosticSet,
1971 lamport_timestamp: clock::Lamport,
1972 cx: &mut ModelContext<Self>,
1973 ) {
1974 if lamport_timestamp > self.diagnostics_timestamp {
1975 let ix = self.diagnostics.binary_search_by_key(&server_id, |e| e.0);
1976 if diagnostics.len() == 0 {
1977 if let Ok(ix) = ix {
1978 self.diagnostics.remove(ix);
1979 }
1980 } else {
1981 match ix {
1982 Err(ix) => self.diagnostics.insert(ix, (server_id, diagnostics)),
1983 Ok(ix) => self.diagnostics[ix].1 = diagnostics,
1984 };
1985 }
1986 self.diagnostics_timestamp = lamport_timestamp;
1987 self.diagnostics_update_count += 1;
1988 self.text.lamport_clock.observe(lamport_timestamp);
1989 cx.notify();
1990 cx.emit(Event::DiagnosticsUpdated);
1991 }
1992 }
1993
1994 fn send_operation(&mut self, operation: Operation, cx: &mut ModelContext<Self>) {
1995 cx.emit(Event::Operation(operation));
1996 }
1997
1998 /// Removes the selections for a given peer.
1999 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut ModelContext<Self>) {
2000 self.remote_selections.remove(&replica_id);
2001 cx.notify();
2002 }
2003
2004 /// Undoes the most recent transaction.
2005 pub fn undo(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
2006 let was_dirty = self.is_dirty();
2007 let old_version = self.version.clone();
2008
2009 if let Some((transaction_id, operation)) = self.text.undo() {
2010 self.send_operation(Operation::Buffer(operation), cx);
2011 self.did_edit(&old_version, was_dirty, cx);
2012 Some(transaction_id)
2013 } else {
2014 None
2015 }
2016 }
2017
2018 /// Manually undoes a specific transaction in the buffer's undo history.
2019 pub fn undo_transaction(
2020 &mut self,
2021 transaction_id: TransactionId,
2022 cx: &mut ModelContext<Self>,
2023 ) -> bool {
2024 let was_dirty = self.is_dirty();
2025 let old_version = self.version.clone();
2026 if let Some(operation) = self.text.undo_transaction(transaction_id) {
2027 self.send_operation(Operation::Buffer(operation), cx);
2028 self.did_edit(&old_version, was_dirty, cx);
2029 true
2030 } else {
2031 false
2032 }
2033 }
2034
2035 /// Manually undoes all changes after a given transaction in the buffer's undo history.
2036 pub fn undo_to_transaction(
2037 &mut self,
2038 transaction_id: TransactionId,
2039 cx: &mut ModelContext<Self>,
2040 ) -> bool {
2041 let was_dirty = self.is_dirty();
2042 let old_version = self.version.clone();
2043
2044 let operations = self.text.undo_to_transaction(transaction_id);
2045 let undone = !operations.is_empty();
2046 for operation in operations {
2047 self.send_operation(Operation::Buffer(operation), cx);
2048 }
2049 if undone {
2050 self.did_edit(&old_version, was_dirty, cx)
2051 }
2052 undone
2053 }
2054
    /// Redoes the most recently undone transaction.
2056 pub fn redo(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
2057 let was_dirty = self.is_dirty();
2058 let old_version = self.version.clone();
2059
2060 if let Some((transaction_id, operation)) = self.text.redo() {
2061 self.send_operation(Operation::Buffer(operation), cx);
2062 self.did_edit(&old_version, was_dirty, cx);
2063 Some(transaction_id)
2064 } else {
2065 None
2066 }
2067 }
2068
    /// Manually redoes all changes up to a given transaction in the buffer's redo history.
2070 pub fn redo_to_transaction(
2071 &mut self,
2072 transaction_id: TransactionId,
2073 cx: &mut ModelContext<Self>,
2074 ) -> bool {
2075 let was_dirty = self.is_dirty();
2076 let old_version = self.version.clone();
2077
2078 let operations = self.text.redo_to_transaction(transaction_id);
2079 let redone = !operations.is_empty();
2080 for operation in operations {
2081 self.send_operation(Operation::Buffer(operation), cx);
2082 }
2083 if redone {
2084 self.did_edit(&old_version, was_dirty, cx)
2085 }
2086 redone
2087 }
2088
    /// Overrides the current completion triggers with the user-provided completion triggers.
2090 pub fn set_completion_triggers(&mut self, triggers: Vec<String>, cx: &mut ModelContext<Self>) {
2091 self.completion_triggers.clone_from(&triggers);
2092 self.completion_triggers_timestamp = self.text.lamport_clock.tick();
2093 self.send_operation(
2094 Operation::UpdateCompletionTriggers {
2095 triggers,
2096 lamport_timestamp: self.completion_triggers_timestamp,
2097 },
2098 cx,
2099 );
2100 cx.notify();
2101 }
2102
    /// Returns a list of strings which trigger a completion menu for this language.
    /// Usually this is driven by the LSP server, which returns a list of trigger characters for completions.
2105 pub fn completion_triggers(&self) -> &[String] {
2106 &self.completion_triggers
2107 }
2108}
2109
2110#[doc(hidden)]
2111#[cfg(any(test, feature = "test-support"))]
2112impl Buffer {
2113 pub fn edit_via_marked_text(
2114 &mut self,
2115 marked_string: &str,
2116 autoindent_mode: Option<AutoindentMode>,
2117 cx: &mut ModelContext<Self>,
2118 ) {
2119 let edits = self.edits_for_marked_text(marked_string);
2120 self.edit(edits, autoindent_mode, cx);
2121 }
2122
2123 pub fn set_group_interval(&mut self, group_interval: Duration) {
2124 self.text.set_group_interval(group_interval);
2125 }
2126
2127 pub fn randomly_edit<T>(
2128 &mut self,
2129 rng: &mut T,
2130 old_range_count: usize,
2131 cx: &mut ModelContext<Self>,
2132 ) where
2133 T: rand::Rng,
2134 {
2135 let mut edits: Vec<(Range<usize>, String)> = Vec::new();
2136 let mut last_end = None;
2137 for _ in 0..old_range_count {
2138 if last_end.map_or(false, |last_end| last_end >= self.len()) {
2139 break;
2140 }
2141
2142 let new_start = last_end.map_or(0, |last_end| last_end + 1);
2143 let mut range = self.random_byte_range(new_start, rng);
2144 if rng.gen_bool(0.2) {
2145 mem::swap(&mut range.start, &mut range.end);
2146 }
2147 last_end = Some(range.end);
2148
2149 let new_text_len = rng.gen_range(0..10);
2150 let new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
2151
2152 edits.push((range, new_text));
2153 }
2154 log::info!("mutating buffer {} with {:?}", self.replica_id(), edits);
2155 self.edit(edits, None, cx);
2156 }
2157
2158 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut ModelContext<Self>) {
2159 let was_dirty = self.is_dirty();
2160 let old_version = self.version.clone();
2161
2162 let ops = self.text.randomly_undo_redo(rng);
2163 if !ops.is_empty() {
2164 for op in ops {
2165 self.send_operation(Operation::Buffer(op), cx);
2166 self.did_edit(&old_version, was_dirty, cx);
2167 }
2168 }
2169 }
2170}
2171
2172impl EventEmitter<Event> for Buffer {}
2173
2174impl Deref for Buffer {
2175 type Target = TextBuffer;
2176
2177 fn deref(&self) -> &Self::Target {
2178 &self.text
2179 }
2180}
2181
2182impl BufferSnapshot {
    /// Returns the [`IndentSize`] of the given line, based on its leading whitespace.
    pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
        indent_size_for_line(self, row)
    }

    /// Returns [`IndentSize`] for a given position that respects user settings
2188 /// and language preferences.
2189 pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &AppContext) -> IndentSize {
2190 let settings = language_settings(self.language_at(position), self.file(), cx);
2191 if settings.hard_tabs {
2192 IndentSize::tab()
2193 } else {
2194 IndentSize::spaces(settings.tab_size.get())
2195 }
2196 }
2197
    /// Retrieves the suggested indent size for each of the given rows. The unit of indentation
    /// is passed in as `single_indent_size`.
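    ///
    /// A usage sketch (not compiled as a doctest), assuming `snapshot` is a [`BufferSnapshot`]:
    ///
    /// ```ignore
    /// // Suggest indents for rows 1 through 3, using a four-space indent unit.
    /// let suggestions = snapshot.suggested_indents(1..=3, IndentSize::spaces(4));
    /// ```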
2200 pub fn suggested_indents(
2201 &self,
2202 rows: impl Iterator<Item = u32>,
2203 single_indent_size: IndentSize,
2204 ) -> BTreeMap<u32, IndentSize> {
2205 let mut result = BTreeMap::new();
2206
2207 for row_range in contiguous_ranges(rows, 10) {
2208 let suggestions = match self.suggest_autoindents(row_range.clone()) {
2209 Some(suggestions) => suggestions,
2210 _ => break,
2211 };
2212
2213 for (row, suggestion) in row_range.zip(suggestions) {
2214 let indent_size = if let Some(suggestion) = suggestion {
2215 result
2216 .get(&suggestion.basis_row)
2217 .copied()
2218 .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
2219 .with_delta(suggestion.delta, single_indent_size)
2220 } else {
2221 self.indent_size_for_line(row)
2222 };
2223
2224 result.insert(row, indent_size);
2225 }
2226 }
2227
2228 result
2229 }
2230
2231 fn suggest_autoindents(
2232 &self,
2233 row_range: Range<u32>,
2234 ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
2235 let config = &self.language.as_ref()?.config;
2236 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
2237
2238 // Find the suggested indentation ranges based on the syntax tree.
2239 let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
2240 let end = Point::new(row_range.end, 0);
2241 let range = (start..end).to_offset(&self.text);
2242 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2243 Some(&grammar.indents_config.as_ref()?.query)
2244 });
2245 let indent_configs = matches
2246 .grammars()
2247 .iter()
2248 .map(|grammar| grammar.indents_config.as_ref().unwrap())
2249 .collect::<Vec<_>>();
2250
2251 let mut indent_ranges = Vec::<Range<Point>>::new();
2252 let mut outdent_positions = Vec::<Point>::new();
2253 while let Some(mat) = matches.peek() {
2254 let mut start: Option<Point> = None;
2255 let mut end: Option<Point> = None;
2256
2257 let config = &indent_configs[mat.grammar_index];
2258 for capture in mat.captures {
2259 if capture.index == config.indent_capture_ix {
2260 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
2261 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
2262 } else if Some(capture.index) == config.start_capture_ix {
2263 start = Some(Point::from_ts_point(capture.node.end_position()));
2264 } else if Some(capture.index) == config.end_capture_ix {
2265 end = Some(Point::from_ts_point(capture.node.start_position()));
2266 } else if Some(capture.index) == config.outdent_capture_ix {
2267 outdent_positions.push(Point::from_ts_point(capture.node.start_position()));
2268 }
2269 }
2270
2271 matches.advance();
2272 if let Some((start, end)) = start.zip(end) {
2273 if start.row == end.row {
2274 continue;
2275 }
2276
2277 let range = start..end;
2278 match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
2279 Err(ix) => indent_ranges.insert(ix, range),
2280 Ok(ix) => {
2281 let prev_range = &mut indent_ranges[ix];
2282 prev_range.end = prev_range.end.max(range.end);
2283 }
2284 }
2285 }
2286 }
2287
2288 let mut error_ranges = Vec::<Range<Point>>::new();
2289 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2290 Some(&grammar.error_query)
2291 });
2292 while let Some(mat) = matches.peek() {
2293 let node = mat.captures[0].node;
2294 let start = Point::from_ts_point(node.start_position());
2295 let end = Point::from_ts_point(node.end_position());
2296 let range = start..end;
2297 let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
2298 Ok(ix) | Err(ix) => ix,
2299 };
2300 let mut end_ix = ix;
2301 while let Some(existing_range) = error_ranges.get(end_ix) {
2302 if existing_range.end < end {
2303 end_ix += 1;
2304 } else {
2305 break;
2306 }
2307 }
2308 error_ranges.splice(ix..end_ix, [range]);
2309 matches.advance();
2310 }
2311
2312 outdent_positions.sort();
2313 for outdent_position in outdent_positions {
            // Find the innermost indent range containing this outdent position,
            // and truncate that range to end at the outdent position.
2316 if let Some(range_to_truncate) = indent_ranges
2317 .iter_mut()
2318 .filter(|indent_range| indent_range.contains(&outdent_position))
2319 .last()
2320 {
2321 range_to_truncate.end = outdent_position;
2322 }
2323 }
2324
        // Find the suggested indentation increases and decreases based on regexes.
2326 let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
2327 self.for_each_line(
2328 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
2329 ..Point::new(row_range.end, 0),
2330 |row, line| {
2331 if config
2332 .decrease_indent_pattern
2333 .as_ref()
2334 .map_or(false, |regex| regex.is_match(line))
2335 {
2336 indent_change_rows.push((row, Ordering::Less));
2337 }
2338 if config
2339 .increase_indent_pattern
2340 .as_ref()
2341 .map_or(false, |regex| regex.is_match(line))
2342 {
2343 indent_change_rows.push((row + 1, Ordering::Greater));
2344 }
2345 },
2346 );
2347
2348 let mut indent_changes = indent_change_rows.into_iter().peekable();
2349 let mut prev_row = if config.auto_indent_using_last_non_empty_line {
2350 prev_non_blank_row.unwrap_or(0)
2351 } else {
2352 row_range.start.saturating_sub(1)
2353 };
2354 let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
2355 Some(row_range.map(move |row| {
2356 let row_start = Point::new(row, self.indent_size_for_line(row).len);
2357
2358 let mut indent_from_prev_row = false;
2359 let mut outdent_from_prev_row = false;
2360 let mut outdent_to_row = u32::MAX;
2361
2362 while let Some((indent_row, delta)) = indent_changes.peek() {
2363 match indent_row.cmp(&row) {
2364 Ordering::Equal => match delta {
2365 Ordering::Less => outdent_from_prev_row = true,
2366 Ordering::Greater => indent_from_prev_row = true,
2367 _ => {}
2368 },
2369
2370 Ordering::Greater => break,
2371 Ordering::Less => {}
2372 }
2373
2374 indent_changes.next();
2375 }
2376
2377 for range in &indent_ranges {
2378 if range.start.row >= row {
2379 break;
2380 }
2381 if range.start.row == prev_row && range.end > row_start {
2382 indent_from_prev_row = true;
2383 }
2384 if range.end > prev_row_start && range.end <= row_start {
2385 outdent_to_row = outdent_to_row.min(range.start.row);
2386 }
2387 }
2388
2389 let within_error = error_ranges
2390 .iter()
2391 .any(|e| e.start.row < row && e.end > row_start);
2392
2393 let suggestion = if outdent_to_row == prev_row
2394 || (outdent_from_prev_row && indent_from_prev_row)
2395 {
2396 Some(IndentSuggestion {
2397 basis_row: prev_row,
2398 delta: Ordering::Equal,
2399 within_error,
2400 })
2401 } else if indent_from_prev_row {
2402 Some(IndentSuggestion {
2403 basis_row: prev_row,
2404 delta: Ordering::Greater,
2405 within_error,
2406 })
2407 } else if outdent_to_row < prev_row {
2408 Some(IndentSuggestion {
2409 basis_row: outdent_to_row,
2410 delta: Ordering::Equal,
2411 within_error,
2412 })
2413 } else if outdent_from_prev_row {
2414 Some(IndentSuggestion {
2415 basis_row: prev_row,
2416 delta: Ordering::Less,
2417 within_error,
2418 })
2419 } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
2420 {
2421 Some(IndentSuggestion {
2422 basis_row: prev_row,
2423 delta: Ordering::Equal,
2424 within_error,
2425 })
2426 } else {
2427 None
2428 };
2429
2430 prev_row = row;
2431 prev_row_start = row_start;
2432 suggestion
2433 }))
2434 }
2435
2436 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
2437 while row > 0 {
2438 row -= 1;
2439 if !self.is_line_blank(row) {
2440 return Some(row);
2441 }
2442 }
2443 None
2444 }
2445
    /// Iterates over chunks of text in the given range of the buffer. Because the text is
    /// stored in a [`Rope`](text::Rope), it is chunked in an arbitrary way; each returned
    /// chunk also has a single syntax highlighting style and diagnostic status.
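    ///
    /// A usage sketch (not compiled as a doctest), assuming `snapshot` is a [`BufferSnapshot`]:
    ///
    /// ```ignore
    /// // Reassemble the buffer's full text from its highlighted chunks.
    /// let text: String = snapshot
    ///     .chunks(0..snapshot.len(), true)
    ///     .map(|chunk| chunk.text)
    ///     .collect();
    /// ```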
2450 pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks {
2451 let range = range.start.to_offset(self)..range.end.to_offset(self);
2452
2453 let mut syntax = None;
2454 let mut diagnostic_endpoints = Vec::new();
2455 if language_aware {
2456 let captures = self.syntax.captures(range.clone(), &self.text, |grammar| {
2457 grammar.highlights_query.as_ref()
2458 });
2459 let highlight_maps = captures
2460 .grammars()
2461 .into_iter()
2462 .map(|grammar| grammar.highlight_map())
2463 .collect();
2464 syntax = Some((captures, highlight_maps));
2465 for entry in self.diagnostics_in_range::<_, usize>(range.clone(), false) {
2466 diagnostic_endpoints.push(DiagnosticEndpoint {
2467 offset: entry.range.start,
2468 is_start: true,
2469 severity: entry.diagnostic.severity,
2470 is_unnecessary: entry.diagnostic.is_unnecessary,
2471 });
2472 diagnostic_endpoints.push(DiagnosticEndpoint {
2473 offset: entry.range.end,
2474 is_start: false,
2475 severity: entry.diagnostic.severity,
2476 is_unnecessary: entry.diagnostic.is_unnecessary,
2477 });
2478 }
2479 diagnostic_endpoints
2480 .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
2481 }
2482
2483 BufferChunks::new(self.text.as_rope(), range, syntax, diagnostic_endpoints)
2484 }
2485
    /// Invokes the given callback for each line of text in the given range of the buffer.
    /// Uses a callback to avoid allocating a new string for each line.
2488 fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
2489 let mut line = String::new();
2490 let mut row = range.start.row;
2491 for chunk in self
2492 .as_rope()
2493 .chunks_in_range(range.to_offset(self))
2494 .chain(["\n"])
2495 {
2496 for (newline_ix, text) in chunk.split('\n').enumerate() {
2497 if newline_ix > 0 {
2498 callback(row, &line);
2499 row += 1;
2500 line.clear();
2501 }
2502 line.push_str(text);
2503 }
2504 }
2505 }
2506
2507 /// Iterates over every [`SyntaxLayer`] in the buffer.
2508 pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayer> + '_ {
2509 self.syntax.layers_for_range(0..self.len(), &self.text)
2510 }
2511
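    /// Returns the innermost [`SyntaxLayer`] containing the given position, if any.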
2512 pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayer> {
2513 let offset = position.to_offset(self);
2514 self.syntax
2515 .layers_for_range(offset..offset, &self.text)
2516 .filter(|l| l.node().end_byte() > offset)
2517 .last()
2518 }
2519
    /// Returns the buffer's main [Language].
2521 pub fn language(&self) -> Option<&Arc<Language>> {
2522 self.language.as_ref()
2523 }
2524
2525 /// Returns the [Language] at the given location.
2526 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
2527 self.syntax_layer_at(position)
2528 .map(|info| info.language)
2529 .or(self.language.as_ref())
2530 }
2531
2532 /// Returns the settings for the language at the given location.
2533 pub fn settings_at<'a, D: ToOffset>(
2534 &self,
2535 position: D,
2536 cx: &'a AppContext,
2537 ) -> &'a LanguageSettings {
2538 language_settings(self.language_at(position), self.file.as_ref(), cx)
2539 }
2540
2541 /// Returns the [LanguageScope] at the given location.
2542 pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
2543 let offset = position.to_offset(self);
2544 let mut scope = None;
2545 let mut smallest_range: Option<Range<usize>> = None;
2546
2547 // Use the layer that has the smallest node intersecting the given point.
2548 for layer in self.syntax.layers_for_range(offset..offset, &self.text) {
2549 let mut cursor = layer.node().walk();
2550
2551 let mut range = None;
2552 loop {
2553 let child_range = cursor.node().byte_range();
2554 if !child_range.to_inclusive().contains(&offset) {
2555 break;
2556 }
2557
2558 range = Some(child_range);
2559 if cursor.goto_first_child_for_byte(offset).is_none() {
2560 break;
2561 }
2562 }
2563
2564 if let Some(range) = range {
2565 if smallest_range
2566 .as_ref()
2567 .map_or(true, |smallest_range| range.len() < smallest_range.len())
2568 {
2569 smallest_range = Some(range);
2570 scope = Some(LanguageScope {
2571 language: layer.language.clone(),
2572 override_id: layer.override_id(offset, &self.text),
2573 });
2574 }
2575 }
2576 }
2577
2578 scope.or_else(|| {
2579 self.language.clone().map(|language| LanguageScope {
2580 language,
2581 override_id: None,
2582 })
2583 })
2584 }
2585
2586 /// Returns a tuple of the range and character kind of the word
2587 /// surrounding the given position.
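    ///
    /// A usage sketch (not compiled as a doctest), assuming `snapshot` is a [`BufferSnapshot`]
    /// and `offset` is a byte offset within it:
    ///
    /// ```ignore
    /// let (range, _kind) = snapshot.surrounding_word(offset);
    /// let word: String = snapshot.text_for_range(range).collect();
    /// ```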
2588 pub fn surrounding_word<T: ToOffset>(&self, start: T) -> (Range<usize>, Option<CharKind>) {
2589 let mut start = start.to_offset(self);
2590 let mut end = start;
2591 let mut next_chars = self.chars_at(start).peekable();
2592 let mut prev_chars = self.reversed_chars_at(start).peekable();
2593
2594 let scope = self.language_scope_at(start);
2595 let kind = |c| char_kind(&scope, c);
2596 let word_kind = cmp::max(
2597 prev_chars.peek().copied().map(kind),
2598 next_chars.peek().copied().map(kind),
2599 );
2600
2601 for ch in prev_chars {
2602 if Some(kind(ch)) == word_kind && ch != '\n' {
2603 start -= ch.len_utf8();
2604 } else {
2605 break;
2606 }
2607 }
2608
2609 for ch in next_chars {
2610 if Some(kind(ch)) == word_kind && ch != '\n' {
2611 end += ch.len_utf8();
2612 } else {
2613 break;
2614 }
2615 }
2616
2617 (start..end, word_kind)
2618 }
2619
    /// Returns the range of the closest syntax node enclosing the given range.
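    ///
    /// A usage sketch (not compiled as a doctest); `snapshot` and `offset` are assumed:
    ///
    /// ```ignore
    /// // Expand an empty range at `offset` to the nearest enclosing syntax node.
    /// if let Some(ancestor_range) = snapshot.range_for_syntax_ancestor(offset..offset) {
    ///     // `ancestor_range` covers the token or expression surrounding `offset`.
    /// }
    /// ```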
2621 pub fn range_for_syntax_ancestor<T: ToOffset>(&self, range: Range<T>) -> Option<Range<usize>> {
2622 let range = range.start.to_offset(self)..range.end.to_offset(self);
2623 let mut result: Option<Range<usize>> = None;
2624 'outer: for layer in self.syntax.layers_for_range(range.clone(), &self.text) {
2625 let mut cursor = layer.node().walk();
2626
2627 // Descend to the first leaf that touches the start of the range,
2628 // and if the range is non-empty, extends beyond the start.
2629 while cursor.goto_first_child_for_byte(range.start).is_some() {
2630 if !range.is_empty() && cursor.node().end_byte() == range.start {
2631 cursor.goto_next_sibling();
2632 }
2633 }
2634
2635 // Ascend to the smallest ancestor that strictly contains the range.
2636 loop {
2637 let node_range = cursor.node().byte_range();
2638 if node_range.start <= range.start
2639 && node_range.end >= range.end
2640 && node_range.len() > range.len()
2641 {
2642 break;
2643 }
2644 if !cursor.goto_parent() {
2645 continue 'outer;
2646 }
2647 }
2648
2649 let left_node = cursor.node();
2650 let mut layer_result = left_node.byte_range();
2651
2652 // For an empty range, try to find another node immediately to the right of the range.
2653 if left_node.end_byte() == range.start {
2654 let mut right_node = None;
2655 while !cursor.goto_next_sibling() {
2656 if !cursor.goto_parent() {
2657 break;
2658 }
2659 }
2660
2661 while cursor.node().start_byte() == range.start {
2662 right_node = Some(cursor.node());
2663 if !cursor.goto_first_child() {
2664 break;
2665 }
2666 }
2667
2668 // If there is a candidate node on both sides of the (empty) range, then
2669 // decide between the two by favoring a named node over an anonymous token.
2670 // If both nodes are the same in that regard, favor the right one.
2671 if let Some(right_node) = right_node {
2672 if right_node.is_named() || !left_node.is_named() {
2673 layer_result = right_node.byte_range();
2674 }
2675 }
2676 }
2677
2678 if let Some(previous_result) = &result {
2679 if previous_result.len() < layer_result.len() {
2680 continue;
2681 }
2682 }
2683 result = Some(layer_result);
2684 }
2685
2686 result
2687 }
2688
2689 /// Returns the outline for the buffer.
2690 ///
2691 /// This method allows passing an optional [SyntaxTheme] to
2692 /// syntax-highlight the returned symbols.
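    ///
    /// A usage sketch (not compiled as a doctest), assuming `snapshot` is a [`BufferSnapshot`]:
    ///
    /// ```ignore
    /// // Build an outline of the whole buffer without syntax highlighting.
    /// if let Some(outline) = snapshot.outline(None) {
    ///     // The outline holds one item per symbol, ordered by position in the buffer.
    /// }
    /// ```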
2693 pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Option<Outline<Anchor>> {
2694 self.outline_items_containing(0..self.len(), true, theme)
2695 .map(Outline::new)
2696 }
2697
2698 /// Returns all the symbols that contain the given position.
2699 ///
2700 /// This method allows passing an optional [SyntaxTheme] to
2701 /// syntax-highlight the returned symbols.
2702 pub fn symbols_containing<T: ToOffset>(
2703 &self,
2704 position: T,
2705 theme: Option<&SyntaxTheme>,
2706 ) -> Option<Vec<OutlineItem<Anchor>>> {
2707 let position = position.to_offset(self);
2708 let mut items = self.outline_items_containing(
2709 position.saturating_sub(1)..self.len().min(position + 1),
2710 false,
2711 theme,
2712 )?;
2713 let mut prev_depth = None;
2714 items.retain(|item| {
2715 let result = prev_depth.map_or(true, |prev_depth| item.depth > prev_depth);
2716 prev_depth = Some(item.depth);
2717 result
2718 });
2719 Some(items)
2720 }
2721
2722 fn outline_items_containing(
2723 &self,
2724 range: Range<usize>,
2725 include_extra_context: bool,
2726 theme: Option<&SyntaxTheme>,
2727 ) -> Option<Vec<OutlineItem<Anchor>>> {
2728 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2729 grammar.outline_config.as_ref().map(|c| &c.query)
2730 });
2731 let configs = matches
2732 .grammars()
2733 .iter()
2734 .map(|g| g.outline_config.as_ref().unwrap())
2735 .collect::<Vec<_>>();
2736
2737 let mut stack = Vec::<Range<usize>>::new();
2738 let mut items = Vec::new();
2739 while let Some(mat) = matches.peek() {
2740 let config = &configs[mat.grammar_index];
2741 let item_node = mat.captures.iter().find_map(|cap| {
2742 if cap.index == config.item_capture_ix {
2743 Some(cap.node)
2744 } else {
2745 None
2746 }
2747 })?;
2748
2749 let item_range = item_node.byte_range();
2750 if item_range.end < range.start || item_range.start > range.end {
2751 matches.advance();
2752 continue;
2753 }
2754
2755 let mut buffer_ranges = Vec::new();
2756 for capture in mat.captures {
2757 let node_is_name;
2758 if capture.index == config.name_capture_ix {
2759 node_is_name = true;
2760 } else if Some(capture.index) == config.context_capture_ix
2761 || (Some(capture.index) == config.extra_context_capture_ix
2762 && include_extra_context)
2763 {
2764 node_is_name = false;
2765 } else {
2766 continue;
2767 }
2768
2769 let mut range = capture.node.start_byte()..capture.node.end_byte();
2770 let start = capture.node.start_position();
2771 if capture.node.end_position().row > start.row {
2772 range.end =
2773 range.start + self.line_len(start.row as u32) as usize - start.column;
2774 }
2775
2776 if !range.is_empty() {
2777 buffer_ranges.push((range, node_is_name));
2778 }
2779 }
2780
2781 if buffer_ranges.is_empty() {
2782 matches.advance();
2783 continue;
2784 }
2785
2786 let mut text = String::new();
2787 let mut highlight_ranges = Vec::new();
2788 let mut name_ranges = Vec::new();
2789 let mut chunks = self.chunks(
2790 buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end,
2791 true,
2792 );
2793 let mut last_buffer_range_end = 0;
2794 for (buffer_range, is_name) in buffer_ranges {
2795 if !text.is_empty() && buffer_range.start > last_buffer_range_end {
2796 text.push(' ');
2797 }
2798 last_buffer_range_end = buffer_range.end;
2799 if is_name {
2800 let mut start = text.len();
2801 let end = start + buffer_range.len();
2802
                    // When multiple names are captured, the matchable text includes
                    // the whitespace in between the names.
2805 if !name_ranges.is_empty() {
2806 start -= 1;
2807 }
2808
2809 name_ranges.push(start..end);
2810 }
2811
2812 let mut offset = buffer_range.start;
2813 chunks.seek(offset);
2814 for mut chunk in chunks.by_ref() {
2815 if chunk.text.len() > buffer_range.end - offset {
2816 chunk.text = &chunk.text[0..(buffer_range.end - offset)];
2817 offset = buffer_range.end;
2818 } else {
2819 offset += chunk.text.len();
2820 }
2821 let style = chunk
2822 .syntax_highlight_id
2823 .zip(theme)
2824 .and_then(|(highlight, theme)| highlight.style(theme));
2825 if let Some(style) = style {
2826 let start = text.len();
2827 let end = start + chunk.text.len();
2828 highlight_ranges.push((start..end, style));
2829 }
2830 text.push_str(chunk.text);
2831 if offset >= buffer_range.end {
2832 break;
2833 }
2834 }
2835 }
2836
2837 matches.advance();
2838 while stack.last().map_or(false, |prev_range| {
2839 prev_range.start > item_range.start || prev_range.end < item_range.end
2840 }) {
2841 stack.pop();
2842 }
2843 stack.push(item_range.clone());
2844
2845 items.push(OutlineItem {
2846 depth: stack.len() - 1,
2847 range: self.anchor_after(item_range.start)..self.anchor_before(item_range.end),
2848 text,
2849 highlight_ranges,
2850 name_ranges,
2851 })
2852 }
2853 Some(items)
2854 }
2855
2856 /// For each grammar in the language, runs the provided
2857 /// [tree_sitter::Query] against the given range.
2858 pub fn matches(
2859 &self,
2860 range: Range<usize>,
2861 query: fn(&Grammar) -> Option<&tree_sitter::Query>,
2862 ) -> SyntaxMapMatches {
2863 self.syntax.matches(range, self, query)
2864 }
2865
    /// Returns bracket range pairs overlapping or adjacent to `range`.
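    ///
    /// A usage sketch (not compiled as a doctest); `snapshot` and `offset` are assumed:
    ///
    /// ```ignore
    /// for (open, close) in snapshot.bracket_ranges(offset..offset) {
    ///     // `open` and `close` are the byte ranges of the opening and closing brackets.
    /// }
    /// ```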
2867 pub fn bracket_ranges<T: ToOffset>(
2868 &self,
2869 range: Range<T>,
2870 ) -> impl Iterator<Item = (Range<usize>, Range<usize>)> + '_ {
2871 // Find bracket pairs that *inclusively* contain the given range.
2872 let range = range.start.to_offset(self).saturating_sub(1)
2873 ..self.len().min(range.end.to_offset(self) + 1);
2874
2875 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2876 grammar.brackets_config.as_ref().map(|c| &c.query)
2877 });
2878 let configs = matches
2879 .grammars()
2880 .iter()
2881 .map(|grammar| grammar.brackets_config.as_ref().unwrap())
2882 .collect::<Vec<_>>();
2883
2884 iter::from_fn(move || {
2885 while let Some(mat) = matches.peek() {
2886 let mut open = None;
2887 let mut close = None;
2888 let config = &configs[mat.grammar_index];
2889 for capture in mat.captures {
2890 if capture.index == config.open_capture_ix {
2891 open = Some(capture.node.byte_range());
2892 } else if capture.index == config.close_capture_ix {
2893 close = Some(capture.node.byte_range());
2894 }
2895 }
2896
2897 matches.advance();
2898
2899 let Some((open, close)) = open.zip(close) else {
2900 continue;
2901 };
2902
2903 let bracket_range = open.start..=close.end;
2904 if !bracket_range.overlaps(&range) {
2905 continue;
2906 }
2907
2908 return Some((open, close));
2909 }
2910 None
2911 })
2912 }
2913
    /// Returns enclosing bracket ranges containing the given range.
2915 pub fn enclosing_bracket_ranges<T: ToOffset>(
2916 &self,
2917 range: Range<T>,
2918 ) -> impl Iterator<Item = (Range<usize>, Range<usize>)> + '_ {
2919 let range = range.start.to_offset(self)..range.end.to_offset(self);
2920
2921 self.bracket_ranges(range.clone())
2922 .filter(move |(open, close)| open.start <= range.start && close.end >= range.end)
2923 }
2924
    /// Returns the smallest enclosing bracket ranges containing the given range,
    /// or `None` if no brackets contain the range.
    ///
    /// An optional `range_filter` can be passed to filter which bracket ranges are considered.
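    ///
    /// A usage sketch (not compiled as a doctest); `snapshot` and `offset` are assumed:
    ///
    /// ```ignore
    /// // Only consider single-character bracket pairs.
    /// let filter = |open: Range<usize>, _close: Range<usize>| open.len() == 1;
    /// let innermost = snapshot.innermost_enclosing_bracket_ranges(offset..offset, Some(&filter));
    /// ```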
2928 pub fn innermost_enclosing_bracket_ranges<T: ToOffset>(
2929 &self,
2930 range: Range<T>,
2931 range_filter: Option<&dyn Fn(Range<usize>, Range<usize>) -> bool>,
2932 ) -> Option<(Range<usize>, Range<usize>)> {
2933 let range = range.start.to_offset(self)..range.end.to_offset(self);
2934
2935 // Get the ranges of the innermost pair of brackets.
2936 let mut result: Option<(Range<usize>, Range<usize>)> = None;
2937
2938 for (open, close) in self.enclosing_bracket_ranges(range.clone()) {
2939 if let Some(range_filter) = range_filter {
2940 if !range_filter(open.clone(), close.clone()) {
2941 continue;
2942 }
2943 }
2944
2945 let len = close.end - open.start;
2946
2947 if let Some((existing_open, existing_close)) = &result {
2948 let existing_len = existing_close.end - existing_open.start;
2949 if len > existing_len {
2950 continue;
2951 }
2952 }
2953
2954 result = Some((open, close));
2955 }
2956
2957 result
2958 }
2959
2960 /// Returns anchor ranges for any matches of the redaction query.
2961 /// The buffer can be associated with multiple languages, and the redaction query associated with each
2962 /// will be run on the relevant section of the buffer.
2963 pub fn redacted_ranges<T: ToOffset>(
2964 &self,
2965 range: Range<T>,
2966 ) -> impl Iterator<Item = Range<usize>> + '_ {
2967 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
2968 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
2969 grammar
2970 .redactions_config
2971 .as_ref()
2972 .map(|config| &config.query)
2973 });
2974
2975 let configs = syntax_matches
2976 .grammars()
2977 .iter()
2978 .map(|grammar| grammar.redactions_config.as_ref())
2979 .collect::<Vec<_>>();
2980
2981 iter::from_fn(move || {
2982 let redacted_range = syntax_matches
2983 .peek()
2984 .and_then(|mat| {
2985 configs[mat.grammar_index].and_then(|config| {
2986 mat.captures
2987 .iter()
2988 .find(|capture| capture.index == config.redaction_capture_ix)
2989 })
2990 })
2991 .map(|mat| mat.node.byte_range());
2992 syntax_matches.advance();
2993 redacted_range
2994 })
2995 }
2996
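    /// Returns the runnable ranges (e.g. tests) captured by each grammar's runnable
    /// query within the given range.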
2997 pub fn runnable_ranges(
2998 &self,
2999 range: Range<Anchor>,
3000 ) -> impl Iterator<Item = RunnableRange> + '_ {
3001 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
3002
3003 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
3004 grammar.runnable_config.as_ref().map(|config| &config.query)
3005 });
3006
3007 let test_configs = syntax_matches
3008 .grammars()
3009 .iter()
3010 .map(|grammar| grammar.runnable_config.as_ref())
3011 .collect::<Vec<_>>();
3012
3013 iter::from_fn(move || {
3014 let test_range = syntax_matches.peek().and_then(|mat| {
3015 test_configs[mat.grammar_index].and_then(|test_configs| {
3016 let mut tags: SmallVec<[(Range<usize>, RunnableTag); 1]> =
3017 SmallVec::from_iter(mat.captures.iter().filter_map(|capture| {
3018 test_configs
3019 .runnable_tags
3020 .get(&capture.index)
3021 .cloned()
3022 .map(|tag_name| (capture.node.byte_range(), tag_name))
3023 }));
3024 let maximum_range = tags
3025 .iter()
3026 .max_by_key(|(byte_range, _)| byte_range.len())
3027 .map(|(range, _)| range)?
3028 .clone();
3029 tags.sort_by_key(|(range, _)| range == &maximum_range);
3030 let split_point = tags.partition_point(|(range, _)| range != &maximum_range);
3031 let (extra_captures, tags) = tags.split_at(split_point);
3032 let extra_captures = extra_captures
3033 .into_iter()
3034 .map(|(range, name)| {
3035 (
3036 name.0.to_string(),
3037 self.text_for_range(range.clone()).collect::<String>(),
3038 )
3039 })
3040 .collect();
3041 Some(RunnableRange {
3042 run_range: mat
3043 .captures
3044 .iter()
3045 .find(|capture| capture.index == test_configs.run_capture_ix)
3046 .map(|mat| mat.node.byte_range())?,
3047 runnable: Runnable {
3048 tags: tags.into_iter().cloned().map(|(_, tag)| tag).collect(),
3049 language: mat.language,
3050 buffer: self.remote_id(),
3051 },
3052 extra_captures,
3053 buffer_id: self.remote_id(),
3054 })
3055 })
3056 });
3057 syntax_matches.advance();
3058 test_range
3059 })
3060 }
3061
3062 /// Returns selections for remote peers intersecting the given range.
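    ///
    /// A usage sketch (not compiled as a doctest), assuming `snapshot` is a [`BufferSnapshot`]
    /// and `visible_range` is a `Range<Anchor>`:
    ///
    /// ```ignore
    /// for (replica_id, line_mode, cursor_shape, selections) in
    ///     snapshot.remote_selections_in_range(visible_range)
    /// {
    ///     for selection in selections {
    ///         // Render `selection` for the collaborator identified by `replica_id`.
    ///     }
    /// }
    /// ```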
3063 #[allow(clippy::type_complexity)]
3064 pub fn remote_selections_in_range(
3065 &self,
3066 range: Range<Anchor>,
3067 ) -> impl Iterator<
3068 Item = (
3069 ReplicaId,
3070 bool,
3071 CursorShape,
3072 impl Iterator<Item = &Selection<Anchor>> + '_,
3073 ),
3074 > + '_ {
3075 self.remote_selections
3076 .iter()
3077 .filter(|(replica_id, set)| {
3078 **replica_id != self.text.replica_id() && !set.selections.is_empty()
3079 })
3080 .map(move |(replica_id, set)| {
3081 let start_ix = match set.selections.binary_search_by(|probe| {
3082 probe.end.cmp(&range.start, self).then(Ordering::Greater)
3083 }) {
3084 Ok(ix) | Err(ix) => ix,
3085 };
3086 let end_ix = match set.selections.binary_search_by(|probe| {
3087 probe.start.cmp(&range.end, self).then(Ordering::Less)
3088 }) {
3089 Ok(ix) | Err(ix) => ix,
3090 };
3091
3092 (
3093 *replica_id,
3094 set.line_mode,
3095 set.cursor_shape,
3096 set.selections[start_ix..end_ix].iter(),
3097 )
3098 })
3099 }
3100
3101 /// Whether the buffer contains any git changes.
3102 pub fn has_git_diff(&self) -> bool {
3103 !self.git_diff.is_empty()
3104 }
3105
3106 /// Returns all the Git diff hunks intersecting the given
3107 /// row range.
3108 pub fn git_diff_hunks_in_row_range(
3109 &self,
3110 range: Range<BufferRow>,
3111 ) -> impl '_ + Iterator<Item = git::diff::DiffHunk<u32>> {
3112 self.git_diff.hunks_in_row_range(range, self)
3113 }
3114
3115 /// Returns all the Git diff hunks intersecting the given
3116 /// range.
3117 pub fn git_diff_hunks_intersecting_range(
3118 &self,
3119 range: Range<Anchor>,
3120 ) -> impl '_ + Iterator<Item = git::diff::DiffHunk<u32>> {
3121 self.git_diff.hunks_intersecting_range(range, self)
3122 }
3123
3124 /// Returns all the Git diff hunks intersecting the given
3125 /// range, in reverse order.
3126 pub fn git_diff_hunks_intersecting_range_rev(
3127 &self,
3128 range: Range<Anchor>,
3129 ) -> impl '_ + Iterator<Item = git::diff::DiffHunk<u32>> {
3130 self.git_diff.hunks_intersecting_range_rev(range, self)
3131 }
3132
    /// Returns whether the buffer contains any diagnostics.
3134 pub fn has_diagnostics(&self) -> bool {
3135 !self.diagnostics.is_empty()
3136 }
3137
3138 /// Returns all the diagnostics intersecting the given range.
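    ///
    /// A usage sketch (not compiled as a doctest); `snapshot` and `range` are assumed:
    ///
    /// ```ignore
    /// // Collect the error diagnostics in `range`, resolved to point coordinates.
    /// let errors: Vec<_> = snapshot
    ///     .diagnostics_in_range::<_, Point>(range, false)
    ///     .filter(|entry| entry.diagnostic.severity == DiagnosticSeverity::ERROR)
    ///     .collect();
    /// ```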
3139 pub fn diagnostics_in_range<'a, T, O>(
3140 &'a self,
3141 search_range: Range<T>,
3142 reversed: bool,
3143 ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
3144 where
3145 T: 'a + Clone + ToOffset,
3146 O: 'a + FromAnchor + Ord,
3147 {
3148 let mut iterators: Vec<_> = self
3149 .diagnostics
3150 .iter()
3151 .map(|(_, collection)| {
3152 collection
3153 .range::<T, O>(search_range.clone(), self, true, reversed)
3154 .peekable()
3155 })
3156 .collect();
3157
3158 std::iter::from_fn(move || {
3159 let (next_ix, _) = iterators
3160 .iter_mut()
3161 .enumerate()
3162 .flat_map(|(ix, iter)| Some((ix, iter.peek()?)))
3163 .min_by(|(_, a), (_, b)| {
3164 let cmp = a
3165 .range
3166 .start
3167 .cmp(&b.range.start)
3168 // when range is equal, sort by diagnostic severity
3169 .then(a.diagnostic.severity.cmp(&b.diagnostic.severity))
3170 // and stabilize order with group_id
3171 .then(a.diagnostic.group_id.cmp(&b.diagnostic.group_id));
3172 if reversed {
3173 cmp.reverse()
3174 } else {
3175 cmp
3176 }
3177 })?;
3178 iterators[next_ix].next()
3179 })
3180 }
3181
    /// Returns all the diagnostic groups associated with the given
    /// language server id. If no language server id is provided,
    /// all diagnostic groups are returned.
3185 pub fn diagnostic_groups(
3186 &self,
3187 language_server_id: Option<LanguageServerId>,
3188 ) -> Vec<(LanguageServerId, DiagnosticGroup<Anchor>)> {
3189 let mut groups = Vec::new();
3190
3191 if let Some(language_server_id) = language_server_id {
3192 if let Ok(ix) = self
3193 .diagnostics
3194 .binary_search_by_key(&language_server_id, |e| e.0)
3195 {
3196 self.diagnostics[ix]
3197 .1
3198 .groups(language_server_id, &mut groups, self);
3199 }
3200 } else {
3201 for (language_server_id, diagnostics) in self.diagnostics.iter() {
3202 diagnostics.groups(*language_server_id, &mut groups, self);
3203 }
3204 }
3205
3206 groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
3207 let a_start = &group_a.entries[group_a.primary_ix].range.start;
3208 let b_start = &group_b.entries[group_b.primary_ix].range.start;
3209 a_start.cmp(b_start, self).then_with(|| id_a.cmp(id_b))
3210 });
3211
3212 groups
3213 }
3214
3215 /// Returns an iterator over the diagnostics for the given group.
3216 pub fn diagnostic_group<'a, O>(
3217 &'a self,
3218 group_id: usize,
3219 ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
3220 where
3221 O: 'a + FromAnchor,
3222 {
3223 self.diagnostics
3224 .iter()
3225 .flat_map(move |(_, set)| set.group(group_id, self))
3226 }
3227
3228 /// The number of times diagnostics were updated.
3229 pub fn diagnostics_update_count(&self) -> usize {
3230 self.diagnostics_update_count
3231 }
3232
3233 /// The number of times the buffer was parsed.
3234 pub fn parse_count(&self) -> usize {
3235 self.parse_count
3236 }
3237
3238 /// The number of times selections were updated.
3239 pub fn selections_update_count(&self) -> usize {
3240 self.selections_update_count
3241 }
3242
    /// Returns a snapshot of the underlying file.
3244 pub fn file(&self) -> Option<&Arc<dyn File>> {
3245 self.file.as_ref()
3246 }
3247
3248 /// Resolves the file path (relative to the worktree root) associated with the underlying file.
3249 pub fn resolve_file_path(&self, cx: &AppContext, include_root: bool) -> Option<PathBuf> {
3250 if let Some(file) = self.file() {
3251 if file.path().file_name().is_none() || include_root {
3252 Some(file.full_path(cx))
3253 } else {
3254 Some(file.path().to_path_buf())
3255 }
3256 } else {
3257 None
3258 }
3259 }
3260
3261 /// The number of times the underlying file was updated.
3262 pub fn file_update_count(&self) -> usize {
3263 self.file_update_count
3264 }
3265
3266 /// The number of times the git diff status was updated.
3267 pub fn git_diff_update_count(&self) -> usize {
3268 self.git_diff_update_count
3269 }
3270}
3271
3272fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
3273 indent_size_for_text(text.chars_at(Point::new(row, 0)))
3274}
3275
3276fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
3277 let mut result = IndentSize::spaces(0);
3278 for c in text {
3279 let kind = match c {
3280 ' ' => IndentKind::Space,
3281 '\t' => IndentKind::Tab,
3282 _ => break,
3283 };
3284 if result.len == 0 {
3285 result.kind = kind;
3286 }
3287 result.len += 1;
3288 }
3289 result
3290}
3291
3292impl Clone for BufferSnapshot {
3293 fn clone(&self) -> Self {
3294 Self {
3295 text: self.text.clone(),
3296 git_diff: self.git_diff.clone(),
3297 syntax: self.syntax.clone(),
3298 file: self.file.clone(),
3299 remote_selections: self.remote_selections.clone(),
3300 diagnostics: self.diagnostics.clone(),
3301 selections_update_count: self.selections_update_count,
3302 diagnostics_update_count: self.diagnostics_update_count,
3303 file_update_count: self.file_update_count,
3304 git_diff_update_count: self.git_diff_update_count,
3305 language: self.language.clone(),
3306 parse_count: self.parse_count,
3307 }
3308 }
3309}
3310
3311impl Deref for BufferSnapshot {
3312 type Target = text::BufferSnapshot;
3313
3314 fn deref(&self) -> &Self::Target {
3315 &self.text
3316 }
3317}
3318
3319unsafe impl<'a> Send for BufferChunks<'a> {}
3320
3321impl<'a> BufferChunks<'a> {
3322 pub(crate) fn new(
3323 text: &'a Rope,
3324 range: Range<usize>,
3325 syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
3326 diagnostic_endpoints: Vec<DiagnosticEndpoint>,
3327 ) -> Self {
3328 let mut highlights = None;
3329 if let Some((captures, highlight_maps)) = syntax {
3330 highlights = Some(BufferChunkHighlights {
3331 captures,
3332 next_capture: None,
3333 stack: Default::default(),
3334 highlight_maps,
3335 })
3336 }
3337
3338 let diagnostic_endpoints = diagnostic_endpoints.into_iter().peekable();
3339 let chunks = text.chunks_in_range(range.clone());
3340
3341 BufferChunks {
3342 range,
3343 chunks,
3344 diagnostic_endpoints,
3345 error_depth: 0,
3346 warning_depth: 0,
3347 information_depth: 0,
3348 hint_depth: 0,
3349 unnecessary_depth: 0,
3350 highlights,
3351 }
3352 }
3353
3354 /// Seeks to the given byte offset in the buffer.
3355 pub fn seek(&mut self, offset: usize) {
3356 self.range.start = offset;
3357 self.chunks.seek(self.range.start);
3358 if let Some(highlights) = self.highlights.as_mut() {
3359 highlights
3360 .stack
3361 .retain(|(end_offset, _)| *end_offset > offset);
3362 if let Some(capture) = &highlights.next_capture {
3363 if offset >= capture.node.start_byte() {
3364 let next_capture_end = capture.node.end_byte();
3365 if offset < next_capture_end {
3366 highlights.stack.push((
3367 next_capture_end,
3368 highlights.highlight_maps[capture.grammar_index].get(capture.index),
3369 ));
3370 }
3371 highlights.next_capture.take();
3372 }
3373 }
3374 highlights.captures.set_byte_range(self.range.clone());
3375 }
3376 }
3377
3378 /// The current byte offset in the buffer.
3379 pub fn offset(&self) -> usize {
3380 self.range.start
3381 }
3382
3383 fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
3384 let depth = match endpoint.severity {
3385 DiagnosticSeverity::ERROR => &mut self.error_depth,
3386 DiagnosticSeverity::WARNING => &mut self.warning_depth,
3387 DiagnosticSeverity::INFORMATION => &mut self.information_depth,
3388 DiagnosticSeverity::HINT => &mut self.hint_depth,
3389 _ => return,
3390 };
3391 if endpoint.is_start {
3392 *depth += 1;
3393 } else {
3394 *depth -= 1;
3395 }
3396
3397 if endpoint.is_unnecessary {
3398 if endpoint.is_start {
3399 self.unnecessary_depth += 1;
3400 } else {
3401 self.unnecessary_depth -= 1;
3402 }
3403 }
3404 }
3405
3406 fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
3407 if self.error_depth > 0 {
3408 Some(DiagnosticSeverity::ERROR)
3409 } else if self.warning_depth > 0 {
3410 Some(DiagnosticSeverity::WARNING)
3411 } else if self.information_depth > 0 {
3412 Some(DiagnosticSeverity::INFORMATION)
3413 } else if self.hint_depth > 0 {
3414 Some(DiagnosticSeverity::HINT)
3415 } else {
3416 None
3417 }
3418 }
3419
3420 fn current_code_is_unnecessary(&self) -> bool {
3421 self.unnecessary_depth > 0
3422 }
3423}
3424
3425impl<'a> Iterator for BufferChunks<'a> {
3426 type Item = Chunk<'a>;
3427
3428 fn next(&mut self) -> Option<Self::Item> {
3429 let mut next_capture_start = usize::MAX;
3430 let mut next_diagnostic_endpoint = usize::MAX;
3431
3432 if let Some(highlights) = self.highlights.as_mut() {
3433 while let Some((parent_capture_end, _)) = highlights.stack.last() {
3434 if *parent_capture_end <= self.range.start {
3435 highlights.stack.pop();
3436 } else {
3437 break;
3438 }
3439 }
3440
3441 if highlights.next_capture.is_none() {
3442 highlights.next_capture = highlights.captures.next();
3443 }
3444
3445 while let Some(capture) = highlights.next_capture.as_ref() {
3446 if self.range.start < capture.node.start_byte() {
3447 next_capture_start = capture.node.start_byte();
3448 break;
3449 } else {
3450 let highlight_id =
3451 highlights.highlight_maps[capture.grammar_index].get(capture.index);
3452 highlights
3453 .stack
3454 .push((capture.node.end_byte(), highlight_id));
3455 highlights.next_capture = highlights.captures.next();
3456 }
3457 }
3458 }
3459
3460 while let Some(endpoint) = self.diagnostic_endpoints.peek().copied() {
3461 if endpoint.offset <= self.range.start {
3462 self.update_diagnostic_depths(endpoint);
3463 self.diagnostic_endpoints.next();
3464 } else {
3465 next_diagnostic_endpoint = endpoint.offset;
3466 break;
3467 }
3468 }
3469
3470 if let Some(chunk) = self.chunks.peek() {
3471 let chunk_start = self.range.start;
3472 let mut chunk_end = (self.chunks.offset() + chunk.len())
3473 .min(next_capture_start)
3474 .min(next_diagnostic_endpoint);
3475 let mut highlight_id = None;
3476 if let Some(highlights) = self.highlights.as_ref() {
3477 if let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last() {
3478 chunk_end = chunk_end.min(*parent_capture_end);
3479 highlight_id = Some(*parent_highlight_id);
3480 }
3481 }
3482
3483 let slice =
3484 &chunk[chunk_start - self.chunks.offset()..chunk_end - self.chunks.offset()];
3485 self.range.start = chunk_end;
3486 if self.range.start == self.chunks.offset() + chunk.len() {
3487 self.chunks.next().unwrap();
3488 }
3489
3490 Some(Chunk {
3491 text: slice,
3492 syntax_highlight_id: highlight_id,
3493 diagnostic_severity: self.current_diagnostic_severity(),
3494 is_unnecessary: self.current_code_is_unnecessary(),
3495 ..Default::default()
3496 })
3497 } else {
3498 None
3499 }
3500 }
3501}
3502
3503impl operation_queue::Operation for Operation {
3504 fn lamport_timestamp(&self) -> clock::Lamport {
3505 match self {
3506 Operation::Buffer(_) => {
3507 unreachable!("buffer operations should never be deferred at this layer")
3508 }
3509 Operation::UpdateDiagnostics {
3510 lamport_timestamp, ..
3511 }
3512 | Operation::UpdateSelections {
3513 lamport_timestamp, ..
3514 }
3515 | Operation::UpdateCompletionTriggers {
3516 lamport_timestamp, ..
3517 } => *lamport_timestamp,
3518 }
3519 }
3520}
3521
3522impl Default for Diagnostic {
3523 fn default() -> Self {
3524 Self {
3525 source: Default::default(),
3526 code: None,
3527 severity: DiagnosticSeverity::ERROR,
3528 message: Default::default(),
3529 group_id: 0,
3530 is_primary: false,
3531 is_disk_based: false,
3532 is_unnecessary: false,
3533 }
3534 }
3535}
3536
3537impl IndentSize {
    /// Returns an [IndentSize] representing the given number of spaces.
3539 pub fn spaces(len: u32) -> Self {
3540 Self {
3541 len,
3542 kind: IndentKind::Space,
3543 }
3544 }
3545
3546 /// Returns an [IndentSize] representing a tab.
3547 pub fn tab() -> Self {
3548 Self {
3549 len: 1,
3550 kind: IndentKind::Tab,
3551 }
3552 }
3553
3554 /// An iterator over the characters represented by this [IndentSize].
3555 pub fn chars(&self) -> impl Iterator<Item = char> {
3556 iter::repeat(self.char()).take(self.len as usize)
3557 }
3558
3559 /// The character representation of this [IndentSize].
3560 pub fn char(&self) -> char {
3561 match self.kind {
3562 IndentKind::Space => ' ',
3563 IndentKind::Tab => '\t',
3564 }
3565 }
3566
    /// Consumes the current [IndentSize] and returns a new one that has
    /// been shrunk or enlarged by the given size in the given direction.
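    ///
    /// A minimal sketch (not compiled as a doctest):
    ///
    /// ```ignore
    /// // Growing a four-space indent by one four-space unit yields eight spaces.
    /// let indent = IndentSize::spaces(4).with_delta(Ordering::Greater, IndentSize::spaces(4));
    /// assert_eq!(indent.len, 8);
    /// ```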
    pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
        match direction {
            Ordering::Less => {
                if self.kind == size.kind && self.len >= size.len {
                    self.len -= size.len;
                }
            }
            Ordering::Equal => {}
            Ordering::Greater => {
                if self.len == 0 {
                    self = size;
                } else if self.kind == size.kind {
                    self.len += size.len;
                }
            }
        }
        self
    }
}

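/// A minimal in-memory [File] used by tests and the `test-support` feature;
/// most trait methods are intentionally left unimplemented.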
#[cfg(any(test, feature = "test-support"))]
pub struct TestFile {
    pub path: Arc<Path>,
    pub root_name: String,
}

#[cfg(any(test, feature = "test-support"))]
impl File for TestFile {
    fn path(&self) -> &Arc<Path> {
        &self.path
    }

    fn full_path(&self, _: &gpui::AppContext) -> PathBuf {
        PathBuf::from(&self.root_name).join(self.path.as_ref())
    }

    fn as_local(&self) -> Option<&dyn LocalFile> {
        None
    }

    fn mtime(&self) -> Option<SystemTime> {
        unimplemented!()
    }

    fn file_name<'a>(&'a self, _: &'a gpui::AppContext) -> &'a std::ffi::OsStr {
        self.path().file_name().unwrap_or(self.root_name.as_ref())
    }

    fn worktree_id(&self) -> usize {
        0
    }

    fn is_deleted(&self) -> bool {
        unimplemented!()
    }

    fn as_any(&self) -> &dyn std::any::Any {
        unimplemented!()
    }

    fn to_proto(&self) -> rpc::proto::File {
        unimplemented!()
    }

    fn is_private(&self) -> bool {
        false
    }
}

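/// Groups an ascending sequence of `u32` values (typically buffer rows) into
/// contiguous ranges, starting a new range whenever a gap is encountered or
/// the current range reaches `max_len` values.
///
/// A rough sketch of the behavior (not compiled as a doc-test):
///
/// ```ignore
/// let ranges: Vec<_> = contiguous_ranges([1u32, 2, 3, 5, 6, 9].into_iter(), 100).collect();
/// assert_eq!(ranges, vec![1..4, 5..7, 9..10]);
/// ```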
pub(crate) fn contiguous_ranges(
    values: impl Iterator<Item = u32>,
    max_len: usize,
) -> impl Iterator<Item = Range<u32>> {
    let mut values = values;
    let mut current_range: Option<Range<u32>> = None;
    std::iter::from_fn(move || loop {
        if let Some(value) = values.next() {
            if let Some(range) = &mut current_range {
                if value == range.end && range.len() < max_len {
                    range.end += 1;
                    continue;
                }
            }

            let prev_range = current_range.clone();
            current_range = Some(value..(value + 1));
            if prev_range.is_some() {
                return prev_range;
            }
        } else {
            return current_range.take();
        }
    })
}

/// Returns the [CharKind] for the given character. When a scope is provided,
/// the function checks if the character is considered a word character
/// based on the language scope's word character settings.
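///
/// A rough sketch of the behavior (not compiled as a doc-test):
///
/// ```ignore
/// // Without a language scope, only alphanumerics and underscores are words.
/// assert!(matches!(char_kind(&None, '_'), CharKind::Word));
/// assert!(matches!(char_kind(&None, ' '), CharKind::Whitespace));
/// assert!(matches!(char_kind(&None, '-'), CharKind::Punctuation));
/// ```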
pub fn char_kind(scope: &Option<LanguageScope>, c: char) -> CharKind {
    if c.is_whitespace() {
        return CharKind::Whitespace;
    } else if c.is_alphanumeric() || c == '_' {
        return CharKind::Word;
    }

    if let Some(scope) = scope {
        if let Some(characters) = scope.word_characters() {
            if characters.contains(&c) {
                return CharKind::Word;
            }
        }
    }

    CharKind::Punctuation
}

/// Finds all of the ranges of whitespace that occur at the ends of lines
/// in the given rope.
///
/// This could also be done with a regex search, but this implementation
/// avoids copying text.
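///
/// A rough sketch of the behavior (not compiled as a doc-test):
///
/// ```ignore
/// let rope = Rope::from("fn main() {   \n}\t\t\n");
/// // One byte range per line that ends in spaces or tabs.
/// assert_eq!(trailing_whitespace_ranges(&rope), vec![11..14, 16..18]);
/// ```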
pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
    let mut ranges = Vec::new();

    let mut offset = 0;
    let mut prev_chunk_trailing_whitespace_range = 0..0;
    for chunk in rope.chunks() {
        let mut prev_line_trailing_whitespace_range = 0..0;
        for (i, line) in chunk.split('\n').enumerate() {
            let line_end_offset = offset + line.len();
            let trimmed_line_len = line.trim_end_matches(|c| matches!(c, ' ' | '\t')).len();
            let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;

            // The first line of a chunk may continue a run of trailing
            // whitespace that began at the end of the previous chunk.
            if i == 0 && trimmed_line_len == 0 {
                trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
            }
            if !prev_line_trailing_whitespace_range.is_empty() {
                ranges.push(prev_line_trailing_whitespace_range);
            }

            offset = line_end_offset + 1;
            prev_line_trailing_whitespace_range = trailing_whitespace_range;
        }

        offset -= 1;
        prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
    }

    if !prev_chunk_trailing_whitespace_range.is_empty() {
        ranges.push(prev_chunk_trailing_whitespace_range);
    }

    ranges
}